Diffstat (limited to 'libavformat')
-rw-r--r--  libavformat/bfi.c             2
-rw-r--r--  libavformat/bmv.c             2
-rw-r--r--  libavformat/dvenc.c           2
-rw-r--r--  libavformat/filmstripdec.c    2
-rw-r--r--  libavformat/filmstripenc.c    4
-rw-r--r--  libavformat/gif.c             6
-rw-r--r--  libavformat/gxfenc.c          6
-rw-r--r--  libavformat/icoenc.c         14
-rw-r--r--  libavformat/iff.c             4
-rw-r--r--  libavformat/img2dec.c         6
-rw-r--r--  libavformat/lxfdec.c          2
-rw-r--r--  libavformat/movenc.c         38
-rw-r--r--  libavformat/mtv.c             2
-rw-r--r--  libavformat/mxf.c            36
-rw-r--r--  libavformat/mxf.h             2
-rw-r--r--  libavformat/mxfdec.c         10
-rw-r--r--  libavformat/rawdec.c          4
-rw-r--r--  libavformat/rtpdec_xiph.c     6
-rw-r--r--  libavformat/rtpenc_jpeg.c     6
-rw-r--r--  libavformat/sdp.c             6
-rw-r--r--  libavformat/segafilm.c        2
-rw-r--r--  libavformat/siff.c            2
-rw-r--r--  libavformat/smacker.c         2
-rw-r--r--  libavformat/tmv.c             2
-rw-r--r--  libavformat/utils.c           2
-rw-r--r--  libavformat/yuv4mpeg.c      168
26 files changed, 169 insertions, 169 deletions
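Every hunk below applies the same mechanical rename: the unprefixed pixel-format identifiers (PIX_FMT_* and enum PixelFormat) are replaced by their namespaced libavutil counterparts (AV_PIX_FMT_* and enum AVPixelFormat). As an illustration only — the demuxer and function name here are hypothetical and not part of this commit — the before/after pattern in a typical read_header looks like this:

/* Hypothetical demuxer sketch (not part of this commit) showing the rename
 * applied throughout this diff: PIX_FMT_RGB24 / enum PixelFormat become
 * AV_PIX_FMT_RGB24 / enum AVPixelFormat. */
#include "avformat.h"

static int example_read_header(AVFormatContext *s)
{
    AVStream *st = avformat_new_stream(s, NULL);
    if (!st)
        return AVERROR(ENOMEM);

    st->codec->codec_type = AVMEDIA_TYPE_VIDEO;
    st->codec->codec_id   = AV_CODEC_ID_RAWVIDEO;
    st->codec->pix_fmt    = AV_PIX_FMT_RGB24;   /* was: PIX_FMT_RGB24 */
    return 0;
}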
diff --git a/libavformat/bfi.c b/libavformat/bfi.c
index c03a1deb29..446ef57fc9 100644
--- a/libavformat/bfi.c
+++ b/libavformat/bfi.c
@@ -90,7 +90,7 @@ static int bfi_read_header(AVFormatContext * s)
avpriv_set_pts_info(vstream, 32, 1, fps);
vstream->codec->codec_type = AVMEDIA_TYPE_VIDEO;
vstream->codec->codec_id = AV_CODEC_ID_BFI;
- vstream->codec->pix_fmt = PIX_FMT_PAL8;
+ vstream->codec->pix_fmt = AV_PIX_FMT_PAL8;
/* Set up the audio codec now... */
astream->codec->codec_type = AVMEDIA_TYPE_AUDIO;
diff --git a/libavformat/bmv.c b/libavformat/bmv.c
index 474f4e3bc3..fe5db3f004 100644
--- a/libavformat/bmv.c
+++ b/libavformat/bmv.c
@@ -50,7 +50,7 @@ static int bmv_read_header(AVFormatContext *s)
st->codec->codec_id = AV_CODEC_ID_BMV_VIDEO;
st->codec->width = 640;
st->codec->height = 429;
- st->codec->pix_fmt = PIX_FMT_PAL8;
+ st->codec->pix_fmt = AV_PIX_FMT_PAL8;
avpriv_set_pts_info(st, 16, 1, 12);
ast = avformat_new_stream(s, 0);
if (!ast)
diff --git a/libavformat/dvenc.c b/libavformat/dvenc.c
index 604f4ce833..a132edbadf 100644
--- a/libavformat/dvenc.c
+++ b/libavformat/dvenc.c
@@ -123,7 +123,7 @@ static int dv_write_pack(enum dv_pack_type pack_id, DVMuxContext *c, uint8_t* bu
(1 << 3) | /* recording mode: 1 -- original */
7;
buf[3] = (1 << 7) | /* direction: 1 -- forward */
- (c->sys->pix_fmt == PIX_FMT_YUV420P ? 0x20 : /* speed */
+ (c->sys->pix_fmt == AV_PIX_FMT_YUV420P ? 0x20 : /* speed */
c->sys->ltc_divisor * 4);
buf[4] = (1 << 7) | /* reserved -- always 1 */
0x7f; /* genre category */
diff --git a/libavformat/filmstripdec.c b/libavformat/filmstripdec.c
index 0d02818fbf..b41fdb73e5 100644
--- a/libavformat/filmstripdec.c
+++ b/libavformat/filmstripdec.c
@@ -62,7 +62,7 @@ static int read_header(AVFormatContext *s)
avio_skip(pb, 2);
st->codec->codec_type = AVMEDIA_TYPE_VIDEO;
st->codec->codec_id = AV_CODEC_ID_RAWVIDEO;
- st->codec->pix_fmt = PIX_FMT_RGBA;
+ st->codec->pix_fmt = AV_PIX_FMT_RGBA;
st->codec->codec_tag = 0; /* no fourcc */
st->codec->width = avio_rb16(pb);
st->codec->height = avio_rb16(pb);
diff --git a/libavformat/filmstripenc.c b/libavformat/filmstripenc.c
index afbca5c6d9..99fbf0b570 100644
--- a/libavformat/filmstripenc.c
+++ b/libavformat/filmstripenc.c
@@ -35,8 +35,8 @@ typedef struct {
static int write_header(AVFormatContext *s)
{
- if (s->streams[0]->codec->pix_fmt != PIX_FMT_RGBA) {
- av_log(s, AV_LOG_ERROR, "only PIX_FMT_RGBA is supported\n");
+ if (s->streams[0]->codec->pix_fmt != AV_PIX_FMT_RGBA) {
+ av_log(s, AV_LOG_ERROR, "only AV_PIX_FMT_RGBA is supported\n");
return AVERROR_INVALIDDATA;
}
return 0;
diff --git a/libavformat/gif.c b/libavformat/gif.c
index 2cff81a493..31b01018ad 100644
--- a/libavformat/gif.c
+++ b/libavformat/gif.c
@@ -223,7 +223,7 @@ static int gif_image_write_image(AVIOContext *pb,
put_bits(&p, 9, 0x0100); /* clear code */
for (i = (left < GIF_CHUNKS) ? left : GIF_CHUNKS; i; i--) {
- if (pix_fmt == PIX_FMT_RGB24) {
+ if (pix_fmt == AV_PIX_FMT_RGB24) {
v = gif_clut_index(ptr[0], ptr[1], ptr[2]);
ptr += 3;
} else {
@@ -290,7 +290,7 @@ static int gif_write_header(AVFormatContext *s)
// rate = video_enc->time_base.den;
}
- if (video_enc->pix_fmt != PIX_FMT_RGB24) {
+ if (video_enc->pix_fmt != AV_PIX_FMT_RGB24) {
av_log(s, AV_LOG_ERROR,
"ERROR: gif only handles the rgb24 pixel format. Use -pix_fmt rgb24.\n");
return AVERROR(EIO);
@@ -327,7 +327,7 @@ static int gif_write_video(AVFormatContext *s, AVCodecContext *enc,
avio_w8(pb, 0x00);
gif_image_write_image(pb, 0, 0, enc->width, enc->height,
- buf, enc->width * 3, PIX_FMT_RGB24);
+ buf, enc->width * 3, AV_PIX_FMT_RGB24);
avio_flush(s->pb);
return 0;
diff --git a/libavformat/gxfenc.c b/libavformat/gxfenc.c
index 70370a4162..1c2ff5d399 100644
--- a/libavformat/gxfenc.c
+++ b/libavformat/gxfenc.c
@@ -209,7 +209,7 @@ static int gxf_write_mpeg_auxiliary(AVIOContext *pb, AVStream *st)
size = snprintf(buffer, sizeof(buffer), "Ver 1\nBr %.6f\nIpg 1\nPpi %d\nBpiop %d\n"
"Pix 0\nCf %d\nCg %d\nSl %d\nnl16 %d\nVi 1\nf1 1\n",
(float)st->codec->bit_rate, sc->p_per_gop, sc->b_per_i_or_p,
- st->codec->pix_fmt == PIX_FMT_YUV422P ? 2 : 1, sc->first_gop_closed == 1,
+ st->codec->pix_fmt == AV_PIX_FMT_YUV422P ? 2 : 1, sc->first_gop_closed == 1,
starting_line, (st->codec->height + 15) / 16);
av_assert0(size < sizeof(buffer));
avio_w8(pb, TRACK_MPG_AUX);
@@ -503,7 +503,7 @@ static int gxf_write_umf_media_mpeg(AVIOContext *pb, AVStream *st)
{
GXFStreamContext *sc = st->priv_data;
- if (st->codec->pix_fmt == PIX_FMT_YUV422P)
+ if (st->codec->pix_fmt == AV_PIX_FMT_YUV422P)
avio_wl32(pb, 2);
else
avio_wl32(pb, 1); /* default to 420 */
@@ -772,7 +772,7 @@ static int gxf_write_header(AVFormatContext *s)
media_info = 'M';
break;
case AV_CODEC_ID_DVVIDEO:
- if (st->codec->pix_fmt == PIX_FMT_YUV422P) {
+ if (st->codec->pix_fmt == AV_PIX_FMT_YUV422P) {
sc->media_type += 2;
sc->track_type = 6;
gxf->flags |= 0x00002000;
diff --git a/libavformat/icoenc.c b/libavformat/icoenc.c
index e755b211ac..12f959ef5d 100644
--- a/libavformat/icoenc.c
+++ b/libavformat/icoenc.c
@@ -45,18 +45,18 @@ typedef struct {
static int ico_check_attributes(AVFormatContext *s, const AVCodecContext *c)
{
if (c->codec_id == CODEC_ID_BMP) {
- if (c->pix_fmt == PIX_FMT_PAL8 && PIX_FMT_RGB32 != PIX_FMT_BGRA) {
+ if (c->pix_fmt == AV_PIX_FMT_PAL8 && AV_PIX_FMT_RGB32 != AV_PIX_FMT_BGRA) {
av_log(s, AV_LOG_ERROR, "Wrong endianness for bmp pixel format\n");
return AVERROR(EINVAL);
- } else if (c->pix_fmt != PIX_FMT_PAL8 &&
- c->pix_fmt != PIX_FMT_RGB555LE &&
- c->pix_fmt != PIX_FMT_BGR24 &&
- c->pix_fmt != PIX_FMT_BGRA) {
+ } else if (c->pix_fmt != AV_PIX_FMT_PAL8 &&
+ c->pix_fmt != AV_PIX_FMT_RGB555LE &&
+ c->pix_fmt != AV_PIX_FMT_BGR24 &&
+ c->pix_fmt != AV_PIX_FMT_BGRA) {
av_log(s, AV_LOG_ERROR, "BMP must be 1bit, 4bit, 8bit, 16bit, 24bit, or 32bit\n");
return AVERROR(EINVAL);
}
} else if (c->codec_id == CODEC_ID_PNG) {
- if (c->pix_fmt != PIX_FMT_RGBA) {
+ if (c->pix_fmt != AV_PIX_FMT_RGBA) {
av_log(s, AV_LOG_ERROR, "PNG in ico requires pixel format to be rgba\n");
return AVERROR(EINVAL);
}
@@ -171,7 +171,7 @@ static int ico_write_trailer(AVFormatContext *s)
avio_w8(pb, ico->images[i].height);
if (s->streams[i]->codec->codec_id == CODEC_ID_BMP &&
- s->streams[i]->codec->pix_fmt == PIX_FMT_PAL8) {
+ s->streams[i]->codec->pix_fmt == AV_PIX_FMT_PAL8) {
avio_w8(pb, (ico->images[i].bits >= 8) ? 0 : 1 << ico->images[i].bits);
} else {
avio_w8(pb, 0);
diff --git a/libavformat/iff.c b/libavformat/iff.c
index 0055283b0e..d559ad3c2b 100644
--- a/libavformat/iff.c
+++ b/libavformat/iff.c
@@ -238,9 +238,9 @@ static int iff_read_header(AVFormatContext *s)
if ((fmt_size = avio_read(pb, fmt, sizeof(fmt))) < 0)
return fmt_size;
if (fmt_size == sizeof(deep_rgb24) && !memcmp(fmt, deep_rgb24, sizeof(deep_rgb24)))
- st->codec->pix_fmt = PIX_FMT_RGB24;
+ st->codec->pix_fmt = AV_PIX_FMT_RGB24;
else if (fmt_size == sizeof(deep_rgba) && !memcmp(fmt, deep_rgba, sizeof(deep_rgba)))
- st->codec->pix_fmt = PIX_FMT_RGBA;
+ st->codec->pix_fmt = AV_PIX_FMT_RGBA;
else {
av_log_ask_for_sample(s, "unsupported color format\n");
return AVERROR_PATCHWELCOME;
diff --git a/libavformat/img2dec.c b/libavformat/img2dec.c
index d1edf04015..1190af30c8 100644
--- a/libavformat/img2dec.c
+++ b/libavformat/img2dec.c
@@ -191,7 +191,7 @@ static int read_header(AVFormatContext *s1)
int first_index, last_index, ret = 0;
int width = 0, height = 0;
AVStream *st;
- enum PixelFormat pix_fmt = PIX_FMT_NONE;
+ enum AVPixelFormat pix_fmt = AV_PIX_FMT_NONE;
AVRational framerate;
s1->ctx_flags |= AVFMTCTX_NOHEADER;
@@ -201,7 +201,7 @@ static int read_header(AVFormatContext *s1)
return AVERROR(ENOMEM);
}
- if (s->pixel_format && (pix_fmt = av_get_pix_fmt(s->pixel_format)) == PIX_FMT_NONE) {
+ if (s->pixel_format && (pix_fmt = av_get_pix_fmt(s->pixel_format)) == AV_PIX_FMT_NONE) {
av_log(s1, AV_LOG_ERROR, "No such pixel format: %s.\n", s->pixel_format);
return AVERROR(EINVAL);
}
@@ -317,7 +317,7 @@ static int read_header(AVFormatContext *s1)
if (st->codec->codec_id == AV_CODEC_ID_LJPEG)
st->codec->codec_id = AV_CODEC_ID_MJPEG;
}
- if(st->codec->codec_type == AVMEDIA_TYPE_VIDEO && pix_fmt != PIX_FMT_NONE)
+ if(st->codec->codec_type == AVMEDIA_TYPE_VIDEO && pix_fmt != AV_PIX_FMT_NONE)
st->codec->pix_fmt = pix_fmt;
return 0;
diff --git a/libavformat/lxfdec.c b/libavformat/lxfdec.c
index 539a86a467..bf17d87a33 100644
--- a/libavformat/lxfdec.c
+++ b/libavformat/lxfdec.c
@@ -40,7 +40,7 @@ static const AVCodecTag lxf_tags[] = {
{ AV_CODEC_ID_DVVIDEO, 4 }, //DV25
{ AV_CODEC_ID_DVVIDEO, 5 }, //DVCPRO
{ AV_CODEC_ID_DVVIDEO, 6 }, //DVCPRO50
- { AV_CODEC_ID_RAWVIDEO, 7 }, //PIX_FMT_ARGB, where alpha is used for chroma keying
+ { AV_CODEC_ID_RAWVIDEO, 7 }, //AV_PIX_FMT_ARGB, where alpha is used for chroma keying
{ AV_CODEC_ID_RAWVIDEO, 8 }, //16-bit chroma key
{ AV_CODEC_ID_MPEG2VIDEO, 9 }, //4:2:2 CBP ("Constrained Bytes per Gop")
{ AV_CODEC_ID_NONE, 0 },
diff --git a/libavformat/movenc.c b/libavformat/movenc.c
index 0e675855a0..2ec5d520ad 100644
--- a/libavformat/movenc.c
+++ b/libavformat/movenc.c
@@ -867,10 +867,10 @@ static int mov_get_dv_codec_tag(AVFormatContext *s, MOVTrack *track)
if (track->enc->width == 720) /* SD */
if (track->enc->height == 480) /* NTSC */
- if (track->enc->pix_fmt == PIX_FMT_YUV422P) tag = MKTAG('d','v','5','n');
+ if (track->enc->pix_fmt == AV_PIX_FMT_YUV422P) tag = MKTAG('d','v','5','n');
else tag = MKTAG('d','v','c',' ');
- else if (track->enc->pix_fmt == PIX_FMT_YUV422P) tag = MKTAG('d','v','5','p');
- else if (track->enc->pix_fmt == PIX_FMT_YUV420P) tag = MKTAG('d','v','c','p');
+ else if (track->enc->pix_fmt == AV_PIX_FMT_YUV422P) tag = MKTAG('d','v','5','p');
+ else if (track->enc->pix_fmt == AV_PIX_FMT_YUV420P) tag = MKTAG('d','v','c','p');
else tag = MKTAG('d','v','p','p');
else if (track->enc->height == 720) /* HD 720 line */
if (track->enc->time_base.den == 50) tag = MKTAG('d','v','h','q');
@@ -887,25 +887,25 @@ static int mov_get_dv_codec_tag(AVFormatContext *s, MOVTrack *track)
}
static const struct {
- enum PixelFormat pix_fmt;
+ enum AVPixelFormat pix_fmt;
uint32_t tag;
unsigned bps;
} mov_pix_fmt_tags[] = {
- { PIX_FMT_YUYV422, MKTAG('y','u','v','2'), 0 },
- { PIX_FMT_YUYV422, MKTAG('y','u','v','s'), 0 },
- { PIX_FMT_UYVY422, MKTAG('2','v','u','y'), 0 },
- { PIX_FMT_RGB555BE,MKTAG('r','a','w',' '), 16 },
- { PIX_FMT_RGB555LE,MKTAG('L','5','5','5'), 16 },
- { PIX_FMT_RGB565LE,MKTAG('L','5','6','5'), 16 },
- { PIX_FMT_RGB565BE,MKTAG('B','5','6','5'), 16 },
- { PIX_FMT_GRAY16BE,MKTAG('b','1','6','g'), 16 },
- { PIX_FMT_RGB24, MKTAG('r','a','w',' '), 24 },
- { PIX_FMT_BGR24, MKTAG('2','4','B','G'), 24 },
- { PIX_FMT_ARGB, MKTAG('r','a','w',' '), 32 },
- { PIX_FMT_BGRA, MKTAG('B','G','R','A'), 32 },
- { PIX_FMT_RGBA, MKTAG('R','G','B','A'), 32 },
- { PIX_FMT_ABGR, MKTAG('A','B','G','R'), 32 },
- { PIX_FMT_RGB48BE, MKTAG('b','4','8','r'), 48 },
+ { AV_PIX_FMT_YUYV422, MKTAG('y','u','v','2'), 0 },
+ { AV_PIX_FMT_YUYV422, MKTAG('y','u','v','s'), 0 },
+ { AV_PIX_FMT_UYVY422, MKTAG('2','v','u','y'), 0 },
+ { AV_PIX_FMT_RGB555BE,MKTAG('r','a','w',' '), 16 },
+ { AV_PIX_FMT_RGB555LE,MKTAG('L','5','5','5'), 16 },
+ { AV_PIX_FMT_RGB565LE,MKTAG('L','5','6','5'), 16 },
+ { AV_PIX_FMT_RGB565BE,MKTAG('B','5','6','5'), 16 },
+ { AV_PIX_FMT_GRAY16BE,MKTAG('b','1','6','g'), 16 },
+ { AV_PIX_FMT_RGB24, MKTAG('r','a','w',' '), 24 },
+ { AV_PIX_FMT_BGR24, MKTAG('2','4','B','G'), 24 },
+ { AV_PIX_FMT_ARGB, MKTAG('r','a','w',' '), 32 },
+ { AV_PIX_FMT_BGRA, MKTAG('B','G','R','A'), 32 },
+ { AV_PIX_FMT_RGBA, MKTAG('R','G','B','A'), 32 },
+ { AV_PIX_FMT_ABGR, MKTAG('A','B','G','R'), 32 },
+ { AV_PIX_FMT_RGB48BE, MKTAG('b','4','8','r'), 48 },
};
static int mov_get_rawvideo_codec_tag(AVFormatContext *s, MOVTrack *track)
diff --git a/libavformat/mtv.c b/libavformat/mtv.c
index cb50033a57..622a9b8f5e 100644
--- a/libavformat/mtv.c
+++ b/libavformat/mtv.c
@@ -136,7 +136,7 @@ static int mtv_read_header(AVFormatContext *s)
avpriv_set_pts_info(st, 64, 1, mtv->video_fps);
st->codec->codec_type = AVMEDIA_TYPE_VIDEO;
st->codec->codec_id = AV_CODEC_ID_RAWVIDEO;
- st->codec->pix_fmt = PIX_FMT_RGB565BE;
+ st->codec->pix_fmt = AV_PIX_FMT_RGB565BE;
st->codec->width = mtv->img_width;
st->codec->height = mtv->img_height;
st->codec->sample_rate = mtv->video_fps;
diff --git a/libavformat/mxf.c b/libavformat/mxf.c
index 7230444b5f..4a4158a1d3 100644
--- a/libavformat/mxf.c
+++ b/libavformat/mxf.c
@@ -58,13 +58,13 @@ const MXFCodecUL ff_mxf_codec_uls[] = {
};
const MXFCodecUL ff_mxf_pixel_format_uls[] = {
- { { 0x06,0x0E,0x2B,0x34,0x04,0x01,0x01,0x0A,0x04,0x01,0x02,0x01,0x01,0x02,0x01,0x01 }, 16, PIX_FMT_UYVY422 },
- { { 0x06,0x0E,0x2B,0x34,0x04,0x01,0x01,0x0A,0x04,0x01,0x02,0x01,0x01,0x02,0x01,0x02 }, 16, PIX_FMT_YUYV422 },
- { { 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 }, 0, PIX_FMT_NONE },
+ { { 0x06,0x0E,0x2B,0x34,0x04,0x01,0x01,0x0A,0x04,0x01,0x02,0x01,0x01,0x02,0x01,0x01 }, 16, AV_PIX_FMT_UYVY422 },
+ { { 0x06,0x0E,0x2B,0x34,0x04,0x01,0x01,0x0A,0x04,0x01,0x02,0x01,0x01,0x02,0x01,0x02 }, 16, AV_PIX_FMT_YUYV422 },
+ { { 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 }, 0, AV_PIX_FMT_NONE },
};
static const struct {
- enum PixelFormat pix_fmt;
+ enum AVPixelFormat pix_fmt;
const char data[16];
} ff_mxf_pixel_layouts[] = {
/**
@@ -76,24 +76,24 @@ static const struct {
* Note: Do not use these for encoding descriptors for little-endian formats until we
* get samples or official word from SMPTE on how/if those can be encoded.
*/
- {PIX_FMT_ABGR, {'A', 8, 'B', 8, 'G', 8, 'R', 8 }},
- {PIX_FMT_ARGB, {'A', 8, 'R', 8, 'G', 8, 'B', 8 }},
- {PIX_FMT_BGR24, {'B', 8, 'G', 8, 'R', 8 }},
- {PIX_FMT_BGRA, {'B', 8, 'G', 8, 'R', 8, 'A', 8 }},
- {PIX_FMT_RGB24, {'R', 8, 'G', 8, 'B', 8 }},
- {PIX_FMT_RGB444BE,{'F', 4, 'R', 4, 'G', 4, 'B', 4 }},
- {PIX_FMT_RGB48BE, {'R', 8, 'r', 8, 'G', 8, 'g', 8, 'B', 8, 'b', 8 }},
- {PIX_FMT_RGB48BE, {'R', 16, 'G', 16, 'B', 16 }},
- {PIX_FMT_RGB48LE, {'r', 8, 'R', 8, 'g', 8, 'G', 8, 'b', 8, 'B', 8 }},
- {PIX_FMT_RGB555BE,{'F', 1, 'R', 5, 'G', 5, 'B', 5 }},
- {PIX_FMT_RGB565BE,{'R', 5, 'G', 6, 'B', 5 }},
- {PIX_FMT_RGBA, {'R', 8, 'G', 8, 'B', 8, 'A', 8 }},
- {PIX_FMT_PAL8, {'P', 8 }},
+ {AV_PIX_FMT_ABGR, {'A', 8, 'B', 8, 'G', 8, 'R', 8 }},
+ {AV_PIX_FMT_ARGB, {'A', 8, 'R', 8, 'G', 8, 'B', 8 }},
+ {AV_PIX_FMT_BGR24, {'B', 8, 'G', 8, 'R', 8 }},
+ {AV_PIX_FMT_BGRA, {'B', 8, 'G', 8, 'R', 8, 'A', 8 }},
+ {AV_PIX_FMT_RGB24, {'R', 8, 'G', 8, 'B', 8 }},
+ {AV_PIX_FMT_RGB444BE,{'F', 4, 'R', 4, 'G', 4, 'B', 4 }},
+ {AV_PIX_FMT_RGB48BE, {'R', 8, 'r', 8, 'G', 8, 'g', 8, 'B', 8, 'b', 8 }},
+ {AV_PIX_FMT_RGB48BE, {'R', 16, 'G', 16, 'B', 16 }},
+ {AV_PIX_FMT_RGB48LE, {'r', 8, 'R', 8, 'g', 8, 'G', 8, 'b', 8, 'B', 8 }},
+ {AV_PIX_FMT_RGB555BE,{'F', 1, 'R', 5, 'G', 5, 'B', 5 }},
+ {AV_PIX_FMT_RGB565BE,{'R', 5, 'G', 6, 'B', 5 }},
+ {AV_PIX_FMT_RGBA, {'R', 8, 'G', 8, 'B', 8, 'A', 8 }},
+ {AV_PIX_FMT_PAL8, {'P', 8 }},
};
static const int num_pixel_layouts = FF_ARRAY_ELEMS(ff_mxf_pixel_layouts);
-int ff_mxf_decode_pixel_layout(const char pixel_layout[16], enum PixelFormat *pix_fmt)
+int ff_mxf_decode_pixel_layout(const char pixel_layout[16], enum AVPixelFormat *pix_fmt)
{
int x;
diff --git a/libavformat/mxf.h b/libavformat/mxf.h
index a91d53fdb2..4c751e8eef 100644
--- a/libavformat/mxf.h
+++ b/libavformat/mxf.h
@@ -76,7 +76,7 @@ extern const MXFCodecUL ff_mxf_data_definition_uls[];
extern const MXFCodecUL ff_mxf_codec_uls[];
extern const MXFCodecUL ff_mxf_pixel_format_uls[];
-int ff_mxf_decode_pixel_layout(const char pixel_layout[16], enum PixelFormat *pix_fmt);
+int ff_mxf_decode_pixel_layout(const char pixel_layout[16], enum AVPixelFormat *pix_fmt);
const MXFSamplesPerFrame *ff_mxf_get_samples_per_frame(AVFormatContext *s, AVRational time_base);
#define PRINT_KEY(pc, s, x) av_dlog(pc, "%s %02X %02X %02X %02X %02X %02X %02X %02X %02X %02X %02X %02X %02X %02X %02X %02X\n", s, \
diff --git a/libavformat/mxfdec.c b/libavformat/mxfdec.c
index a503289582..bee368ee96 100644
--- a/libavformat/mxfdec.c
+++ b/libavformat/mxfdec.c
@@ -154,7 +154,7 @@ typedef struct {
int linked_track_id;
uint8_t *extradata;
int extradata_size;
- enum PixelFormat pix_fmt;
+ enum AVPixelFormat pix_fmt;
} MXFDescriptor;
typedef struct {
@@ -801,7 +801,7 @@ static void mxf_read_pixel_layout(AVIOContext *pb, MXFDescriptor *descriptor)
static int mxf_read_generic_descriptor(void *arg, AVIOContext *pb, int tag, int size, UID uid, int64_t klv_offset)
{
MXFDescriptor *descriptor = arg;
- descriptor->pix_fmt = PIX_FMT_NONE;
+ descriptor->pix_fmt = AV_PIX_FMT_NONE;
switch(tag) {
case 0x3F01:
descriptor->sub_descriptors_count = avio_rb32(pb);
@@ -1518,17 +1518,17 @@ static int mxf_parse_structural_metadata(MXFContext *mxf)
}
if (st->codec->codec_id == AV_CODEC_ID_RAWVIDEO) {
st->codec->pix_fmt = descriptor->pix_fmt;
- if (st->codec->pix_fmt == PIX_FMT_NONE) {
+ if (st->codec->pix_fmt == AV_PIX_FMT_NONE) {
pix_fmt_ul = mxf_get_codec_ul(ff_mxf_pixel_format_uls,
&descriptor->essence_codec_ul);
st->codec->pix_fmt = pix_fmt_ul->id;
- if (st->codec->pix_fmt == PIX_FMT_NONE) {
+ if (st->codec->pix_fmt == AV_PIX_FMT_NONE) {
/* support files created before RP224v10 by defaulting to UYVY422
if subsampling is 4:2:2 and component depth is 8-bit */
if (descriptor->horiz_subsampling == 2 &&
descriptor->vert_subsampling == 1 &&
descriptor->component_depth == 8) {
- st->codec->pix_fmt = PIX_FMT_UYVY422;
+ st->codec->pix_fmt = AV_PIX_FMT_UYVY422;
}
}
}
diff --git a/libavformat/rawdec.c b/libavformat/rawdec.c
index 974e58ca31..4020666ed8 100644
--- a/libavformat/rawdec.c
+++ b/libavformat/rawdec.c
@@ -76,14 +76,14 @@ int ff_raw_read_header(AVFormatContext *s)
case AVMEDIA_TYPE_VIDEO: {
FFRawVideoDemuxerContext *s1 = s->priv_data;
int width = 0, height = 0, ret = 0;
- enum PixelFormat pix_fmt;
+ enum AVPixelFormat pix_fmt;
AVRational framerate;
if (s1->video_size && (ret = av_parse_video_size(&width, &height, s1->video_size)) < 0) {
av_log(s, AV_LOG_ERROR, "Couldn't parse video size.\n");
goto fail;
}
- if ((pix_fmt = av_get_pix_fmt(s1->pixel_format)) == PIX_FMT_NONE) {
+ if ((pix_fmt = av_get_pix_fmt(s1->pixel_format)) == AV_PIX_FMT_NONE) {
av_log(s, AV_LOG_ERROR, "No such pixel format: %s.\n", s1->pixel_format);
ret = AVERROR(EINVAL);
goto fail;
diff --git a/libavformat/rtpdec_xiph.c b/libavformat/rtpdec_xiph.c
index 097d1a32bd..ae10c78304 100644
--- a/libavformat/rtpdec_xiph.c
+++ b/libavformat/rtpdec_xiph.c
@@ -312,11 +312,11 @@ static int xiph_parse_fmtp_pair(AVStream* stream,
if (!strcmp(attr, "sampling")) {
if (!strcmp(value, "YCbCr-4:2:0")) {
- codec->pix_fmt = PIX_FMT_YUV420P;
+ codec->pix_fmt = AV_PIX_FMT_YUV420P;
} else if (!strcmp(value, "YCbCr-4:4:2")) {
- codec->pix_fmt = PIX_FMT_YUV422P;
+ codec->pix_fmt = AV_PIX_FMT_YUV422P;
} else if (!strcmp(value, "YCbCr-4:4:4")) {
- codec->pix_fmt = PIX_FMT_YUV444P;
+ codec->pix_fmt = AV_PIX_FMT_YUV444P;
} else {
av_log(codec, AV_LOG_ERROR,
"Unsupported pixel format %s\n", attr);
diff --git a/libavformat/rtpenc_jpeg.c b/libavformat/rtpenc_jpeg.c
index b2cfb8d5c0..7eb0e23c6f 100644
--- a/libavformat/rtpenc_jpeg.c
+++ b/libavformat/rtpenc_jpeg.c
@@ -29,7 +29,7 @@ void ff_rtp_send_jpeg(AVFormatContext *s1, const uint8_t *buf, int size)
RTPMuxContext *s = s1->priv_data;
const uint8_t *qtables = NULL;
int nb_qtables = 0;
- uint8_t type = 1; /* default pixel format is PIX_FMT_YUVJ420P */
+ uint8_t type = 1; /* default pixel format is AV_PIX_FMT_YUVJ420P */
uint8_t w, h;
uint8_t *p;
int off = 0; /* fragment offset of the current JPEG frame */
@@ -44,9 +44,9 @@ void ff_rtp_send_jpeg(AVFormatContext *s1, const uint8_t *buf, int size)
h = s1->streams[0]->codec->height >> 3;
/* check if pixel format is not the normal 420 case */
- if (s1->streams[0]->codec->pix_fmt == PIX_FMT_YUVJ422P) {
+ if (s1->streams[0]->codec->pix_fmt == AV_PIX_FMT_YUVJ422P) {
type = 0;
- } else if (s1->streams[0]->codec->pix_fmt == PIX_FMT_YUVJ420P) {
+ } else if (s1->streams[0]->codec->pix_fmt == AV_PIX_FMT_YUVJ420P) {
type = 1;
} else {
av_log(s1, AV_LOG_ERROR, "Unsupported pixel format\n");
diff --git a/libavformat/sdp.c b/libavformat/sdp.c
index 30941b7f89..9a4dec4bb2 100644
--- a/libavformat/sdp.c
+++ b/libavformat/sdp.c
@@ -521,13 +521,13 @@ static char *sdp_write_media_attributes(char *buff, int size, AVCodecContext *c,
return NULL;
switch (c->pix_fmt) {
- case PIX_FMT_YUV420P:
+ case AV_PIX_FMT_YUV420P:
pix_fmt = "YCbCr-4:2:0";
break;
- case PIX_FMT_YUV422P:
+ case AV_PIX_FMT_YUV422P:
pix_fmt = "YCbCr-4:2:2";
break;
- case PIX_FMT_YUV444P:
+ case AV_PIX_FMT_YUV444P:
pix_fmt = "YCbCr-4:4:4";
break;
default:
diff --git a/libavformat/segafilm.c b/libavformat/segafilm.c
index d497d7b31f..1be2c5d042 100644
--- a/libavformat/segafilm.c
+++ b/libavformat/segafilm.c
@@ -151,7 +151,7 @@ static int film_read_header(AVFormatContext *s)
if (film->video_type == AV_CODEC_ID_RAWVIDEO) {
if (scratch[20] == 24) {
- st->codec->pix_fmt = PIX_FMT_RGB24;
+ st->codec->pix_fmt = AV_PIX_FMT_RGB24;
} else {
av_log(s, AV_LOG_ERROR, "raw video is using unhandled %dbpp\n", scratch[20]);
return -1;
diff --git a/libavformat/siff.c b/libavformat/siff.c
index df94dbfdd0..26c381753b 100644
--- a/libavformat/siff.c
+++ b/libavformat/siff.c
@@ -125,7 +125,7 @@ static int siff_parse_vbv1(AVFormatContext *s, SIFFContext *c, AVIOContext *pb)
st->codec->codec_tag = MKTAG('V', 'B', 'V', '1');
st->codec->width = width;
st->codec->height = height;
- st->codec->pix_fmt = PIX_FMT_PAL8;
+ st->codec->pix_fmt = AV_PIX_FMT_PAL8;
avpriv_set_pts_info(st, 16, 1, 12);
c->cur_frame = 0;
diff --git a/libavformat/smacker.c b/libavformat/smacker.c
index cf1ab858d8..829233f032 100644
--- a/libavformat/smacker.c
+++ b/libavformat/smacker.c
@@ -161,7 +161,7 @@ static int smacker_read_header(AVFormatContext *s)
smk->videoindex = st->index;
st->codec->width = smk->width;
st->codec->height = smk->height;
- st->codec->pix_fmt = PIX_FMT_PAL8;
+ st->codec->pix_fmt = AV_PIX_FMT_PAL8;
st->codec->codec_type = AVMEDIA_TYPE_VIDEO;
st->codec->codec_id = AV_CODEC_ID_SMACKVIDEO;
st->codec->codec_tag = smk->magic;
diff --git a/libavformat/tmv.c b/libavformat/tmv.c
index 513436982c..acc3460cfd 100644
--- a/libavformat/tmv.c
+++ b/libavformat/tmv.c
@@ -124,7 +124,7 @@ static int tmv_read_header(AVFormatContext *s)
vst->codec->codec_type = AVMEDIA_TYPE_VIDEO;
vst->codec->codec_id = AV_CODEC_ID_TMV;
- vst->codec->pix_fmt = PIX_FMT_PAL8;
+ vst->codec->pix_fmt = AV_PIX_FMT_PAL8;
vst->codec->width = char_cols * 8;
vst->codec->height = char_rows * 8;
avpriv_set_pts_info(vst, 32, fps.den, fps.num);
diff --git a/libavformat/utils.c b/libavformat/utils.c
index 69ad761077..4399de7629 100644
--- a/libavformat/utils.c
+++ b/libavformat/utils.c
@@ -2293,7 +2293,7 @@ static int has_codec_parameters(AVStream *st, const char **errmsg_ptr)
case AVMEDIA_TYPE_VIDEO:
if (!avctx->width)
FAIL("unspecified size");
- if (st->info->found_decoder >= 0 && avctx->pix_fmt == PIX_FMT_NONE)
+ if (st->info->found_decoder >= 0 && avctx->pix_fmt == AV_PIX_FMT_NONE)
FAIL("unspecified pixel format");
break;
case AVMEDIA_TYPE_SUBTITLE:
diff --git a/libavformat/yuv4mpeg.c b/libavformat/yuv4mpeg.c
index b67209423c..cc13eafef1 100644
--- a/libavformat/yuv4mpeg.c
+++ b/libavformat/yuv4mpeg.c
@@ -58,53 +58,53 @@ static int yuv4_generate_header(AVFormatContext *s, char* buf)
inter = st->codec->coded_frame->top_field_first ? 't' : 'b';
switch (st->codec->pix_fmt) {
- case PIX_FMT_GRAY8:
+ case AV_PIX_FMT_GRAY8:
colorspace = " Cmono";
break;
- case PIX_FMT_GRAY16:
+ case AV_PIX_FMT_GRAY16:
colorspace = " Cmono16";
break;
- case PIX_FMT_YUV411P:
+ case AV_PIX_FMT_YUV411P:
colorspace = " C411 XYSCSS=411";
break;
- case PIX_FMT_YUV420P:
+ case AV_PIX_FMT_YUV420P:
switch (st->codec->chroma_sample_location) {
case AVCHROMA_LOC_TOPLEFT: colorspace = " C420paldv XYSCSS=420PALDV"; break;
case AVCHROMA_LOC_LEFT: colorspace = " C420mpeg2 XYSCSS=420MPEG2"; break;
default: colorspace = " C420jpeg XYSCSS=420JPEG"; break;
}
break;
- case PIX_FMT_YUV422P:
+ case AV_PIX_FMT_YUV422P:
colorspace = " C422 XYSCSS=422";
break;
- case PIX_FMT_YUV444P:
+ case AV_PIX_FMT_YUV444P:
colorspace = " C444 XYSCSS=444";
break;
- case PIX_FMT_YUV420P9:
+ case AV_PIX_FMT_YUV420P9:
colorspace = " C420p9 XYSCSS=420P9";
break;
- case PIX_FMT_YUV422P9:
+ case AV_PIX_FMT_YUV422P9:
colorspace = " C422p9 XYSCSS=422P9";
break;
- case PIX_FMT_YUV444P9:
+ case AV_PIX_FMT_YUV444P9:
colorspace = " C444p9 XYSCSS=444P9";
break;
- case PIX_FMT_YUV420P10:
+ case AV_PIX_FMT_YUV420P10:
colorspace = " C420p10 XYSCSS=420P10";
break;
- case PIX_FMT_YUV422P10:
+ case AV_PIX_FMT_YUV422P10:
colorspace = " C422p10 XYSCSS=422P10";
break;
- case PIX_FMT_YUV444P10:
+ case AV_PIX_FMT_YUV444P10:
colorspace = " C444p10 XYSCSS=444P10";
break;
- case PIX_FMT_YUV420P16:
+ case AV_PIX_FMT_YUV420P16:
colorspace = " C420p16 XYSCSS=420P16";
break;
- case PIX_FMT_YUV422P16:
+ case AV_PIX_FMT_YUV422P16:
colorspace = " C422p16 XYSCSS=422P16";
break;
- case PIX_FMT_YUV444P16:
+ case AV_PIX_FMT_YUV444P16:
colorspace = " C444p16 XYSCSS=444P16";
break;
}
@@ -155,22 +155,22 @@ static int yuv4_write_packet(AVFormatContext *s, AVPacket *pkt)
ptr = picture->data[0];
switch (st->codec->pix_fmt) {
- case PIX_FMT_GRAY8:
- case PIX_FMT_YUV411P:
- case PIX_FMT_YUV420P:
- case PIX_FMT_YUV422P:
- case PIX_FMT_YUV444P:
+ case AV_PIX_FMT_GRAY8:
+ case AV_PIX_FMT_YUV411P:
+ case AV_PIX_FMT_YUV420P:
+ case AV_PIX_FMT_YUV422P:
+ case AV_PIX_FMT_YUV444P:
break;
- case PIX_FMT_GRAY16:
- case PIX_FMT_YUV420P9:
- case PIX_FMT_YUV422P9:
- case PIX_FMT_YUV444P9:
- case PIX_FMT_YUV420P10:
- case PIX_FMT_YUV422P10:
- case PIX_FMT_YUV444P10:
- case PIX_FMT_YUV420P16:
- case PIX_FMT_YUV422P16:
- case PIX_FMT_YUV444P16:
+ case AV_PIX_FMT_GRAY16:
+ case AV_PIX_FMT_YUV420P9:
+ case AV_PIX_FMT_YUV422P9:
+ case AV_PIX_FMT_YUV444P9:
+ case AV_PIX_FMT_YUV420P10:
+ case AV_PIX_FMT_YUV422P10:
+ case AV_PIX_FMT_YUV444P10:
+ case AV_PIX_FMT_YUV420P16:
+ case AV_PIX_FMT_YUV422P16:
+ case AV_PIX_FMT_YUV444P16:
width *= 2;
break;
default:
@@ -184,8 +184,8 @@ static int yuv4_write_packet(AVFormatContext *s, AVPacket *pkt)
ptr += picture->linesize[0];
}
- if (st->codec->pix_fmt != PIX_FMT_GRAY8 &&
- st->codec->pix_fmt != PIX_FMT_GRAY16) {
+ if (st->codec->pix_fmt != AV_PIX_FMT_GRAY8 &&
+ st->codec->pix_fmt != AV_PIX_FMT_GRAY16) {
// Adjust for smaller Cb and Cr planes
avcodec_get_chroma_sub_sample(st->codec->pix_fmt, &h_chroma_shift,
&v_chroma_shift);
@@ -222,25 +222,25 @@ static int yuv4_write_header(AVFormatContext *s)
}
switch (s->streams[0]->codec->pix_fmt) {
- case PIX_FMT_YUV411P:
+ case AV_PIX_FMT_YUV411P:
av_log(s, AV_LOG_WARNING, "Warning: generating rarely used 4:1:1 YUV "
"stream, some mjpegtools might not work.\n");
break;
- case PIX_FMT_GRAY8:
- case PIX_FMT_GRAY16:
- case PIX_FMT_YUV420P:
- case PIX_FMT_YUV422P:
- case PIX_FMT_YUV444P:
+ case AV_PIX_FMT_GRAY8:
+ case AV_PIX_FMT_GRAY16:
+ case AV_PIX_FMT_YUV420P:
+ case AV_PIX_FMT_YUV422P:
+ case AV_PIX_FMT_YUV444P:
break;
- case PIX_FMT_YUV420P9:
- case PIX_FMT_YUV422P9:
- case PIX_FMT_YUV444P9:
- case PIX_FMT_YUV420P10:
- case PIX_FMT_YUV422P10:
- case PIX_FMT_YUV444P10:
- case PIX_FMT_YUV420P16:
- case PIX_FMT_YUV422P16:
- case PIX_FMT_YUV444P16:
+ case AV_PIX_FMT_YUV420P9:
+ case AV_PIX_FMT_YUV422P9:
+ case AV_PIX_FMT_YUV444P9:
+ case AV_PIX_FMT_YUV420P10:
+ case AV_PIX_FMT_YUV422P10:
+ case AV_PIX_FMT_YUV444P10:
+ case AV_PIX_FMT_YUV420P16:
+ case AV_PIX_FMT_YUV422P16:
+ case AV_PIX_FMT_YUV444P16:
if (s->streams[0]->codec->strict_std_compliance >= FF_COMPLIANCE_NORMAL) {
av_log(s, AV_LOG_ERROR, "'%s' is not a official yuv4mpegpipe pixel format. "
"Use '-strict -1' to encode to this pixel format.\n",
@@ -291,7 +291,7 @@ static int yuv4_read_header(AVFormatContext *s)
AVIOContext *pb = s->pb;
int width = -1, height = -1, raten = 0,
rated = 0, aspectn = 0, aspectd = 0;
- enum PixelFormat pix_fmt = PIX_FMT_NONE, alt_pix_fmt = PIX_FMT_NONE;
+ enum AVPixelFormat pix_fmt = AV_PIX_FMT_NONE, alt_pix_fmt = AV_PIX_FMT_NONE;
enum AVChromaLocation chroma_sample_location = AVCHROMA_LOC_UNSPECIFIED;
AVStream *st;
struct frame_attributes *s1 = s->priv_data;
@@ -328,49 +328,49 @@ static int yuv4_read_header(AVFormatContext *s)
break;
case 'C': // Color space
if (strncmp("420jpeg", tokstart, 7) == 0) {
- pix_fmt = PIX_FMT_YUV420P;
+ pix_fmt = AV_PIX_FMT_YUV420P;
chroma_sample_location = AVCHROMA_LOC_CENTER;
} else if (strncmp("420mpeg2", tokstart, 8) == 0) {
- pix_fmt = PIX_FMT_YUV420P;
+ pix_fmt = AV_PIX_FMT_YUV420P;
chroma_sample_location = AVCHROMA_LOC_LEFT;
} else if (strncmp("420paldv", tokstart, 8) == 0) {
- pix_fmt = PIX_FMT_YUV420P;
+ pix_fmt = AV_PIX_FMT_YUV420P;
chroma_sample_location = AVCHROMA_LOC_TOPLEFT;
} else if (strncmp("420p16", tokstart, 6) == 0) {
- pix_fmt = PIX_FMT_YUV420P16;
+ pix_fmt = AV_PIX_FMT_YUV420P16;
} else if (strncmp("422p16", tokstart, 6) == 0) {
- pix_fmt = PIX_FMT_YUV422P16;
+ pix_fmt = AV_PIX_FMT_YUV422P16;
} else if (strncmp("444p16", tokstart, 6) == 0) {
- pix_fmt = PIX_FMT_YUV444P16;
+ pix_fmt = AV_PIX_FMT_YUV444P16;
} else if (strncmp("420p10", tokstart, 6) == 0) {
- pix_fmt = PIX_FMT_YUV420P10;
+ pix_fmt = AV_PIX_FMT_YUV420P10;
} else if (strncmp("422p10", tokstart, 6) == 0) {
- pix_fmt = PIX_FMT_YUV422P10;
+ pix_fmt = AV_PIX_FMT_YUV422P10;
} else if (strncmp("444p10", tokstart, 6) == 0) {
- pix_fmt = PIX_FMT_YUV444P10;
+ pix_fmt = AV_PIX_FMT_YUV444P10;
} else if (strncmp("420p9", tokstart, 5) == 0) {
- pix_fmt = PIX_FMT_YUV420P9;
+ pix_fmt = AV_PIX_FMT_YUV420P9;
} else if (strncmp("422p9", tokstart, 5) == 0) {
- pix_fmt = PIX_FMT_YUV422P9;
+ pix_fmt = AV_PIX_FMT_YUV422P9;
} else if (strncmp("444p9", tokstart, 5) == 0) {
- pix_fmt = PIX_FMT_YUV444P9;
+ pix_fmt = AV_PIX_FMT_YUV444P9;
} else if (strncmp("420", tokstart, 3) == 0) {
- pix_fmt = PIX_FMT_YUV420P;
+ pix_fmt = AV_PIX_FMT_YUV420P;
chroma_sample_location = AVCHROMA_LOC_CENTER;
} else if (strncmp("411", tokstart, 3) == 0) {
- pix_fmt = PIX_FMT_YUV411P;
+ pix_fmt = AV_PIX_FMT_YUV411P;
} else if (strncmp("422", tokstart, 3) == 0) {
- pix_fmt = PIX_FMT_YUV422P;
+ pix_fmt = AV_PIX_FMT_YUV422P;
} else if (strncmp("444alpha", tokstart, 8) == 0 ) {
av_log(s, AV_LOG_ERROR, "Cannot handle 4:4:4:4 "
"YUV4MPEG stream.\n");
return -1;
} else if (strncmp("444", tokstart, 3) == 0) {
- pix_fmt = PIX_FMT_YUV444P;
+ pix_fmt = AV_PIX_FMT_YUV444P;
} else if (strncmp("mono16", tokstart, 6) == 0) {
- pix_fmt = PIX_FMT_GRAY16;
+ pix_fmt = AV_PIX_FMT_GRAY16;
} else if (strncmp("mono", tokstart, 4) == 0) {
- pix_fmt = PIX_FMT_GRAY8;
+ pix_fmt = AV_PIX_FMT_GRAY8;
} else {
av_log(s, AV_LOG_ERROR, "YUV4MPEG stream contains an unknown "
"pixel format.\n");
@@ -418,35 +418,35 @@ static int yuv4_read_header(AVFormatContext *s)
// Older nonstandard pixel format representation
tokstart += 6;
if (strncmp("420JPEG", tokstart, 7) == 0)
- alt_pix_fmt = PIX_FMT_YUV420P;
+ alt_pix_fmt = AV_PIX_FMT_YUV420P;
else if (strncmp("420MPEG2", tokstart, 8) == 0)
- alt_pix_fmt = PIX_FMT_YUV420P;
+ alt_pix_fmt = AV_PIX_FMT_YUV420P;
else if (strncmp("420PALDV", tokstart, 8) == 0)
- alt_pix_fmt = PIX_FMT_YUV420P;
+ alt_pix_fmt = AV_PIX_FMT_YUV420P;
else if (strncmp("420P9", tokstart, 5) == 0)
- alt_pix_fmt = PIX_FMT_YUV420P9;
+ alt_pix_fmt = AV_PIX_FMT_YUV420P9;
else if (strncmp("422P9", tokstart, 5) == 0)
- alt_pix_fmt = PIX_FMT_YUV422P9;
+ alt_pix_fmt = AV_PIX_FMT_YUV422P9;
else if (strncmp("444P9", tokstart, 5) == 0)
- alt_pix_fmt = PIX_FMT_YUV444P9;
+ alt_pix_fmt = AV_PIX_FMT_YUV444P9;
else if (strncmp("420P10", tokstart, 6) == 0)
- alt_pix_fmt = PIX_FMT_YUV420P10;
+ alt_pix_fmt = AV_PIX_FMT_YUV420P10;
else if (strncmp("422P10", tokstart, 6) == 0)
- alt_pix_fmt = PIX_FMT_YUV422P10;
+ alt_pix_fmt = AV_PIX_FMT_YUV422P10;
else if (strncmp("444P10", tokstart, 6) == 0)
- alt_pix_fmt = PIX_FMT_YUV444P10;
+ alt_pix_fmt = AV_PIX_FMT_YUV444P10;
else if (strncmp("420P16", tokstart, 6) == 0)
- alt_pix_fmt = PIX_FMT_YUV420P16;
+ alt_pix_fmt = AV_PIX_FMT_YUV420P16;
else if (strncmp("422P16", tokstart, 6) == 0)
- alt_pix_fmt = PIX_FMT_YUV422P16;
+ alt_pix_fmt = AV_PIX_FMT_YUV422P16;
else if (strncmp("444P16", tokstart, 6) == 0)
- alt_pix_fmt = PIX_FMT_YUV444P16;
+ alt_pix_fmt = AV_PIX_FMT_YUV444P16;
else if (strncmp("411", tokstart, 3) == 0)
- alt_pix_fmt = PIX_FMT_YUV411P;
+ alt_pix_fmt = AV_PIX_FMT_YUV411P;
else if (strncmp("422", tokstart, 3) == 0)
- alt_pix_fmt = PIX_FMT_YUV422P;
+ alt_pix_fmt = AV_PIX_FMT_YUV422P;
else if (strncmp("444", tokstart, 3) == 0)
- alt_pix_fmt = PIX_FMT_YUV444P;
+ alt_pix_fmt = AV_PIX_FMT_YUV444P;
}
while (tokstart < header_end && *tokstart != 0x20)
tokstart++;
@@ -459,9 +459,9 @@ static int yuv4_read_header(AVFormatContext *s)
return -1;
}
- if (pix_fmt == PIX_FMT_NONE) {
- if (alt_pix_fmt == PIX_FMT_NONE)
- pix_fmt = PIX_FMT_YUV420P;
+ if (pix_fmt == AV_PIX_FMT_NONE) {
+ if (alt_pix_fmt == AV_PIX_FMT_NONE)
+ pix_fmt = AV_PIX_FMT_YUV420P;
else
pix_fmt = alt_pix_fmt;
}