path: root/libavformat/img.c
author     Fabrice Bellard <fabrice@bellard.org>  2003-01-11 05:02:14 +0000
committer  Fabrice Bellard <fabrice@bellard.org>  2003-01-11 05:02:14 +0000
commit     87a0a6816f63c37fd6ea78ff8801e9f2d18e93f7 (patch)
tree       7fef66f2bcce32f3d60bc8306e3c6de8df6524bf /libavformat/img.c
parent     f746a0461694a8951d003395d79af8eba9dae5ed (diff)
added still image support
Originally committed as revision 1439 to svn://svn.ffmpeg.org/ffmpeg/trunk
Diffstat (limited to 'libavformat/img.c')
-rw-r--r--  libavformat/img.c  888
1 file changed, 143 insertions, 745 deletions
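
The patch below drops the per-format PGM/PPM/.Y.U.V/YUV4MPEG readers and writers in favour of the generic AVImageFormat layer. As an aside for readers of the diff, here is a rough sketch of how the new two-pass, callback-based reading works, combining what img_read_header() and img_read_packet() do further down. It assumes only the 2003-era calls visible in this patch (av_read_image(), AVImageInfo, avpicture_fill(), avpicture_get_size()); everything prefixed with my_ is a hypothetical helper, not part of the patch.

/* Sketch only: load one still image with the new AVImageFormat layer.
 * Pass 1 probes the geometry, pass 2 decodes into a caller-owned buffer. */
typedef struct {
    int width, height, pix_fmt;
    UINT8 *buf;
} MyImage;

static int my_probe_cb(void *opaque, AVImageInfo *info)
{
    MyImage *img = opaque;
    img->width   = info->width;
    img->height  = info->height;
    img->pix_fmt = info->pix_fmt;
    return 1;   /* stop after the header: size known, nothing decoded yet */
}

static int my_decode_cb(void *opaque, AVImageInfo *info)
{
    MyImage *img = opaque;
    /* point the decoder at our destination buffer */
    avpicture_fill(&info->pict, img->buf, info->pix_fmt, info->width, info->height);
    return 0;   /* continue: pixels are decoded into info->pict */
}

static int my_load_image(const char *filename, MyImage *img)
{
    ByteIOContext pb;
    int size;

    if (url_fopen(&pb, filename, URL_RDONLY) < 0)
        return -EIO;
    /* pass 1: a NULL AVImageFormat is assumed to make av_read_image()
     * probe the format, as img_read_header() below does when none is forced */
    if (av_read_image(&pb, filename, NULL, my_probe_cb, img) < 0)
        goto fail;
    size = avpicture_get_size(img->pix_fmt, img->width, img->height);
    img->buf = av_malloc(size);
    if (!img->buf)
        goto fail;
    /* pass 2: rewind and decode for real */
    url_fseek(&pb, 0, SEEK_SET);
    if (av_read_image(&pb, filename, NULL, my_decode_cb, img) < 0)
        goto fail;
    url_fclose(&pb);
    return 0;
 fail:
    url_fclose(&pb);
    return -EIO;
}
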
diff --git a/libavformat/img.c b/libavformat/img.c
index 6afcb4e029..70f6307d4d 100644
--- a/libavformat/img.c
+++ b/libavformat/img.c
@@ -19,22 +19,6 @@
#include <unistd.h>
#include "avformat.h"
-extern AVInputFormat pgm_iformat;
-extern AVOutputFormat pgm_oformat;
-extern AVInputFormat pgmyuv_iformat;
-extern AVOutputFormat pgmyuv_oformat;
-extern AVInputFormat ppm_iformat;
-extern AVOutputFormat ppm_oformat;
-extern AVInputFormat imgyuv_iformat;
-extern AVOutputFormat imgyuv_oformat;
-extern AVInputFormat pgmpipe_iformat;
-extern AVOutputFormat pgmpipe_oformat;
-extern AVInputFormat pgmyuvpipe_iformat;
-extern AVOutputFormat pgmyuvpipe_oformat;
-extern AVInputFormat ppmpipe_iformat;
-extern AVOutputFormat ppmpipe_oformat;
-extern AVOutputFormat yuv4mpegpipe_oformat;
-
#ifdef __MINGW32__
# include <windows.h>
# define usleep(t) Sleep((t) / 1000)
@@ -46,266 +30,45 @@ extern AVOutputFormat yuv4mpegpipe_oformat;
# endif
#endif
-#define IMGFMT_YUV 1
-#define IMGFMT_PGMYUV 2
-#define IMGFMT_PGM 3
-#define IMGFMT_PPM 4
-#define IMGFMT_YUV4MPEG 5
-
-#define Y4M_MAGIC "YUV4MPEG2"
-#define Y4M_FRAME_MAGIC "FRAME"
-#define Y4M_LINE_MAX 256
-
typedef struct {
int width;
int height;
int img_number;
int img_size;
- int img_fmt;
+ AVImageFormat *img_fmt;
+ int pix_fmt;
int is_pipe;
- int header_written;
char path[1024];
+ /* temporary usage */
+ void *ptr;
} VideoData;
int emulate_frame_rate;
-static inline int pnm_space(int c)
-{
- return (c==' ' || c=='\n' || c=='\r' || c=='\t');
-}
-
-static void pnm_get(ByteIOContext *f, char *str, int buf_size)
-{
- char *s;
- int c;
-
- do {
- c=get_byte(f);
- if (c=='#') {
- do {
- c=get_byte(f);
- } while (c!='\n');
- c=get_byte(f);
- }
- } while (pnm_space(c));
-
- s=str;
- do {
- if (url_feof(f))
- break;
- if ((s - str) < buf_size - 1)
- *s++=c;
- c=get_byte(f);
- } while (!pnm_space(c));
- *s = '\0';
-}
-
-static int pgm_read(VideoData *s, ByteIOContext *f, UINT8 *buf, int size, int is_yuv)
-{
- int width, height, i;
- char buf1[32];
- UINT8 *picture[3];
-
- width = s->width;
- height = s->height;
-
- pnm_get(f, buf1, sizeof(buf1));
- if (strcmp(buf1, "P5")) {
- return -EIO;
- }
- pnm_get(f, buf1, sizeof(buf1));
- pnm_get(f, buf1, sizeof(buf1));
- pnm_get(f, buf1, sizeof(buf1));
-
- picture[0] = buf;
- picture[1] = buf + width * height;
- picture[2] = buf + width * height + (width * height / 4);
- get_buffer(f, picture[0], width * height);
-
- height>>=1;
- width>>=1;
- if (is_yuv) {
- for(i=0;i<height;i++) {
- get_buffer(f, picture[1] + i * width, width);
- get_buffer(f, picture[2] + i * width, width);
- }
- } else {
- for(i=0;i<height;i++) {
- memset(picture[1] + i * width, 128, width);
- memset(picture[2] + i * width, 128, width);
- }
- }
- return 0;
-}
-
-static int ppm_read(VideoData *s, ByteIOContext *f, UINT8 *buf, int size)
-{
- int width, height;
- char buf1[32];
- UINT8 *picture[3];
-
- width = s->width;
- height = s->height;
-
- pnm_get(f, buf1, sizeof(buf1));
- if (strcmp(buf1, "P6")) {
- return -EIO;
- }
-
- pnm_get(f, buf1, sizeof(buf1));
- pnm_get(f, buf1, sizeof(buf1));
- pnm_get(f, buf1, sizeof(buf1));
-
- picture[0] = buf;
- get_buffer(f, picture[0], width * height*3);
-
- return 0;
-
-}
-
-static int yuv_read(VideoData *s, const char *filename, UINT8 *buf, int size1)
-{
- ByteIOContext pb1, *pb = &pb1;
- char fname[1024], *p;
- int size;
-
- size = s->width * s->height;
-
- strcpy(fname, filename);
- p = strrchr(fname, '.');
- if (!p || p[1] != 'Y')
- return -EIO;
-
- if (url_fopen(pb, fname, URL_RDONLY) < 0)
- return -EIO;
-
- get_buffer(pb, buf, size);
- url_fclose(pb);
-
- p[1] = 'U';
- if (url_fopen(pb, fname, URL_RDONLY) < 0)
- return -EIO;
-
- get_buffer(pb, buf + size, size / 4);
- url_fclose(pb);
-
- p[1] = 'V';
- if (url_fopen(pb, fname, URL_RDONLY) < 0)
- return -EIO;
-
- get_buffer(pb, buf + size + (size / 4), size / 4);
- url_fclose(pb);
- return 0;
-}
-
-static int img_read_packet(AVFormatContext *s1, AVPacket *pkt)
+static int image_probe(AVProbeData *p)
{
- VideoData *s = s1->priv_data;
- char filename[1024];
- int ret;
- ByteIOContext f1, *f;
- static INT64 first_frame;
-
- if (emulate_frame_rate) {
- if (!first_frame) {
- first_frame = av_gettime();
- } else {
- INT64 pts;
- INT64 nowus;
-
- nowus = av_gettime() - first_frame;
-
- pts = ((INT64)s->img_number * FRAME_RATE_BASE * 1000000) / (s1->streams[0]->codec.frame_rate);
-
- if (pts > nowus)
- usleep(pts - nowus);
- }
- }
-
-/*
- This if-statement destroys pipes - I do not see why it is necessary
- if (get_frame_filename(filename, sizeof(filename),
- s->path, s->img_number) < 0)
- return -EIO;
-*/
- get_frame_filename(filename, sizeof(filename),
- s->path, s->img_number);
- if (!s->is_pipe) {
- f = &f1;
- if (url_fopen(f, filename, URL_RDONLY) < 0)
- return -EIO;
- } else {
- f = &s1->pb;
- if (url_feof(f))
- return -EIO;
- }
-
- av_new_packet(pkt, s->img_size);
- pkt->stream_index = 0;
-
- switch(s->img_fmt) {
- case IMGFMT_PGMYUV:
- ret = pgm_read(s, f, pkt->data, pkt->size, 1);
- break;
- case IMGFMT_PGM:
- ret = pgm_read(s, f, pkt->data, pkt->size, 0);
- break;
- case IMGFMT_YUV:
- ret = yuv_read(s, filename, pkt->data, pkt->size);
- break;
- case IMGFMT_PPM:
- ret = ppm_read(s, f, pkt->data, pkt->size);
- break;
- default:
- return -EIO;
- }
-
- if (!s->is_pipe) {
- url_fclose(f);
- }
-
- if (ret < 0) {
- av_free_packet(pkt);
- return -EIO; /* signal EOF */
- } else {
- pkt->pts = ((INT64)s->img_number * s1->pts_den * FRAME_RATE_BASE) / (s1->streams[0]->codec.frame_rate * s1->pts_num);
- s->img_number++;
+ if (filename_number_test(p->filename) >= 0)
+ return AVPROBE_SCORE_MAX;
+ else
return 0;
- }
}
-static int sizes[][2] = {
- { 640, 480 },
- { 720, 480 },
- { 720, 576 },
- { 352, 288 },
- { 352, 240 },
- { 160, 128 },
- { 512, 384 },
- { 640, 352 },
- { 640, 240 },
-};
-
-static int infer_size(int *width_ptr, int *height_ptr, int size)
+static int read_header_alloc_cb(void *opaque, AVImageInfo *info)
{
- int i;
+ VideoData *s = opaque;
- for(i=0;i<sizeof(sizes)/sizeof(sizes[0]);i++) {
- if ((sizes[i][0] * sizes[i][1]) == size) {
- *width_ptr = sizes[i][0];
- *height_ptr = sizes[i][1];
- return 0;
- }
- }
- return -1;
+ s->width = info->width;
+ s->height = info->height;
+ s->pix_fmt = info->pix_fmt;
+ /* stop image reading but no error */
+ return 1;
}
static int img_read_header(AVFormatContext *s1, AVFormatParameters *ap)
{
VideoData *s = s1->priv_data;
- int i, h;
+ int i, ret;
char buf[1024];
- char buf1[32];
ByteIOContext pb1, *f = &pb1;
AVStream *st;
@@ -315,6 +78,9 @@ static int img_read_header(AVFormatContext *s1, AVFormatParameters *ap)
return -ENOMEM;
}
+ if (ap && ap->image_format)
+ s->img_fmt = ap->image_format;
+
strcpy(s->path, s1->filename);
s->img_number = 0;
@@ -324,20 +90,6 @@ static int img_read_header(AVFormatContext *s1, AVFormatParameters *ap)
else
s->is_pipe = 1;
- if (s1->iformat == &pgmyuvpipe_iformat ||
- s1->iformat == &pgmyuv_iformat)
- s->img_fmt = IMGFMT_PGMYUV;
- else if (s1->iformat == &pgmpipe_iformat ||
- s1->iformat == &pgm_iformat)
- s->img_fmt = IMGFMT_PGM;
- else if (s1->iformat == &imgyuv_iformat)
- s->img_fmt = IMGFMT_YUV;
- else if (s1->iformat == &ppmpipe_iformat ||
- s1->iformat == &ppm_iformat)
- s->img_fmt = IMGFMT_PPM;
- else
- goto fail;
-
if (!s->is_pipe) {
/* try to find the first image */
for(i=0;i<5;i++) {
@@ -353,43 +105,9 @@ static int img_read_header(AVFormatContext *s1, AVFormatParameters *ap)
f = &s1->pb;
}
- /* find the image size */
- /* XXX: use generic file format guessing, as mpeg */
- switch(s->img_fmt) {
- case IMGFMT_PGM:
- case IMGFMT_PGMYUV:
- case IMGFMT_PPM:
- pnm_get(f, buf1, sizeof(buf1));
- pnm_get(f, buf1, sizeof(buf1));
- s->width = atoi(buf1);
- pnm_get(f, buf1, sizeof(buf1));
- h = atoi(buf1);
- if (s->img_fmt == IMGFMT_PGMYUV)
- h = (h * 2) / 3;
- s->height = h;
- if (s->width <= 0 ||
- s->height <= 0 ||
- (s->width % 2) != 0 ||
- (s->height % 2) != 0) {
- goto fail1;
- }
- break;
- case IMGFMT_YUV:
- /* infer size by using the file size. */
- {
- int img_size;
- URLContext *h;
-
- /* XXX: hack hack */
- h = url_fileno(f);
- img_size = url_seek(h, 0, SEEK_END);
- if (infer_size(&s->width, &s->height, img_size) < 0) {
- goto fail1;
- }
- }
- break;
- }
-
+ ret = av_read_image(f, s1->filename, s->img_fmt, read_header_alloc_cb, s);
+ if (ret < 0)
+ goto fail1;
if (!s->is_pipe) {
url_fclose(f);
@@ -397,18 +115,13 @@ static int img_read_header(AVFormatContext *s1, AVFormatParameters *ap)
url_fseek(f, 0, SEEK_SET);
}
-
st->codec.codec_type = CODEC_TYPE_VIDEO;
st->codec.codec_id = CODEC_ID_RAWVIDEO;
st->codec.width = s->width;
st->codec.height = s->height;
- if (s->img_fmt == IMGFMT_PPM) {
- st->codec.pix_fmt = PIX_FMT_RGB24;
- s->img_size = (s->width * s->height * 3);
- } else {
- st->codec.pix_fmt = PIX_FMT_YUV420P;
- s->img_size = (s->width * s->height * 3) / 2;
- }
+ st->codec.pix_fmt = s->pix_fmt;
+ s->img_size = avpicture_get_size(s->pix_fmt, s->width, s->height);
+
if (!ap || !ap->frame_rate)
st->codec.frame_rate = 25 * FRAME_RATE_BASE;
else
@@ -423,159 +136,111 @@ static int img_read_header(AVFormatContext *s1, AVFormatParameters *ap)
return -EIO;
}
-static int img_read_close(AVFormatContext *s1)
+static int read_packet_alloc_cb(void *opaque, AVImageInfo *info)
{
+ VideoData *s = opaque;
+
+ if (info->width != s->width ||
+ info->height != s->height)
+ return -1;
+ avpicture_fill(&info->pict, s->ptr, info->pix_fmt, info->width, info->height);
return 0;
}
-/******************************************************/
-/* image output */
-
-static int pgm_save(AVPicture *picture, int width, int height, ByteIOContext *pb, int is_yuv)
+static int img_read_packet(AVFormatContext *s1, AVPacket *pkt)
{
- int i, h;
- char buf[100];
- UINT8 *ptr, *ptr1, *ptr2;
-
- h = height;
- if (is_yuv)
- h = (height * 3) / 2;
- snprintf(buf, sizeof(buf),
- "P5\n%d %d\n%d\n",
- width, h, 255);
- put_buffer(pb, buf, strlen(buf));
-
- ptr = picture->data[0];
- for(i=0;i<height;i++) {
- put_buffer(pb, ptr, width);
- ptr += picture->linesize[0];
- }
+ VideoData *s = s1->priv_data;
+ char filename[1024];
+ int ret;
+ ByteIOContext f1, *f;
+ static INT64 first_frame;
- if (is_yuv) {
- height >>= 1;
- width >>= 1;
- ptr1 = picture->data[1];
- ptr2 = picture->data[2];
- for(i=0;i<height;i++) {
- put_buffer(pb, ptr1, width);
- put_buffer(pb, ptr2, width);
- ptr1 += picture->linesize[1];
- ptr2 += picture->linesize[2];
+ if (emulate_frame_rate) {
+ if (!first_frame) {
+ first_frame = av_gettime();
+ } else {
+ INT64 pts;
+ INT64 nowus;
+
+ nowus = av_gettime() - first_frame;
+
+ pts = ((INT64)s->img_number * FRAME_RATE_BASE * 1000000) / (s1->streams[0]->codec.frame_rate);
+
+ if (pts > nowus)
+ usleep(pts - nowus);
}
}
- put_flush_packet(pb);
- return 0;
-}
-static int ppm_save(AVPicture *picture, int width, int height, ByteIOContext *pb)
-{
- int i;
- char buf[100];
- UINT8 *ptr;
+ if (!s->is_pipe) {
+ if (get_frame_filename(filename, sizeof(filename),
+ s->path, s->img_number) < 0)
+ return -EIO;
+ f = &f1;
+ if (url_fopen(f, filename, URL_RDONLY) < 0)
+ return -EIO;
+ } else {
+ f = &s1->pb;
+ if (url_feof(f))
+ return -EIO;
+ }
- snprintf(buf, sizeof(buf),
- "P6\n%d %d\n%d\n",
- width, height, 255);
- put_buffer(pb, buf, strlen(buf));
-
- ptr = picture->data[0];
- for(i=0;i<height;i++) {
- put_buffer(pb, ptr, width * 3);
- ptr += picture->linesize[0];
+ av_new_packet(pkt, s->img_size);
+ pkt->stream_index = 0;
+
+ s->ptr = pkt->data;
+ ret = av_read_image(f, filename, s->img_fmt, read_packet_alloc_cb, s);
+ if (!s->is_pipe) {
+ url_fclose(f);
}
- put_flush_packet(pb);
- return 0;
+ if (ret < 0) {
+ av_free_packet(pkt);
+ return -EIO; /* signal EOF */
+ } else {
+ pkt->pts = ((INT64)s->img_number * s1->pts_den * FRAME_RATE_BASE) / (s1->streams[0]->codec.frame_rate * s1->pts_num);
+ s->img_number++;
+ return 0;
+ }
}
-static int yuv_save(AVPicture *picture, int width, int height, const char *filename)
+static int img_read_close(AVFormatContext *s1)
{
- ByteIOContext pb1, *pb = &pb1;
- char fname[1024], *p;
- int i, j;
- UINT8 *ptr;
- static char *ext = "YUV";
-
- strcpy(fname, filename);
- p = strrchr(fname, '.');
- if (!p || p[1] != 'Y')
- return -EIO;
-
- for(i=0;i<3;i++) {
- if (i == 1) {
- width >>= 1;
- height >>= 1;
- }
-
- p[1] = ext[i];
- if (url_fopen(pb, fname, URL_WRONLY) < 0)
- return -EIO;
-
- ptr = picture->data[i];
- for(j=0;j<height;j++) {
- put_buffer(pb, ptr, width);
- ptr += picture->linesize[i];
- }
- put_flush_packet(pb);
- url_fclose(pb);
- }
return 0;
}
-static int yuv4mpeg_save(AVPicture *picture, int width, int height, ByteIOContext *pb, int need_stream_header,
- int is_yuv, int raten, int rated, int aspectn, int aspectd)
+/******************************************************/
+/* image output */
+
+static int img_set_parameters(AVFormatContext *s, AVFormatParameters *ap)
{
- int i, n, m;
- char buf[Y4M_LINE_MAX+1], buf1[20];
- UINT8 *ptr, *ptr1, *ptr2;
-
- /* construct stream header, if this is the first frame */
- if(need_stream_header) {
- n = snprintf(buf, sizeof(buf), "%s W%d H%d F%d:%d I%s A%d:%d\n",
- Y4M_MAGIC,
- width,
- height,
- raten, rated,
- "p", /* ffmpeg seems to only output progressive video */
- aspectn, aspectd);
- if (n < 0) {
- fprintf(stderr, "Error. YUV4MPEG stream header write failed.\n");
- } else {
- fprintf(stderr, "YUV4MPEG stream header written. FPS is %d\n", raten);
- put_buffer(pb, buf, strlen(buf));
- }
- }
-
- /* construct frame header */
- m = snprintf(buf1, sizeof(buf1), "%s \n", Y4M_FRAME_MAGIC);
- if (m < 0) {
- fprintf(stderr, "Error. YUV4MPEG frame header write failed.\n");
+ VideoData *img = s->priv_data;
+ AVStream *st;
+ AVImageFormat *img_fmt;
+ int i;
+
+ /* find output image format */
+ if (ap && ap->image_format) {
+ img_fmt = ap->image_format;
} else {
- /* fprintf(stderr, "YUV4MPEG frame header written.\n"); */
- put_buffer(pb, buf1, strlen(buf1));
- }
-
- ptr = picture->data[0];
- for(i=0;i<height;i++) {
- put_buffer(pb, ptr, width);
- ptr += picture->linesize[0];
+ img_fmt = guess_image_format(s->filename);
}
+ if (!img_fmt)
+ return -1;
- if (is_yuv) {
- height >>= 1;
- width >>= 1;
- ptr1 = picture->data[1];
- ptr2 = picture->data[2];
- for(i=0;i<height;i++) { /* Cb */
- put_buffer(pb, ptr1, width);
- ptr1 += picture->linesize[1];
- }
- for(i=0;i<height;i++) { /* Cr */
- put_buffer(pb, ptr2, width);
- ptr2 += picture->linesize[2];
- }
+ if (s->nb_streams != 1)
+ return -1;
+
+ st = s->streams[0];
+ /* we select the first matching format */
+ for(i=0;i<PIX_FMT_NB;i++) {
+ if (img_fmt->supported_pixel_formats & (1 << i))
+ break;
}
- put_flush_packet(pb);
+ if (i >= PIX_FMT_NB)
+ return -1;
+ img->img_fmt = img_fmt;
+ img->pix_fmt = i;
+ st->codec.pix_fmt = img->pix_fmt;
return 0;
}
@@ -592,27 +257,7 @@ static int img_write_header(AVFormatContext *s)
else
img->is_pipe = 1;
- if (s->oformat == &pgmyuvpipe_oformat ||
- s->oformat == &pgmyuv_oformat) {
- img->img_fmt = IMGFMT_PGMYUV;
- } else if (s->oformat == &pgmpipe_oformat ||
- s->oformat == &pgm_oformat) {
- img->img_fmt = IMGFMT_PGM;
- } else if (s->oformat == &imgyuv_oformat) {
- img->img_fmt = IMGFMT_YUV;
- } else if (s->oformat == &ppmpipe_oformat ||
- s->oformat == &ppm_oformat) {
- img->img_fmt = IMGFMT_PPM;
- } else if (s->oformat == &yuv4mpegpipe_oformat) {
- img->img_fmt = IMGFMT_YUV4MPEG;
- img->header_written = 0;
- } else {
- goto fail;
- }
return 0;
- fail:
- av_free(img);
- return -EIO;
}
static int img_write_packet(AVFormatContext *s, int stream_index,
@@ -621,123 +266,31 @@ static int img_write_packet(AVFormatContext *s, int stream_index,
VideoData *img = s->priv_data;
AVStream *st = s->streams[stream_index];
ByteIOContext pb1, *pb;
- AVPicture picture;
- int width, height, need_stream_header, ret, size1, raten, rated, aspectn, aspectd, fps, fps1;
+ AVPicture *picture;
+ int width, height, ret;
char filename[1024];
+ AVImageInfo info;
width = st->codec.width;
height = st->codec.height;
- if (img->img_number == 1) {
- need_stream_header = 1;
- } else {
- need_stream_header = 0;
- }
-
- fps = st->codec.frame_rate;
- fps1 = (((float)fps / FRAME_RATE_BASE) * 1000);
-
- /* Sorry about this messy code, but mpeg2enc is very picky about
- * the framerates it accepts. */
- switch(fps1) {
- case 23976:
- raten = 24000; /* turn the framerate into a ratio */
- rated = 1001;
- break;
- case 29970:
- raten = 30000;
- rated = 1001;
- break;
- case 25000:
- raten = 25;
- rated = 1;
- break;
- case 30000:
- raten = 30;
- rated = 1;
- break;
- case 24000:
- raten = 24;
- rated = 1;
- break;
- case 50000:
- raten = 50;
- rated = 1;
- break;
- case 59940:
- raten = 60000;
- rated = 1001;
- break;
- case 60000:
- raten = 60;
- rated = 1;
- break;
- default:
- raten = fps1; /* this setting should work, but often doesn't */
- rated = 1000;
- break;
- }
-
- aspectn = 1;
- aspectd = 1; /* ffmpeg always uses a 1:1 aspect ratio */
+ picture = (AVPicture *)buf;
- switch(st->codec.pix_fmt) {
- case PIX_FMT_YUV420P:
- size1 = (width * height * 3) / 2;
- if (size != size1)
- return -EIO;
-
- picture.data[0] = buf;
- picture.data[1] = picture.data[0] + width * height;
- picture.data[2] = picture.data[1] + (width * height) / 4;
- picture.linesize[0] = width;
- picture.linesize[1] = width >> 1;
- picture.linesize[2] = width >> 1;
- break;
- case PIX_FMT_RGB24:
- size1 = (width * height * 3);
- if (size != size1)
- return -EIO;
- picture.data[0] = buf;
- picture.linesize[0] = width * 3;
- break;
- default:
- return -EIO;
- }
-
-/*
- This if-statement destroys pipes - I do not see why it is necessary
- if (get_frame_filename(filename, sizeof(filename),
- img->path, img->img_number) < 0)
- return -EIO;
-*/
- get_frame_filename(filename, sizeof(filename),
- img->path, img->img_number);
if (!img->is_pipe) {
+ if (get_frame_filename(filename, sizeof(filename),
+ img->path, img->img_number) < 0)
+ return -EIO;
pb = &pb1;
if (url_fopen(pb, filename, URL_WRONLY) < 0)
return -EIO;
} else {
pb = &s->pb;
}
- switch(img->img_fmt) {
- case IMGFMT_PGMYUV:
- ret = pgm_save(&picture, width, height, pb, 1);
- break;
- case IMGFMT_PGM:
- ret = pgm_save(&picture, width, height, pb, 0);
- break;
- case IMGFMT_YUV:
- ret = yuv_save(&picture, width, height, filename);
- break;
- case IMGFMT_PPM:
- ret = ppm_save(&picture, width, height, pb);
- break;
- case IMGFMT_YUV4MPEG:
- ret = yuv4mpeg_save(&picture, width, height, pb,
- need_stream_header, 1, raten, rated, aspectn, aspectd);
- break;
- }
+ info.width = width;
+ info.height = height;
+ info.pix_fmt = st->codec.pix_fmt;
+ info.pict = *picture;
+ ret = av_write_image(pb, img->img_fmt, &info);
if (!img->is_pipe) {
url_fclose(pb);
}
@@ -751,38 +304,13 @@ static int img_write_trailer(AVFormatContext *s)
return 0;
}
-static AVInputFormat pgm_iformat = {
- "pgm",
- "pgm image format",
- sizeof(VideoData),
- NULL,
- img_read_header,
- img_read_packet,
- img_read_close,
- NULL,
- AVFMT_NOFILE | AVFMT_NEEDNUMBER,
- .extensions = "pgm",
-};
-
-static AVOutputFormat pgm_oformat = {
- "pgm",
- "pgm image format",
- "",
- "pgm",
- sizeof(VideoData),
- CODEC_ID_NONE,
- CODEC_ID_RAWVIDEO,
- img_write_header,
- img_write_packet,
- img_write_trailer,
- AVFMT_NOFILE | AVFMT_NEEDNUMBER,
-};
+/* input */
-static AVInputFormat pgmyuv_iformat = {
- "pgmyuv",
- "pgm with YUV content image format",
+static AVInputFormat image_iformat = {
+ "image",
+ "image sequence",
sizeof(VideoData),
- NULL, /* no probe */
+ image_probe,
img_read_header,
img_read_packet,
img_read_close,
@@ -790,77 +318,9 @@ static AVInputFormat pgmyuv_iformat = {
AVFMT_NOFILE | AVFMT_NEEDNUMBER,
};
-static AVOutputFormat pgmyuv_oformat = {
- "pgmyuv",
- "pgm with YUV content image format",
- "",
- "pgm",
- sizeof(VideoData),
- CODEC_ID_NONE,
- CODEC_ID_RAWVIDEO,
- img_write_header,
- img_write_packet,
- img_write_trailer,
- AVFMT_NOFILE | AVFMT_NEEDNUMBER,
-};
-
-static AVInputFormat ppm_iformat = {
- "ppm",
- "ppm image format",
- sizeof(VideoData),
- NULL,
- img_read_header,
- img_read_packet,
- img_read_close,
- NULL,
- AVFMT_NOFILE | AVFMT_NEEDNUMBER | AVFMT_RGB24,
- .extensions = "ppm",
-};
-
-static AVOutputFormat ppm_oformat = {
- "ppm",
- "ppm image format",
- "",
- "ppm",
- sizeof(VideoData),
- CODEC_ID_NONE,
- CODEC_ID_RAWVIDEO,
- img_write_header,
- img_write_packet,
- img_write_trailer,
- AVFMT_NOFILE | AVFMT_NEEDNUMBER | AVFMT_RGB24,
-};
-
-static AVInputFormat imgyuv_iformat = {
- ".Y.U.V",
- ".Y.U.V format",
- sizeof(VideoData),
- NULL,
- img_read_header,
- img_read_packet,
- img_read_close,
- NULL,
- AVFMT_NOFILE | AVFMT_NEEDNUMBER,
- .extensions = "Y",
-};
-
-static AVOutputFormat imgyuv_oformat = {
- ".Y.U.V",
- ".Y.U.V format",
- "",
- "Y",
- sizeof(VideoData),
- CODEC_ID_NONE,
- CODEC_ID_RAWVIDEO,
- img_write_header,
- img_write_packet,
- img_write_trailer,
- AVFMT_NOFILE | AVFMT_NEEDNUMBER,
-};
-
-static AVInputFormat pgmpipe_iformat = {
- "pgmpipe",
- "PGM pipe format",
+static AVInputFormat imagepipe_iformat = {
+ "imagepipe",
+ "piped image sequence",
sizeof(VideoData),
NULL, /* no probe */
img_read_header,
@@ -869,108 +329,46 @@ static AVInputFormat pgmpipe_iformat = {
NULL,
};
-static AVOutputFormat pgmpipe_oformat = {
- "pgmpipe",
- "PGM pipe format",
- "",
- "pgm",
- sizeof(VideoData),
- CODEC_ID_NONE,
- CODEC_ID_RAWVIDEO,
- img_write_header,
- img_write_packet,
- img_write_trailer,
-};
-static AVInputFormat pgmyuvpipe_iformat = {
- "pgmyuvpipe",
- "PGM YUV pipe format",
- sizeof(VideoData),
- NULL, /* no probe */
- img_read_header,
- img_read_packet,
- img_read_close,
- NULL,
-};
+/* output */
-static AVOutputFormat pgmyuvpipe_oformat = {
- "pgmyuvpipe",
- "PGM YUV pipe format",
+static AVOutputFormat image_oformat = {
+ "image",
+ "image sequence",
"",
- "pgm",
- sizeof(VideoData),
- CODEC_ID_NONE,
- CODEC_ID_RAWVIDEO,
- img_write_header,
- img_write_packet,
- img_write_trailer,
-};
-
-static AVInputFormat ppmpipe_iformat = {
- "ppmpipe",
- "PPM pipe format",
- sizeof(VideoData),
- NULL, /* no probe */
- img_read_header,
- img_read_packet,
- img_read_close,
- NULL,
- .flags = AVFMT_RGB24,
-};
-
-static AVOutputFormat ppmpipe_oformat = {
- "ppmpipe",
- "PPM pipe format",
"",
- "ppm",
sizeof(VideoData),
CODEC_ID_NONE,
CODEC_ID_RAWVIDEO,
img_write_header,
img_write_packet,
img_write_trailer,
- .flags = AVFMT_RGB24,
+ AVFMT_NOFILE | AVFMT_NEEDNUMBER | AVFMT_RAWPICTURE,
+ img_set_parameters,
};
-
-static AVOutputFormat yuv4mpegpipe_oformat = {
- "yuv4mpegpipe",
- "YUV4MPEG pipe format",
+static AVOutputFormat imagepipe_oformat = {
+ "imagepipe",
+ "piped image sequence",
+ "",
"",
- "yuv4mpeg",
sizeof(VideoData),
CODEC_ID_NONE,
CODEC_ID_RAWVIDEO,
img_write_header,
img_write_packet,
img_write_trailer,
+ AVFMT_NEEDNUMBER | AVFMT_RAWPICTURE,
+ img_set_parameters,
};
-
int img_init(void)
{
- av_register_input_format(&pgm_iformat);
- av_register_output_format(&pgm_oformat);
-
- av_register_input_format(&pgmyuv_iformat);
- av_register_output_format(&pgmyuv_oformat);
-
- av_register_input_format(&ppm_iformat);
- av_register_output_format(&ppm_oformat);
-
- av_register_input_format(&imgyuv_iformat);
- av_register_output_format(&imgyuv_oformat);
-
- av_register_input_format(&pgmpipe_iformat);
- av_register_output_format(&pgmpipe_oformat);
-
- av_register_input_format(&pgmyuvpipe_iformat);
- av_register_output_format(&pgmyuvpipe_oformat);
+ av_register_input_format(&image_iformat);
+ av_register_output_format(&image_oformat);
- av_register_input_format(&ppmpipe_iformat);
- av_register_output_format(&ppmpipe_oformat);
-
- av_register_output_format(&yuv4mpegpipe_oformat);
+ av_register_input_format(&imagepipe_iformat);
+ av_register_output_format(&imagepipe_oformat);
return 0;
}
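
On the output side the same structure drives writing: img_write_packet() above simply fills an AVImageInfo from the raw picture and hands it to av_write_image(). A minimal sketch of saving a single picture that way, assuming the calls used in this patch (guess_image_format(), av_write_image(), url_fopen()); my_save_image itself is hypothetical:

/* Sketch only: write one picture through the AVImageFormat layer.
 * The caller is expected to supply a pix_fmt the chosen format supports
 * (cf. img_set_parameters() above). */
static int my_save_image(const char *filename, AVPicture *pict,
                         int pix_fmt, int width, int height)
{
    ByteIOContext pb;
    AVImageInfo info;
    AVImageFormat *fmt;
    int ret;

    /* choose an image format from the file name extension */
    fmt = guess_image_format(filename);
    if (!fmt)
        return -EIO;
    if (url_fopen(&pb, filename, URL_WRONLY) < 0)
        return -EIO;

    info.width   = width;
    info.height  = height;
    info.pix_fmt = pix_fmt;
    info.pict    = *pict;
    ret = av_write_image(&pb, fmt, &info);

    put_flush_packet(&pb);
    url_fclose(&pb);
    return ret;
}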