changeset 829:1c250e20c6e3 trunk

[svn] - more video demuxing removed
author nenolod
date Mon, 12 Mar 2007 15:15:54 -0700
parents c294c4700937
children 68562d99230f
files ChangeLog src/ffmpeg/Makefile src/ffmpeg/libavformat/img.c src/ffmpeg/libavformat/v4l2.c src/ffmpeg/libavformat/yuv4mpeg.c
diffstat 5 files changed, 19 insertions(+), 1353 deletions(-)
--- a/ChangeLog	Mon Mar 12 15:05:25 2007 -0700
+++ b/ChangeLog	Mon Mar 12 15:15:54 2007 -0700
@@ -1,3 +1,21 @@
+2007-03-12 22:05:25 +0000  William Pitcock <nenolod@sacredspiral.co.uk>
+  revision [1756]
+  - more stuff to kill
+  
+  trunk/src/ffmpeg/Makefile               |    8 
+  trunk/src/ffmpeg/libavformat/avidec.c   |  984 ----------------
+  trunk/src/ffmpeg/libavformat/avienc.c   |  580 ---------
+  trunk/src/ffmpeg/libavformat/aviobuf.c  |  131 ++
+  trunk/src/ffmpeg/libavformat/avs.c      |  227 ---
+  trunk/src/ffmpeg/libavformat/dv.c       |  451 -------
+  trunk/src/ffmpeg/libavformat/dv.h       |   37 
+  trunk/src/ffmpeg/libavformat/mov.c      | 1870 --------------------------------
+  trunk/src/ffmpeg/libavformat/movenc.c   | 1674 ----------------------------
+  trunk/src/ffmpeg/libavformat/rtpproto.c |  303 -----
+  trunk/src/ffmpeg/libavformat/voc.c      |  274 ----
+  11 files changed, 131 insertions(+), 6408 deletions(-)
+
+
 2007-03-12 21:49:42 +0000  William Pitcock <nenolod@sacredspiral.co.uk>
   revision [1754]
   - remove some more stuff
--- a/src/ffmpeg/Makefile	Mon Mar 12 15:05:25 2007 -0700
+++ b/src/ffmpeg/Makefile	Mon Mar 12 15:15:54 2007 -0700
@@ -93,7 +93,6 @@
 	libavformat/grab.c \
 	libavformat/idcin.c \
 	libavformat/idroq.c \
-	libavformat/img.c \
 	libavformat/img2.c \
 	libavformat/ipmovie.c \
 	libavformat/isom.c \
@@ -125,13 +124,11 @@
 	libavformat/tiertexseq.c \
 	libavformat/tta.c \
 	libavformat/utils.c \
-	libavformat/v4l2.c \
 	libavformat/wav.c \
 	libavformat/wc3movie.c \
 	libavformat/westwood.c \
 	libavformat/wv.c \
 	libavformat/yuv.c \
-	libavformat/yuv4mpeg.c \
 	libavutil/adler32.c \
 	libavutil/crc.c \
 	libavutil/fifo.c \
@@ -148,6 +145,6 @@
 OBJECTS = ${SOURCES:.c=.o}
 
 CFLAGS += $(PICFLAGS) $(GTK_CFLAGS) $(GLIB_CFLAGS) $(PANGO_CFLAGS) -I. -I../.. -I./libavformat \
-	-I./libavcodec -I./libavutil -DHAVE_AV_CONFIG_H
+	-I./libavcodec -I./libavutil -DHAVE_AV_CONFIG_H -DCONFIG_DEMUXERS
 
 include ../../mk/objective.mk
--- a/src/ffmpeg/libavformat/img.c	Mon Mar 12 15:05:25 2007 -0700
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,400 +0,0 @@
-/*
- * Image format
- * Copyright (c) 2000, 2001, 2002 Fabrice Bellard.
- *
- * This file is part of FFmpeg.
- *
- * FFmpeg is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 2.1 of the License, or (at your option) any later version.
- *
- * FFmpeg is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with FFmpeg; if not, write to the Free Software
- * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
- */
-#include "avformat.h"
-
-typedef struct {
-    int width;
-    int height;
-    int img_first;
-    int img_last;
-    int img_number;
-    int img_count;
-    int img_size;
-    AVImageFormat *img_fmt;
-    int pix_fmt;
-    int is_pipe;
-    char path[1024];
-    /* temporary usage */
-    void *ptr;
-} VideoData;
-
-
-/* return -1 if no image found */
-static int find_image_range(int *pfirst_index, int *plast_index,
-                            const char *path)
-{
-    char buf[1024];
-    int range, last_index, range1, first_index;
-
-    /* find the first image */
-    for(first_index = 0; first_index < 5; first_index++) {
-        if (av_get_frame_filename(buf, sizeof(buf), path, first_index) < 0)
-            goto fail;
-        if (url_exist(buf))
-            break;
-    }
-    if (first_index == 5)
-        goto fail;
-
-    /* find the last image */
-    last_index = first_index;
-    for(;;) {
-        range = 0;
-        for(;;) {
-            if (!range)
-                range1 = 1;
-            else
-                range1 = 2 * range;
-            if (av_get_frame_filename(buf, sizeof(buf), path,
-                                      last_index + range1) < 0)
-                goto fail;
-            if (!url_exist(buf))
-                break;
-            range = range1;
-            /* just in case... */
-            if (range >= (1 << 30))
-                goto fail;
-        }
-        /* we are sure than image last_index + range exists */
-        if (!range)
-            break;
-        last_index += range;
-    }
-    *pfirst_index = first_index;
-    *plast_index = last_index;
-    return 0;
- fail:
-    return -1;
-}
-
-
-static int image_probe(AVProbeData *p)
-{
-    if (av_filename_number_test(p->filename) && guess_image_format(p->filename))
-        return AVPROBE_SCORE_MAX-1;
-    else
-        return 0;
-}
-
-static int read_header_alloc_cb(void *opaque, AVImageInfo *info)
-{
-    VideoData *s = opaque;
-
-    s->width = info->width;
-    s->height = info->height;
-    s->pix_fmt = info->pix_fmt;
-    /* stop image reading but no error */
-    return 1;
-}
-
-static int img_read_header(AVFormatContext *s1, AVFormatParameters *ap)
-{
-    VideoData *s = s1->priv_data;
-    int ret, first_index, last_index;
-    char buf[1024];
-    ByteIOContext pb1, *f = &pb1;
-    AVStream *st;
-
-    st = av_new_stream(s1, 0);
-    if (!st) {
-        return -ENOMEM;
-    }
-
-    if (ap->image_format)
-        s->img_fmt = ap->image_format;
-
-    pstrcpy(s->path, sizeof(s->path), s1->filename);
-    s->img_number = 0;
-    s->img_count = 0;
-
-    /* find format */
-    if (s1->iformat->flags & AVFMT_NOFILE)
-        s->is_pipe = 0;
-    else
-        s->is_pipe = 1;
-
-    if (!ap->time_base.num) {
-        st->codec->time_base= (AVRational){1,25};
-    } else {
-        st->codec->time_base= ap->time_base;
-    }
-
-    if (!s->is_pipe) {
-        if (find_image_range(&first_index, &last_index, s->path) < 0)
-            goto fail;
-        s->img_first = first_index;
-        s->img_last = last_index;
-        s->img_number = first_index;
-        /* compute duration */
-        st->start_time = 0;
-        st->duration = last_index - first_index + 1;
-        if (av_get_frame_filename(buf, sizeof(buf), s->path, s->img_number) < 0)
-            goto fail;
-        if (url_fopen(f, buf, URL_RDONLY) < 0)
-            goto fail;
-    } else {
-        f = &s1->pb;
-    }
-
-    ret = av_read_image(f, s1->filename, s->img_fmt, read_header_alloc_cb, s);
-    if (ret < 0)
-        goto fail1;
-
-    if (!s->is_pipe) {
-        url_fclose(f);
-    } else {
-        url_fseek(f, 0, SEEK_SET);
-    }
-
-    st->codec->codec_type = CODEC_TYPE_VIDEO;
-    st->codec->codec_id = CODEC_ID_RAWVIDEO;
-    st->codec->width = s->width;
-    st->codec->height = s->height;
-    st->codec->pix_fmt = s->pix_fmt;
-    s->img_size = avpicture_get_size(s->pix_fmt, (s->width+15)&(~15), (s->height+15)&(~15));
-
-    return 0;
- fail1:
-    if (!s->is_pipe)
-        url_fclose(f);
- fail:
-    return AVERROR_IO;
-}
-
-static int read_packet_alloc_cb(void *opaque, AVImageInfo *info)
-{
-    VideoData *s = opaque;
-
-    if (info->width != s->width ||
-        info->height != s->height)
-        return -1;
-    avpicture_fill(&info->pict, s->ptr, info->pix_fmt, (info->width+15)&(~15), (info->height+15)&(~15));
-    return 0;
-}
-
-static int img_read_packet(AVFormatContext *s1, AVPacket *pkt)
-{
-    VideoData *s = s1->priv_data;
-    char filename[1024];
-    int ret;
-    ByteIOContext f1, *f;
-
-    if (!s->is_pipe) {
-        /* loop over input */
-        if (s1->loop_input && s->img_number > s->img_last) {
-            s->img_number = s->img_first;
-        }
-        if (av_get_frame_filename(filename, sizeof(filename),
-                                  s->path, s->img_number) < 0)
-            return AVERROR_IO;
-        f = &f1;
-        if (url_fopen(f, filename, URL_RDONLY) < 0)
-            return AVERROR_IO;
-    } else {
-        f = &s1->pb;
-        if (url_feof(f))
-            return AVERROR_IO;
-    }
-
-    av_new_packet(pkt, s->img_size);
-    pkt->stream_index = 0;
-
-    s->ptr = pkt->data;
-    ret = av_read_image(f, filename, s->img_fmt, read_packet_alloc_cb, s);
-    if (!s->is_pipe) {
-        url_fclose(f);
-    }
-
-    if (ret < 0) {
-        av_free_packet(pkt);
-        return AVERROR_IO; /* signal EOF */
-    } else {
-        /* XXX: computing this pts is not necessary as it is done in
-           the generic code too */
-        pkt->pts = av_rescale((int64_t)s->img_count * s1->streams[0]->codec->time_base.num, s1->streams[0]->time_base.den, s1->streams[0]->codec->time_base.den) / s1->streams[0]->time_base.num;
-        s->img_count++;
-        s->img_number++;
-        return 0;
-    }
-}
-
-static int img_read_close(AVFormatContext *s1)
-{
-    return 0;
-}
-
-/******************************************************/
-/* image output */
-
-static int img_set_parameters(AVFormatContext *s, AVFormatParameters *ap)
-{
-    VideoData *img = s->priv_data;
-    AVStream *st;
-    AVImageFormat *img_fmt;
-    int i;
-
-    /* find output image format */
-    if (ap->image_format) {
-        img_fmt = ap->image_format;
-    } else {
-        img_fmt = guess_image_format(s->filename);
-    }
-    if (!img_fmt)
-        return -1;
-
-    if (s->nb_streams != 1)
-        return -1;
-
-    st = s->streams[0];
-    /* we select the first matching format */
-    for(i=0;i<PIX_FMT_NB;i++) {
-        if (img_fmt->supported_pixel_formats & (1 << i))
-            break;
-    }
-    if (i >= PIX_FMT_NB)
-        return -1;
-    img->img_fmt = img_fmt;
-    img->pix_fmt = i;
-    st->codec->pix_fmt = img->pix_fmt;
-    return 0;
-}
-
-static int img_write_header(AVFormatContext *s)
-{
-    VideoData *img = s->priv_data;
-
-    img->img_number = 1;
-    pstrcpy(img->path, sizeof(img->path), s->filename);
-
-    /* find format */
-    if (s->oformat->flags & AVFMT_NOFILE)
-        img->is_pipe = 0;
-    else
-        img->is_pipe = 1;
-
-    return 0;
-}
-
-static int img_write_packet(AVFormatContext *s, AVPacket *pkt)
-{
-    VideoData *img = s->priv_data;
-    AVStream *st = s->streams[pkt->stream_index];
-    ByteIOContext pb1, *pb;
-    AVPicture *picture;
-    int width, height, ret;
-    char filename[1024];
-    AVImageInfo info;
-
-    width = st->codec->width;
-    height = st->codec->height;
-
-    picture = (AVPicture *)pkt->data;
-
-    if (!img->is_pipe) {
-        if (av_get_frame_filename(filename, sizeof(filename),
-                                  img->path, img->img_number) < 0)
-            return AVERROR_IO;
-        pb = &pb1;
-        if (url_fopen(pb, filename, URL_WRONLY) < 0)
-            return AVERROR_IO;
-    } else {
-        pb = &s->pb;
-    }
-    info.width = width;
-    info.height = height;
-    info.pix_fmt = st->codec->pix_fmt;
-    info.interleaved = 0;    /* FIXME: there should be a way to set it right */
-    info.pict = *picture;
-    ret = av_write_image(pb, img->img_fmt, &info);
-    if (!img->is_pipe) {
-        url_fclose(pb);
-    }
-
-    img->img_number++;
-    return 0;
-}
-
-static int img_write_trailer(AVFormatContext *s)
-{
-    return 0;
-}
-
-/* input */
-#ifdef CONFIG_IMAGE_DEMUXER
-AVInputFormat image_demuxer = {
-    "image",
-    "image sequence",
-    sizeof(VideoData),
-    image_probe,
-    img_read_header,
-    img_read_packet,
-    img_read_close,
-    NULL,
-    NULL,
-    AVFMT_NOFILE | AVFMT_NEEDNUMBER,
-};
-#endif
-#ifdef CONFIG_IMAGEPIPE_DEMUXER
-AVInputFormat imagepipe_demuxer = {
-    "imagepipe",
-    "piped image sequence",
-    sizeof(VideoData),
-    NULL, /* no probe */
-    img_read_header,
-    img_read_packet,
-    img_read_close,
-    NULL,
-};
-#endif
-
-/* output */
-#ifdef CONFIG_IMAGE_MUXER
-AVOutputFormat image_muxer = {
-    "image",
-    "image sequence",
-    "",
-    "",
-    sizeof(VideoData),
-    CODEC_ID_NONE,
-    CODEC_ID_RAWVIDEO,
-    img_write_header,
-    img_write_packet,
-    img_write_trailer,
-    AVFMT_NOFILE | AVFMT_NEEDNUMBER | AVFMT_RAWPICTURE,
-    img_set_parameters,
-};
-#endif
-#ifdef CONFIG_IMAGEPIPE_MUXER
-AVOutputFormat imagepipe_muxer = {
-    "imagepipe",
-    "piped image sequence",
-    "",
-    "",
-    sizeof(VideoData),
-    CODEC_ID_NONE,
-    CODEC_ID_RAWVIDEO,
-    img_write_header,
-    img_write_packet,
-    img_write_trailer,
-    AVFMT_RAWPICTURE,
-    img_set_parameters,
-};
-#endif
--- a/src/ffmpeg/libavformat/v4l2.c	Mon Mar 12 15:05:25 2007 -0700
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,541 +0,0 @@
-/*
- * Video4Linux2 grab interface
- * Copyright (c) 2000,2001 Fabrice Bellard.
- * Copyright (c) 2006 Luca Abeni.
- *
- * Part of this file is based on the V4L2 video capture example
- * (http://v4l2spec.bytesex.org/v4l2spec/capture.c)
- *
- * Thanks to Michael Niedermayer for providing the mapping between
- * V4L2_PIX_FMT_* and PIX_FMT_*
- *
- *
- * This file is part of FFmpeg.
- *
- * FFmpeg is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 2.1 of the License, or (at your option) any later version.
- *
- * FFmpeg is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with FFmpeg; if not, write to the Free Software
- * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
- */
-#include "avformat.h"
-#include <unistd.h>
-#include <fcntl.h>
-#include <sys/ioctl.h>
-#include <sys/mman.h>
-#include <sys/time.h>
-#include <asm/types.h>
-#include <linux/videodev2.h>
-#include <time.h>
-
-static const int desired_video_buffers = 256;
-
-enum io_method {
-    io_read,
-    io_mmap,
-    io_userptr
-};
-
-struct video_data {
-    int fd;
-    int frame_format; /* V4L2_PIX_FMT_* */
-    enum io_method io_method;
-    int width, height;
-    int frame_rate;
-    int frame_rate_base;
-    int frame_size;
-    int top_field_first;
-
-    int buffers;
-    void **buf_start;
-    unsigned int *buf_len;
-};
-
-struct fmt_map {
-    enum PixelFormat ff_fmt;
-    int32_t v4l2_fmt;
-};
-
-static struct fmt_map fmt_conversion_table[] = {
-    {
-        .ff_fmt = PIX_FMT_YUV420P,
-        .v4l2_fmt = V4L2_PIX_FMT_YUV420,
-    },
-    {
-        .ff_fmt = PIX_FMT_YUV422P,
-        .v4l2_fmt = V4L2_PIX_FMT_YUV422P,
-    },
-    {
-        .ff_fmt = PIX_FMT_YUV422,
-        .v4l2_fmt = V4L2_PIX_FMT_YUYV,
-    },
-    {
-        .ff_fmt = PIX_FMT_UYVY422,
-        .v4l2_fmt = V4L2_PIX_FMT_UYVY,
-    },
-    {
-        .ff_fmt = PIX_FMT_YUV411P,
-        .v4l2_fmt = V4L2_PIX_FMT_YUV411P,
-    },
-    {
-        .ff_fmt = PIX_FMT_YUV410P,
-        .v4l2_fmt = V4L2_PIX_FMT_YUV410,
-    },
-    {
-        .ff_fmt = PIX_FMT_BGR24,
-        .v4l2_fmt = V4L2_PIX_FMT_BGR24,
-    },
-    {
-        .ff_fmt = PIX_FMT_RGB24,
-        .v4l2_fmt = V4L2_PIX_FMT_RGB24,
-    },
-    /*
-    {
-        .ff_fmt = PIX_FMT_RGBA32,
-        .v4l2_fmt = V4L2_PIX_FMT_BGR32,
-    },
-    */
-    {
-        .ff_fmt = PIX_FMT_GRAY8,
-        .v4l2_fmt = V4L2_PIX_FMT_GREY,
-    },
-};
-
-static int device_open(const char *devname, uint32_t *capabilities)
-{
-    struct v4l2_capability cap;
-    int fd;
-    int res;
-
-    fd = open(devname, O_RDWR /*| O_NONBLOCK*/, 0);
-    if (fd < 0) {
-        av_log(NULL, AV_LOG_ERROR, "Cannot open video device %s : %s\n",
-                 devname, strerror(errno));
-
-        return -1;
-    }
-
-    res = ioctl(fd, VIDIOC_QUERYCAP, &cap);
-    // ENOIOCTLCMD definition only availble on __KERNEL__
-    if (res < 0 && errno == 515)
-    {
-        av_log(NULL, AV_LOG_ERROR, "QUERYCAP not implemented, probably V4L device but not supporting V4L2\n");
-        close(fd);
-
-        return -1;
-    }
-    if (res < 0) {
-        av_log(NULL, AV_LOG_ERROR, "ioctl(VIDIOC_QUERYCAP): %s\n",
-                 strerror(errno));
-        close(fd);
-
-        return -1;
-    }
-    if ((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0) {
-        av_log(NULL, AV_LOG_ERROR, "Not a video capture device\n");
-        close(fd);
-
-        return -1;
-    }
-    *capabilities = cap.capabilities;
-
-    return fd;
-}
-
-static int device_init(int fd, int *width, int *height, int pix_fmt)
-{
-    struct v4l2_format fmt;
-    int res;
-
-    memset(&fmt, 0, sizeof(struct v4l2_format));
-    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
-    fmt.fmt.pix.width = *width;
-    fmt.fmt.pix.height = *height;
-    fmt.fmt.pix.pixelformat = pix_fmt;
-    fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
-    res = ioctl(fd, VIDIOC_S_FMT, &fmt);
-    if ((*width != fmt.fmt.pix.width) || (*height != fmt.fmt.pix.height)) {
-        av_log(NULL, AV_LOG_INFO, "The V4L2 driver changed the video from %dx%d to %dx%d\n", *width, *height, fmt.fmt.pix.width, fmt.fmt.pix.height);
-        *width = fmt.fmt.pix.width;
-        *height = fmt.fmt.pix.height;
-    }
-
-    return res;
-}
-
-static int first_field(int fd)
-{
-    int res;
-    v4l2_std_id std;
-
-    res = ioctl(fd, VIDIOC_G_STD, &std);
-    if (res < 0) {
-        return 0;
-    }
-    if (std & V4L2_STD_NTSC) {
-        return 0;
-    }
-
-    return 1;
-}
-
-static uint32_t fmt_ff2v4l(enum PixelFormat pix_fmt)
-{
-    int i;
-
-    for (i = 0; i < sizeof(fmt_conversion_table) / sizeof(struct fmt_map); i++) {
-        if (fmt_conversion_table[i].ff_fmt == pix_fmt) {
-            return fmt_conversion_table[i].v4l2_fmt;
-        }
-    }
-
-    return 0;
-}
-
-static enum PixelFormat fmt_v4l2ff(uint32_t pix_fmt)
-{
-    int i;
-
-    for (i = 0; i < sizeof(fmt_conversion_table) / sizeof(struct fmt_map); i++) {
-        if (fmt_conversion_table[i].v4l2_fmt == pix_fmt) {
-            return fmt_conversion_table[i].ff_fmt;
-        }
-    }
-
-    return -1;
-}
-
-static int mmap_init(struct video_data *s)
-{
-    struct v4l2_requestbuffers req;
-    int i, res;
-
-    memset(&req, 0, sizeof(struct v4l2_requestbuffers));
-    req.count = desired_video_buffers;
-    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
-    req.memory = V4L2_MEMORY_MMAP;
-    res = ioctl (s->fd, VIDIOC_REQBUFS, &req);
-    if (res < 0) {
-        if (errno == EINVAL) {
-            av_log(NULL, AV_LOG_ERROR, "Device does not support mmap\n");
-        } else {
-            av_log(NULL, AV_LOG_ERROR, "ioctl(VIDIOC_REQBUFS)\n");
-        }
-
-        return -1;
-    }
-
-    if (req.count < 2) {
-        av_log(NULL, AV_LOG_ERROR, "Insufficient buffer memory\n");
-
-        return -1;
-    }
-    s->buffers = req.count;
-    s->buf_start = av_malloc(sizeof(void *) * s->buffers);
-    if (s->buf_start == NULL) {
-        av_log(NULL, AV_LOG_ERROR, "Cannot allocate buffer pointers\n");
-
-        return -1;
-    }
-    s->buf_len = av_malloc(sizeof(unsigned int) * s->buffers);
-    if (s->buf_len == NULL) {
-        av_log(NULL, AV_LOG_ERROR, "Cannot allocate buffer sizes\n");
-        av_free(s->buf_start);
-
-        return -1;
-    }
-
-    for (i = 0; i < req.count; i++) {
-        struct v4l2_buffer buf;
-
-        memset(&buf, 0, sizeof(struct v4l2_buffer));
-        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
-        buf.memory = V4L2_MEMORY_MMAP;
-        buf.index = i;
-        res = ioctl (s->fd, VIDIOC_QUERYBUF, &buf);
-        if (res < 0) {
-            av_log(NULL, AV_LOG_ERROR, "ioctl(VIDIOC_QUERYBUF)\n");
-
-            return -1;
-        }
-
-        s->buf_len[i] = buf.length;
-        if (s->buf_len[i] < s->frame_size) {
-            av_log(NULL, AV_LOG_ERROR, "Buffer len [%d] = %d != %d\n", i, s->buf_len[i], s->frame_size);
-
-            return -1;
-        }
-        s->buf_start[i] = mmap (NULL, buf.length,
-                        PROT_READ | PROT_WRITE, MAP_SHARED, s->fd, buf.m.offset);
-        if (s->buf_start[i] == MAP_FAILED) {
-            av_log(NULL, AV_LOG_ERROR, "mmap: %s\n", strerror(errno));
-
-            return -1;
-        }
-    }
-
-    return 0;
-}
-
-static int read_init(struct video_data *s)
-{
-    return -1;
-}
-
-static int mmap_read_frame(struct video_data *s, void *frame, int64_t *ts)
-{
-    struct v4l2_buffer buf;
-    int res;
-
-    memset(&buf, 0, sizeof(struct v4l2_buffer));
-    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
-    buf.memory = V4L2_MEMORY_MMAP;
-
-    /* FIXME: Some special treatment might be needed in case of loss of signal... */
-    while ((res = ioctl(s->fd, VIDIOC_DQBUF, &buf)) < 0 &&
-           ((errno == EAGAIN) || (errno == EINTR)));
-    if (res < 0) {
-        av_log(NULL, AV_LOG_ERROR, "ioctl(VIDIOC_DQBUF): %s\n", strerror(errno));
-
-        return -1;
-    }
-    assert (buf.index < s->buffers);
-    if (buf.bytesused != s->frame_size) {
-        av_log(NULL, AV_LOG_ERROR, "The v4l2 frame is %d bytes, but %d bytes are expected\n", buf.bytesused, s->frame_size);
-
-        return -1;
-    }
-
-    /* Image is at s->buff_start[buf.index] */
-    memcpy(frame, s->buf_start[buf.index], buf.bytesused);
-    *ts = buf.timestamp.tv_sec * int64_t_C(1000000) + buf.timestamp.tv_usec;
-
-    res = ioctl (s->fd, VIDIOC_QBUF, &buf);
-    if (res < 0) {
-        av_log(NULL, AV_LOG_ERROR, "ioctl(VIDIOC_QBUF)\n");
-
-        return -1;
-    }
-
-    return s->buf_len[buf.index];
-}
-
-static int read_frame(struct video_data *s, void *frame, int64_t *ts)
-{
-    return -1;
-}
-
-static int mmap_start(struct video_data *s)
-{
-    enum v4l2_buf_type type;
-    int i, res;
-
-    for (i = 0; i < s->buffers; i++) {
-        struct v4l2_buffer buf;
-
-        memset(&buf, 0, sizeof(struct v4l2_buffer));
-        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
-        buf.memory = V4L2_MEMORY_MMAP;
-        buf.index  = i;
-
-        res = ioctl (s->fd, VIDIOC_QBUF, &buf);
-        if (res < 0) {
-            av_log(NULL, AV_LOG_ERROR, "ioctl(VIDIOC_QBUF): %s\n", strerror(errno));
-
-            return -1;
-        }
-    }
-
-    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
-    res = ioctl (s->fd, VIDIOC_STREAMON, &type);
-    if (res < 0) {
-        av_log(NULL, AV_LOG_ERROR, "ioctl(VIDIOC_STREAMON): %s\n", strerror(errno));
-
-        return -1;
-    }
-
-    return 0;
-}
-
-static void mmap_close(struct video_data *s)
-{
-    enum v4l2_buf_type type;
-    int i;
-
-    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
-    /* We do not check for the result, because we could
-     * not do anything about it anyway...
-     */
-    ioctl(s->fd, VIDIOC_STREAMOFF, &type);
-    for (i = 0; i < s->buffers; i++) {
-        munmap(s->buf_start[i], s->buf_len[i]);
-    }
-    av_free(s->buf_start);
-    av_free(s->buf_len);
-}
-
-static int v4l2_read_header(AVFormatContext *s1, AVFormatParameters *ap)
-{
-    struct video_data *s = s1->priv_data;
-    AVStream *st;
-    int width, height;
-    int res, frame_rate, frame_rate_base;
-    uint32_t desired_format, capabilities;
-    const char *video_device;
-
-    if (ap->width <= 0 || ap->height <= 0 || ap->time_base.den <= 0) {
-        av_log(s1, AV_LOG_ERROR, "Missing/Wrong parameters\n");
-
-        return -1;
-    }
-
-    width = ap->width;
-    height = ap->height;
-    frame_rate = ap->time_base.den;
-    frame_rate_base = ap->time_base.num;
-
-    if((unsigned)width > 32767 || (unsigned)height > 32767) {
-        av_log(s1, AV_LOG_ERROR, "Wrong size %dx%d\n", width, height);
-
-        return -1;
-    }
-
-    st = av_new_stream(s1, 0);
-    if (!st) {
-        return -ENOMEM;
-    }
-    av_set_pts_info(st, 64, 1, 1000000); /* 64 bits pts in us */
-
-    s->width = width;
-    s->height = height;
-    s->frame_rate      = frame_rate;
-    s->frame_rate_base = frame_rate_base;
-
-    video_device = ap->device;
-    if (!video_device) {
-        video_device = "/dev/video";
-    }
-    capabilities = 0;
-    s->fd = device_open(video_device, &capabilities);
-    if (s->fd < 0) {
-        av_free(st);
-
-        return AVERROR_IO;
-    }
-    av_log(s1, AV_LOG_INFO, "[%d]Capabilities: %x\n", s->fd, capabilities);
-
-    desired_format = fmt_ff2v4l(ap->pix_fmt);
-    if (desired_format == 0 || (device_init(s->fd, &width, &height, desired_format) < 0)) {
-        int i, done;
-
-        done = 0; i = 0;
-        while (!done) {
-            desired_format = fmt_conversion_table[i].v4l2_fmt;
-            if (device_init(s->fd, &width, &height, desired_format) < 0) {
-                desired_format = 0;
-                i++;
-            } else {
-               done = 1;
-            }
-            if (i == sizeof(fmt_conversion_table) / sizeof(struct fmt_map)) {
-               done = 1;
-            }
-        }
-    }
-    if (desired_format == 0) {
-        av_log(s1, AV_LOG_ERROR, "Cannot find a proper format.\n");
-        close(s->fd);
-        av_free(st);
-
-        return AVERROR_IO;
-    }
-    s->frame_format = desired_format;
-
-    st->codec->pix_fmt = fmt_v4l2ff(desired_format);
-    s->frame_size = avpicture_get_size(st->codec->pix_fmt, width, height);
-    if (capabilities & V4L2_CAP_STREAMING) {
-        s->io_method = io_mmap;
-        res = mmap_init(s);
-        if (res == 0) {
-            res = mmap_start(s);
-        }
-    } else {
-        s->io_method = io_read;
-        res = read_init(s);
-    }
-    if (res < 0) {
-        close(s->fd);
-        av_free(st);
-
-        return AVERROR_IO;
-    }
-    s->top_field_first = first_field(s->fd);
-
-    st->codec->codec_type = CODEC_TYPE_VIDEO;
-    st->codec->codec_id = CODEC_ID_RAWVIDEO;
-    st->codec->width = width;
-    st->codec->height = height;
-    st->codec->time_base.den = frame_rate;
-    st->codec->time_base.num = frame_rate_base;
-    st->codec->bit_rate = s->frame_size * 1/av_q2d(st->codec->time_base) * 8;
-
-    return 0;
-}
-
-static int v4l2_read_packet(AVFormatContext *s1, AVPacket *pkt)
-{
-    struct video_data *s = s1->priv_data;
-    int res;
-
-    if (av_new_packet(pkt, s->frame_size) < 0)
-        return AVERROR_IO;
-
-    if (s->io_method == io_mmap) {
-        res = mmap_read_frame(s, pkt->data, &pkt->pts);
-    } else if (s->io_method == io_read) {
-        res = read_frame(s, pkt->data, &pkt->pts);
-    } else {
-        return AVERROR_IO;
-    }
-    if (res < 0) {
-        return AVERROR_IO;
-    }
-
-    if (s1->streams[0]->codec->coded_frame) {
-        s1->streams[0]->codec->coded_frame->interlaced_frame = 1;
-        s1->streams[0]->codec->coded_frame->top_field_first = s->top_field_first;
-    }
-
-    return s->frame_size;
-}
-
-static int v4l2_read_close(AVFormatContext *s1)
-{
-    struct video_data *s = s1->priv_data;
-
-    if (s->io_method == io_mmap) {
-        mmap_close(s);
-    }
-
-    close(s->fd);
-    return 0;
-}
-
-AVInputFormat v4l2_demuxer = {
-    "video4linux2",
-    "video grab",
-    sizeof(struct video_data),
-    NULL,
-    v4l2_read_header,
-    v4l2_read_packet,
-    v4l2_read_close,
-    .flags = AVFMT_NOFILE,
-};
--- a/src/ffmpeg/libavformat/yuv4mpeg.c	Mon Mar 12 15:05:25 2007 -0700
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,408 +0,0 @@
-/*
- * YUV4MPEG format
- * Copyright (c) 2001, 2002, 2003 Fabrice Bellard.
- *
- * This file is part of FFmpeg.
- *
- * FFmpeg is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 2.1 of the License, or (at your option) any later version.
- *
- * FFmpeg is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with FFmpeg; if not, write to the Free Software
- * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
- */
-#include "avformat.h"
-
-#define Y4M_MAGIC "YUV4MPEG2"
-#define Y4M_FRAME_MAGIC "FRAME"
-#define Y4M_LINE_MAX 256
-
-struct frame_attributes {
-    int interlaced_frame;
-    int top_field_first;
-};
-
-static int yuv4_generate_header(AVFormatContext *s, char* buf)
-{
-    AVStream *st;
-    int width, height;
-    int raten, rated, aspectn, aspectd, n;
-    char inter;
-    const char *colorspace = "";
-
-    st = s->streams[0];
-    width = st->codec->width;
-    height = st->codec->height;
-
-    av_reduce(&raten, &rated, st->codec->time_base.den, st->codec->time_base.num, (1UL<<31)-1);
-
-    aspectn = st->codec->sample_aspect_ratio.num;
-    aspectd = st->codec->sample_aspect_ratio.den;
-
-    if ( aspectn == 0 && aspectd == 1 ) aspectd = 0;  // 0:0 means unknown
-
-    inter = 'p'; /* progressive is the default */
-    if (st->codec->coded_frame && st->codec->coded_frame->interlaced_frame) {
-        inter = st->codec->coded_frame->top_field_first ? 't' : 'b';
-    }
-
-    switch(st->codec->pix_fmt) {
-    case PIX_FMT_GRAY8:
-        colorspace = " Cmono";
-        break;
-    case PIX_FMT_YUV411P:
-        colorspace = " C411 XYSCSS=411";
-        break;
-    case PIX_FMT_YUV420P:
-        colorspace = (st->codec->codec_id == CODEC_ID_DVVIDEO)?" C420paldv XYSCSS=420PALDV":" C420mpeg2 XYSCSS=420MPEG2";
-        break;
-    case PIX_FMT_YUV422P:
-        colorspace = " C422 XYSCSS=422";
-        break;
-    case PIX_FMT_YUV444P:
-        colorspace = " C444 XYSCSS=444";
-        break;
-    }
-
-    /* construct stream header, if this is the first frame */
-    n = snprintf(buf, Y4M_LINE_MAX, "%s W%d H%d F%d:%d I%c A%d:%d%s\n",
-                 Y4M_MAGIC,
-                 width,
-                 height,
-                 raten, rated,
-                 inter,
-                 aspectn, aspectd,
-                 colorspace);
-
-    return n;
-}
-
-static int yuv4_write_packet(AVFormatContext *s, AVPacket *pkt)
-{
-    AVStream *st = s->streams[pkt->stream_index];
-    ByteIOContext *pb = &s->pb;
-    AVPicture *picture;
-    int* first_pkt = s->priv_data;
-    int width, height, h_chroma_shift, v_chroma_shift;
-    int i, m;
-    char buf2[Y4M_LINE_MAX+1];
-    char buf1[20];
-    uint8_t *ptr, *ptr1, *ptr2;
-
-    picture = (AVPicture *)pkt->data;
-
-    /* for the first packet we have to output the header as well */
-    if (*first_pkt) {
-        *first_pkt = 0;
-        if (yuv4_generate_header(s, buf2) < 0) {
-            av_log(s, AV_LOG_ERROR, "Error. YUV4MPEG stream header write failed.\n");
-            return AVERROR_IO;
-        } else {
-            put_buffer(pb, buf2, strlen(buf2));
-        }
-    }
-
-    /* construct frame header */
-
-    m = snprintf(buf1, sizeof(buf1), "%s\n", Y4M_FRAME_MAGIC);
-    put_buffer(pb, buf1, strlen(buf1));
-
-    width = st->codec->width;
-    height = st->codec->height;
-
-    ptr = picture->data[0];
-    for(i=0;i<height;i++) {
-        put_buffer(pb, ptr, width);
-        ptr += picture->linesize[0];
-    }
-
-    if (st->codec->pix_fmt != PIX_FMT_GRAY8){
-    // Adjust for smaller Cb and Cr planes
-    avcodec_get_chroma_sub_sample(st->codec->pix_fmt, &h_chroma_shift, &v_chroma_shift);
-    width >>= h_chroma_shift;
-    height >>= v_chroma_shift;
-
-    ptr1 = picture->data[1];
-    ptr2 = picture->data[2];
-    for(i=0;i<height;i++) {     /* Cb */
-        put_buffer(pb, ptr1, width);
-        ptr1 += picture->linesize[1];
-    }
-    for(i=0;i<height;i++) {     /* Cr */
-        put_buffer(pb, ptr2, width);
-            ptr2 += picture->linesize[2];
-    }
-    }
-    put_flush_packet(pb);
-    return 0;
-}
-
-static int yuv4_write_header(AVFormatContext *s)
-{
-    int* first_pkt = s->priv_data;
-
-    if (s->nb_streams != 1)
-        return AVERROR_IO;
-
-    if (s->streams[0]->codec->pix_fmt == PIX_FMT_YUV411P) {
-        av_log(s, AV_LOG_ERROR, "Warning: generating rarely used 4:1:1 YUV stream, some mjpegtools might not work.\n");
-    }
-    else if ((s->streams[0]->codec->pix_fmt != PIX_FMT_YUV420P) &&
-             (s->streams[0]->codec->pix_fmt != PIX_FMT_YUV422P) &&
-             (s->streams[0]->codec->pix_fmt != PIX_FMT_GRAY8) &&
-             (s->streams[0]->codec->pix_fmt != PIX_FMT_YUV444P)) {
-        av_log(s, AV_LOG_ERROR, "ERROR: yuv4mpeg only handles yuv444p, yuv422p, yuv420p, yuv411p and gray pixel formats. Use -pix_fmt to select one.\n");
-        return AVERROR_IO;
-    }
-
-    *first_pkt = 1;
-    return 0;
-}
-
-static int yuv4_write_trailer(AVFormatContext *s)
-{
-    return 0;
-}
-
-#ifdef CONFIG_YUV4MPEGPIPE_MUXER
-AVOutputFormat yuv4mpegpipe_muxer = {
-    "yuv4mpegpipe",
-    "YUV4MPEG pipe format",
-    "",
-    "y4m",
-    sizeof(int),
-    CODEC_ID_NONE,
-    CODEC_ID_RAWVIDEO,
-    yuv4_write_header,
-    yuv4_write_packet,
-    yuv4_write_trailer,
-    .flags = AVFMT_RAWPICTURE,
-};
-#endif
-
-/* Header size increased to allow room for optional flags */
-#define MAX_YUV4_HEADER 80
-#define MAX_FRAME_HEADER 80
-
-static int yuv4_read_header(AVFormatContext *s, AVFormatParameters *ap)
-{
-    char header[MAX_YUV4_HEADER+10];  // Include headroom for the longest option
-    char *tokstart,*tokend,*header_end;
-    int i;
-    ByteIOContext *pb = &s->pb;
-    int width=-1, height=-1, raten=0, rated=0, aspectn=0, aspectd=0;
-    enum PixelFormat pix_fmt=PIX_FMT_NONE,alt_pix_fmt=PIX_FMT_NONE;
-    AVStream *st;
-    struct frame_attributes *s1 = s->priv_data;
-
-    for (i=0; i<MAX_YUV4_HEADER; i++) {
-        header[i] = get_byte(pb);
-        if (header[i] == '\n') {
-            header[i+1] = 0x20;  // Add a space after last option. Makes parsing "444" vs "444alpha" easier.
-            header[i+2] = 0;
-            break;
-        }
-    }
-    if (i == MAX_YUV4_HEADER) return -1;
-    if (strncmp(header, Y4M_MAGIC, strlen(Y4M_MAGIC))) return -1;
-
-    s1->interlaced_frame = 0;
-    s1->top_field_first = 0;
-    header_end = &header[i+1]; // Include space
-    for(tokstart = &header[strlen(Y4M_MAGIC) + 1]; tokstart < header_end; tokstart++) {
-        if (*tokstart==0x20) continue;
-        switch (*tokstart++) {
-        case 'W': // Width. Required.
-            width = strtol(tokstart, &tokend, 10);
-            tokstart=tokend;
-            break;
-        case 'H': // Height. Required.
-            height = strtol(tokstart, &tokend, 10);
-            tokstart=tokend;
-            break;
-        case 'C': // Color space
-            if (strncmp("420jpeg",tokstart,7)==0)
-                pix_fmt = PIX_FMT_YUV420P;
-            else if (strncmp("420mpeg2",tokstart,8)==0)
-                pix_fmt = PIX_FMT_YUV420P;
-            else if (strncmp("420paldv", tokstart, 8)==0)
-                pix_fmt = PIX_FMT_YUV420P;
-            else if (strncmp("411", tokstart, 3)==0)
-                pix_fmt = PIX_FMT_YUV411P;
-            else if (strncmp("422", tokstart, 3)==0)
-                pix_fmt = PIX_FMT_YUV422P;
-            else if (strncmp("444alpha", tokstart, 8)==0) {
-                av_log(s, AV_LOG_ERROR, "Cannot handle 4:4:4:4 YUV4MPEG stream.\n");
-                return -1;
-            } else if (strncmp("444", tokstart, 3)==0)
-                pix_fmt = PIX_FMT_YUV444P;
-            else if (strncmp("mono",tokstart, 4)==0) {
-                pix_fmt = PIX_FMT_GRAY8;
-            } else {
-                av_log(s, AV_LOG_ERROR, "YUV4MPEG stream contains an unknown pixel format.\n");
-                return -1;
-            }
-            while(tokstart<header_end&&*tokstart!=0x20) tokstart++;
-            break;
-        case 'I': // Interlace type
-            switch (*tokstart++){
-            case '?':
-                break;
-            case 'p':
-                s1->interlaced_frame=0;
-                break;
-            case 't':
-                s1->interlaced_frame=1;
-                s1->top_field_first=1;
-                break;
-            case 'b':
-                s1->interlaced_frame=1;
-                s1->top_field_first=0;
-                break;
-            case 'm':
-                av_log(s, AV_LOG_ERROR, "YUV4MPEG stream contains mixed interlaced and non-interlaced frames.\n");
-                return -1;
-            default:
-                av_log(s, AV_LOG_ERROR, "YUV4MPEG has invalid header.\n");
-                return -1;
-            }
-            break;
-        case 'F': // Frame rate
-            sscanf(tokstart,"%d:%d",&raten,&rated); // 0:0 if unknown
-            while(tokstart<header_end&&*tokstart!=0x20) tokstart++;
-            break;
-        case 'A': // Pixel aspect
-            sscanf(tokstart,"%d:%d",&aspectn,&aspectd); // 0:0 if unknown
-            while(tokstart<header_end&&*tokstart!=0x20) tokstart++;
-            break;
-        case 'X': // Vendor extensions
-            if (strncmp("YSCSS=",tokstart,6)==0) {
-                // Older nonstandard pixel format representation
-                tokstart+=6;
-                if (strncmp("420JPEG",tokstart,7)==0)
-                    alt_pix_fmt=PIX_FMT_YUV420P;
-                else if (strncmp("420MPEG2",tokstart,8)==0)
-                    alt_pix_fmt=PIX_FMT_YUV420P;
-                else if (strncmp("420PALDV",tokstart,8)==0)
-                    alt_pix_fmt=PIX_FMT_YUV420P;
-                else if (strncmp("411",tokstart,3)==0)
-                    alt_pix_fmt=PIX_FMT_YUV411P;
-                else if (strncmp("422",tokstart,3)==0)
-                    alt_pix_fmt=PIX_FMT_YUV422P;
-                else if (strncmp("444",tokstart,3)==0)
-                    alt_pix_fmt=PIX_FMT_YUV444P;
-            }
-            while(tokstart<header_end&&*tokstart!=0x20) tokstart++;
-            break;
-        }
-    }
-
-    if ((width == -1) || (height == -1)) {
-        av_log(s, AV_LOG_ERROR, "YUV4MPEG has invalid header.\n");
-        return -1;
-    }
-
-    if (pix_fmt == PIX_FMT_NONE) {
-        if (alt_pix_fmt == PIX_FMT_NONE)
-            pix_fmt = PIX_FMT_YUV420P;
-        else
-            pix_fmt = alt_pix_fmt;
-    }
-
-    if (raten == 0 && rated == 0) {
-        // Frame rate unknown
-        raten = 25;
-        rated = 1;
-    }
-
-    if (aspectn == 0 && aspectd == 0) {
-        // Pixel aspect unknown
-        aspectd = 1;
-    }
-
-    st = av_new_stream(s, 0);
-    st = s->streams[0];
-    st->codec->width = width;
-    st->codec->height = height;
-    av_reduce(&raten, &rated, raten, rated, (1UL<<31)-1);
-    av_set_pts_info(st, 64, rated, raten);
-    st->codec->pix_fmt = pix_fmt;
-    st->codec->codec_type = CODEC_TYPE_VIDEO;
-    st->codec->codec_id = CODEC_ID_RAWVIDEO;
-    st->codec->sample_aspect_ratio= (AVRational){aspectn, aspectd};
-
-    return 0;
-}
-
-static int yuv4_read_packet(AVFormatContext *s, AVPacket *pkt)
-{
-    int i;
-    char header[MAX_FRAME_HEADER+1];
-    int packet_size, width, height;
-    AVStream *st = s->streams[0];
-    struct frame_attributes *s1 = s->priv_data;
-
-    for (i=0; i<MAX_FRAME_HEADER; i++) {
-        header[i] = get_byte(&s->pb);
-        if (header[i] == '\n') {
-            header[i+1] = 0;
-            break;
-        }
-    }
-    if (i == MAX_FRAME_HEADER) return -1;
-    if (strncmp(header, Y4M_FRAME_MAGIC, strlen(Y4M_FRAME_MAGIC))) return -1;
-
-    width = st->codec->width;
-    height = st->codec->height;
-
-    packet_size = avpicture_get_size(st->codec->pix_fmt, width, height);
-    if (packet_size < 0)
-        return -1;
-
-    if (av_get_packet(&s->pb, pkt, packet_size) != packet_size)
-        return AVERROR_IO;
-
-    if (s->streams[0]->codec->coded_frame) {
-        s->streams[0]->codec->coded_frame->interlaced_frame = s1->interlaced_frame;
-        s->streams[0]->codec->coded_frame->top_field_first = s1->top_field_first;
-    }
-
-    pkt->stream_index = 0;
-    return 0;
-}
-
-static int yuv4_read_close(AVFormatContext *s)
-{
-    return 0;
-}
-
-static int yuv4_probe(AVProbeData *pd)
-{
-    /* check file header */
-    if (pd->buf_size <= sizeof(Y4M_MAGIC))
-        return 0;
-    if (strncmp(pd->buf, Y4M_MAGIC, sizeof(Y4M_MAGIC)-1)==0)
-        return AVPROBE_SCORE_MAX;
-    else
-        return 0;
-}
-
-#ifdef CONFIG_YUV4MPEGPIPE_DEMUXER
-AVInputFormat yuv4mpegpipe_demuxer = {
-    "yuv4mpegpipe",
-    "YUV4MPEG pipe format",
-    sizeof(struct frame_attributes),
-    yuv4_probe,
-    yuv4_read_header,
-    yuv4_read_packet,
-    yuv4_read_close,
-    .extensions = "y4m"
-};
-#endif