/*
 * Video4Linux2 grab interface
 * Copyright (c) 2000,2001 Fabrice Bellard.
 * Copyright (c) 2006 Luca Abeni.
 *
 * Part of this file is based on the V4L2 video capture example
 * (http://v4l2spec.bytesex.org/v4l2spec/capture.c)
 *
 * Thanks to Michael Niedermayer for providing the mapping between
 * V4L2_PIX_FMT_* and PIX_FMT_*
 *
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
#include "avformat.h"
#include <unistd.h>
#include <fcntl.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/time.h>
#include <string.h>   /* memset(), strerror() */
#include <errno.h>
#include <assert.h>
#define _LINUX_TIME_H 1
#include <linux/videodev.h>
#include <time.h>

static const int desired_video_buffers = 256;

enum io_method {
    io_read,
    io_mmap,
    io_userptr
};

struct video_data {
    int fd;
    int frame_format; /* V4L2_PIX_FMT_* */
    enum io_method io_method;
    int width, height;
    int frame_rate;
    int frame_rate_base;
    int frame_size;
    int top_field_first;

    int buffers;
    void **buf_start;
    unsigned int *buf_len;
};

struct fmt_map {
    enum PixelFormat ff_fmt;
    int32_t v4l2_fmt;
};

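/* Mapping between FFmpeg PIX_FMT_* values and V4L2 pixel format fourccs;
 * used in both directions when negotiating the capture format. */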
static struct fmt_map fmt_conversion_table[] = {
    {
        .ff_fmt = PIX_FMT_YUV420P,
        .v4l2_fmt = V4L2_PIX_FMT_YUV420,
    },
    {
        .ff_fmt = PIX_FMT_YUV422P,
        .v4l2_fmt = V4L2_PIX_FMT_YUV422P,
    },
    {
        .ff_fmt = PIX_FMT_YUV422,
        .v4l2_fmt = V4L2_PIX_FMT_YUYV,
    },
    {
        .ff_fmt = PIX_FMT_UYVY422,
        .v4l2_fmt = V4L2_PIX_FMT_UYVY,
    },
    {
        .ff_fmt = PIX_FMT_YUV411P,
        .v4l2_fmt = V4L2_PIX_FMT_YUV411P,
    },
    {
        .ff_fmt = PIX_FMT_YUV410P,
        .v4l2_fmt = V4L2_PIX_FMT_YUV410,
    },
    {
        .ff_fmt = PIX_FMT_BGR24,
        .v4l2_fmt = V4L2_PIX_FMT_BGR24,
    },
    {
        .ff_fmt = PIX_FMT_RGB24,
        .v4l2_fmt = V4L2_PIX_FMT_RGB24,
    },
/*
    {
        .ff_fmt = PIX_FMT_RGBA32,
        .v4l2_fmt = V4L2_PIX_FMT_BGR32,
    },
*/
    {
        .ff_fmt = PIX_FMT_GRAY8,
        .v4l2_fmt = V4L2_PIX_FMT_GREY,
    },
};

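/* Open the capture device, query it with VIDIOC_QUERYCAP and make sure it
 * supports video capture. Returns the file descriptor on success, -1 on
 * failure; the capability flags are returned through *capabilities. */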
static int device_open(const char *devname, uint32_t *capabilities)
{
    struct v4l2_capability cap;
    int fd;
    int res;

    fd = open(devname, O_RDWR /*| O_NONBLOCK*/, 0);
    if (fd < 0) {
        av_log(NULL, AV_LOG_ERROR, "Cannot open video device %s: %s\n",
               devname, strerror(errno));

        return -1;
    }

    res = ioctl(fd, VIDIOC_QUERYCAP, &cap);
    if (res < 0) {
        av_log(NULL, AV_LOG_ERROR, "ioctl(VIDIOC_QUERYCAP): %s\n",
               strerror(errno));
        close(fd);

        return -1;
    }
    if ((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0) {
        av_log(NULL, AV_LOG_ERROR, "Not a video capture device\n");
        close(fd);

        return -1;
    }
    *capabilities = cap.capabilities;

    return fd;
}

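/* Negotiate frame size and pixel format with the driver via VIDIOC_S_FMT. */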
static int device_init(int fd, int width, int height, int pix_fmt)
{
    struct v4l2_format fmt;

    memset(&fmt, 0, sizeof(struct v4l2_format));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = width;
    fmt.fmt.pix.height = height;
    fmt.fmt.pix.pixelformat = pix_fmt;
    fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
    return ioctl(fd, VIDIOC_S_FMT, &fmt);
}

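/* Guess the field order from the video standard: returns 1 (top field
 * first) unless the driver reports an NTSC standard or VIDIOC_G_STD fails. */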
static int first_field(int fd)
{
    int res;
    v4l2_std_id std;

    res = ioctl(fd, VIDIOC_G_STD, &std);
    if (res < 0) {
        return 0;
    }
    if (std & V4L2_STD_NTSC) {
        return 0;
    }

    return 1;
}

static uint32_t fmt_ff2v4l(enum PixelFormat pix_fmt)
{
    int i;

    for (i = 0; i < sizeof(fmt_conversion_table) / sizeof(struct fmt_map); i++) {
        if (fmt_conversion_table[i].ff_fmt == pix_fmt) {
            return fmt_conversion_table[i].v4l2_fmt;
        }
    }

    return 0;
}

static enum PixelFormat fmt_v4l2ff(uint32_t pix_fmt)
{
    int i;

    for (i = 0; i < sizeof(fmt_conversion_table) / sizeof(struct fmt_map); i++) {
        if (fmt_conversion_table[i].v4l2_fmt == pix_fmt) {
            return fmt_conversion_table[i].ff_fmt;
        }
    }

    return -1;
}

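/* Request capture buffers from the driver (V4L2_MEMORY_MMAP), query each one
 * with VIDIOC_QUERYBUF and map it into our address space. The resulting
 * pointers and lengths are stored in s->buf_start[] / s->buf_len[]. */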
static int mmap_init(struct video_data *s)
{
    struct v4l2_requestbuffers req;
    int i, res;

    memset(&req, 0, sizeof(struct v4l2_requestbuffers));
    req.count = desired_video_buffers;
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;
    res = ioctl(s->fd, VIDIOC_REQBUFS, &req);
    if (res < 0) {
        if (errno == EINVAL) {
            av_log(NULL, AV_LOG_ERROR, "Device does not support mmap\n");
        } else {
            av_log(NULL, AV_LOG_ERROR, "ioctl(VIDIOC_REQBUFS): %s\n", strerror(errno));
        }

        return -1;
    }

    if (req.count < 2) {
        av_log(NULL, AV_LOG_ERROR, "Insufficient buffer memory\n");

        return -1;
    }
    s->buffers = req.count;
    s->buf_start = av_malloc(sizeof(void *) * s->buffers);
    if (s->buf_start == NULL) {
        av_log(NULL, AV_LOG_ERROR, "Cannot allocate buffer pointers\n");

        return -1;
    }
    s->buf_len = av_malloc(sizeof(unsigned int) * s->buffers);
    if (s->buf_len == NULL) {
        av_log(NULL, AV_LOG_ERROR, "Cannot allocate buffer sizes\n");
        av_free(s->buf_start);

        return -1;
    }

    for (i = 0; i < req.count; i++) {
        struct v4l2_buffer buf;

        memset(&buf, 0, sizeof(struct v4l2_buffer));
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = i;
        res = ioctl(s->fd, VIDIOC_QUERYBUF, &buf);
        if (res < 0) {
            av_log(NULL, AV_LOG_ERROR, "ioctl(VIDIOC_QUERYBUF): %s\n", strerror(errno));

            return -1;
        }

        s->buf_len[i] = buf.length;
        if (s->buf_len[i] < s->frame_size) {
            av_log(NULL, AV_LOG_ERROR,
                   "Buffer %d is too small (%d bytes) for the frame size (%d bytes)\n",
                   i, s->buf_len[i], s->frame_size);

            return -1;
        }
        s->buf_start[i] = mmap(NULL, buf.length,
                               PROT_READ | PROT_WRITE, MAP_SHARED, s->fd, buf.m.offset);
        if (s->buf_start[i] == MAP_FAILED) {
            av_log(NULL, AV_LOG_ERROR, "mmap: %s\n", strerror(errno));

            return -1;
        }
    }

    return 0;
}

/* Plain read() I/O is not implemented yet. */
static int read_init(struct video_data *s)
{
    return -1;
}

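/* Dequeue a filled buffer with VIDIOC_DQBUF (retrying on EAGAIN/EINTR),
 * copy the captured image into the caller's frame buffer, convert the
 * buffer timestamp to microseconds, and give the buffer back to the
 * driver with VIDIOC_QBUF. */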
static int mmap_read_frame(struct video_data *s, void *frame, int64_t *ts)
{
    struct v4l2_buffer buf;
    int res;

    memset(&buf, 0, sizeof(struct v4l2_buffer));
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;

    /* FIXME: Some special treatment might be needed in case of loss of signal... */
    while ((res = ioctl(s->fd, VIDIOC_DQBUF, &buf)) < 0 &&
           ((errno == EAGAIN) || (errno == EINTR)));
    if (res < 0) {
        av_log(NULL, AV_LOG_ERROR, "ioctl(VIDIOC_DQBUF): %s\n", strerror(errno));

        return -1;
    }
    assert(buf.index < s->buffers);
    assert(buf.bytesused == s->frame_size);
    /* Image is at s->buf_start[buf.index] */
    memcpy(frame, s->buf_start[buf.index], buf.bytesused);
    *ts = buf.timestamp.tv_sec * int64_t_C(1000000) + buf.timestamp.tv_usec;

    res = ioctl(s->fd, VIDIOC_QBUF, &buf);
    if (res < 0) {
        av_log(NULL, AV_LOG_ERROR, "ioctl(VIDIOC_QBUF): %s\n", strerror(errno));

        return -1;
    }

    return s->buf_len[buf.index];
}

/* Plain read() I/O is not implemented yet. */
static int read_frame(struct video_data *s, void *frame, int64_t *ts)
{
    return -1;
}

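/* Queue all mapped buffers and start streaming with VIDIOC_STREAMON. */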
static int mmap_start(struct video_data *s)
{
    enum v4l2_buf_type type;
    int i, res;

    for (i = 0; i < s->buffers; i++) {
        struct v4l2_buffer buf;

        memset(&buf, 0, sizeof(struct v4l2_buffer));
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = i;

        res = ioctl(s->fd, VIDIOC_QBUF, &buf);
        if (res < 0) {
            av_log(NULL, AV_LOG_ERROR, "ioctl(VIDIOC_QBUF): %s\n", strerror(errno));

            return -1;
        }
    }

    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    res = ioctl(s->fd, VIDIOC_STREAMON, &type);
    if (res < 0) {
        av_log(NULL, AV_LOG_ERROR, "ioctl(VIDIOC_STREAMON): %s\n", strerror(errno));

        return -1;
    }

    return 0;
}

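/* Stop streaming and release the memory-mapped buffers. */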
static void mmap_close(struct video_data *s)
{
    enum v4l2_buf_type type;
    int i;

    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    /* We do not check the result of VIDIOC_STREAMOFF, because there is
     * nothing we could do about a failure anyway.
     */
    ioctl(s->fd, VIDIOC_STREAMOFF, &type);
    for (i = 0; i < s->buffers; i++) {
        munmap(s->buf_start[i], s->buf_len[i]);
    }
    av_free(s->buf_start);
    av_free(s->buf_len);
}

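/* Demuxer read_header: validate the user-supplied parameters, open and
 * configure the capture device, pick a pixel format the device accepts
 * (falling back to a scan of fmt_conversion_table), choose the I/O method
 * and fill in the codec parameters of the new video stream. */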
static int v4l2_read_header(AVFormatContext *s1, AVFormatParameters *ap)
{
    struct video_data *s = s1->priv_data;
    AVStream *st;
    int width, height;
    int res, frame_rate, frame_rate_base;
    uint32_t desired_format, capabilities;
    const char *video_device;

    if (!ap || ap->width <= 0 || ap->height <= 0 || ap->time_base.den <= 0) {
        av_log(s1, AV_LOG_ERROR, "Missing or wrong parameters\n");

        return -1;
    }

    width = ap->width;
    height = ap->height;
    frame_rate = ap->time_base.den;
    frame_rate_base = ap->time_base.num;

    if ((unsigned)width > 32767 || (unsigned)height > 32767) {
        av_log(s1, AV_LOG_ERROR, "Wrong size %dx%d\n", width, height);

        return -1;
    }

    st = av_new_stream(s1, 0);
    if (!st) {
        return -ENOMEM;
    }
    av_set_pts_info(st, 64, 1, 1000000); /* 64 bits pts in us */

    s->width = width;
    s->height = height;
    s->frame_rate = frame_rate;
    s->frame_rate_base = frame_rate_base;

    video_device = ap->device;
    if (!video_device) {
        video_device = "/dev/video";
    }
    capabilities = 0;
    s->fd = device_open(video_device, &capabilities);
    if (s->fd < 0) {
        av_free(st);

        return AVERROR_IO;
    }
    av_log(s1, AV_LOG_INFO, "[%d]Capabilities: %x\n", s->fd, capabilities);

    desired_format = fmt_ff2v4l(ap->pix_fmt);
    if (desired_format == 0 || (device_init(s->fd, width, height, desired_format) < 0)) {
        int i, done;

        /* The requested format is unknown or rejected by the driver:
         * try every entry of the conversion table until one is accepted. */
        done = 0;
        i = 0;
        while (!done) {
            desired_format = fmt_conversion_table[i].v4l2_fmt;
            if (device_init(s->fd, width, height, desired_format) < 0) {
                desired_format = 0;
                i++;
            } else {
                done = 1;
            }
            if (i == sizeof(fmt_conversion_table) / sizeof(struct fmt_map)) {
                done = 1;
            }
        }
    }
    if (desired_format == 0) {
        av_log(s1, AV_LOG_ERROR, "Cannot find a proper format.\n");
        close(s->fd);
        av_free(st);

        return AVERROR_IO;
    }
    s->frame_format = desired_format;

    st->codec->pix_fmt = fmt_v4l2ff(desired_format);
    s->frame_size = avpicture_get_size(st->codec->pix_fmt, width, height);
    if (capabilities & V4L2_CAP_STREAMING) {
        s->io_method = io_mmap;
        res = mmap_init(s);
        if (res == 0) {
            res = mmap_start(s);
        }
    } else {
        s->io_method = io_read;
        res = read_init(s);
    }
    if (res < 0) {
        close(s->fd);
        av_free(st);

        return AVERROR_IO;
    }
    s->top_field_first = first_field(s->fd);

    st->codec->codec_type = CODEC_TYPE_VIDEO;
    st->codec->codec_id = CODEC_ID_RAWVIDEO;
    st->codec->width = width;
    st->codec->height = height;
    st->codec->time_base.den = frame_rate;
    st->codec->time_base.num = frame_rate_base;
    st->codec->bit_rate = s->frame_size * 1 / av_q2d(st->codec->time_base) * 8;

    return 0;
}

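/* Demuxer read_packet: allocate a packet of one frame and fill it through
 * the selected I/O method; the packet pts is the capture timestamp in
 * microseconds. */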
static int v4l2_read_packet(AVFormatContext *s1, AVPacket *pkt)
{
    struct video_data *s = s1->priv_data;
    int res;

    if (av_new_packet(pkt, s->frame_size) < 0)
        return AVERROR_IO;

    if (s->io_method == io_mmap) {
        res = mmap_read_frame(s, pkt->data, &pkt->pts);
    } else if (s->io_method == io_read) {
        res = read_frame(s, pkt->data, &pkt->pts);
    } else {
        return AVERROR_IO;
    }
    if (res < 0) {
        return AVERROR_IO;
    }

    if (s1->streams[0]->codec->coded_frame) {
        s1->streams[0]->codec->coded_frame->interlaced_frame = 1;
        s1->streams[0]->codec->coded_frame->top_field_first = s->top_field_first;
    }

    return s->frame_size;
}

static int v4l2_read_close(AVFormatContext *s1)
{
    struct video_data *s = s1->priv_data;

    if (s->io_method == io_mmap) {
        mmap_close(s);
    }

    close(s->fd);
    return 0;
}

static AVInputFormat v4l2_format = {
    "video4linux2",
    "video grab",
    sizeof(struct video_data),
    NULL,
    v4l2_read_header,
    v4l2_read_packet,
    v4l2_read_close,
    .flags = AVFMT_NOFILE,
};

int v4l2_init(void)
{
    av_register_input_format(&v4l2_format);
    return 0;
}