/*
 * DirectShow capture services (QCAP.DLL)
 *
 * Copyright 2005 Maarten Lankhorst
 *
 * This file contains the part of the vfw capture interface that
 * does the actual Video4Linux(1/2) work required for capturing
 * and setting/getting the media format.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
 */

#include "config.h"
#include "wine/port.h"

#define COBJMACROS

#include <stdarg.h>
#include <stdio.h>
#include <fcntl.h>
#ifdef HAVE_SYS_IOCTL_H
#include <sys/ioctl.h>
#endif
#ifdef HAVE_SYS_MMAN_H
#include <sys/mman.h>
#endif
#include <errno.h>
#ifdef HAVE_SYS_TIME_H
#include <sys/time.h>
#endif
#ifdef HAVE_ASM_TYPES_H
#include <asm/types.h>
#endif
#ifdef HAVE_LIBV4L1_H
#include <libv4l1.h>
#endif
#ifdef HAVE_LINUX_VIDEODEV_H
#include <linux/videodev.h>
#endif
#ifdef HAVE_UNISTD_H
#include <unistd.h>
#endif

#include "windef.h"
#include "winbase.h"
#include "wtypes.h"
#include "wingdi.h"
#include "winuser.h"
#include "dshow.h"
#include "vfwmsgs.h"
#include "amvideo.h"
#include "wine/debug.h"
#include "wine/library.h"

#include "capture.h"
#include "qcap_main.h"

WINE_DEFAULT_DEBUG_CHANNEL(qcap_v4l);

#ifdef VIDIOCMCAPTURE

static typeof(open) *video_open = open;
static typeof(close) *video_close = close;
static typeof(ioctl) *video_ioctl = ioctl;
static typeof(read) *video_read = read;
static typeof(mmap) *video_mmap = mmap;
static typeof(munmap) *video_munmap = munmap;

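/* Resolve the libv4l1 wrappers at run time if the library is available;
 * otherwise the plain open/close/ioctl/read/mmap/munmap calls above are
 * used directly. */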
static void video_init(void)
{
#ifdef SONAME_LIBV4L1
    static void *video_lib;

    if (video_lib)
        return;
    video_lib = wine_dlopen(SONAME_LIBV4L1, RTLD_NOW, NULL, 0);
    if (!video_lib)
        return;
    video_open = wine_dlsym(video_lib, "v4l1_open", NULL, 0);
    video_close = wine_dlsym(video_lib, "v4l1_close", NULL, 0);
    video_ioctl = wine_dlsym(video_lib, "v4l1_ioctl", NULL, 0);
    video_read = wine_dlsym(video_lib, "v4l1_read", NULL, 0);
    video_mmap = wine_dlsym(video_lib, "v4l1_mmap", NULL, 0);
    video_munmap = wine_dlsym(video_lib, "v4l1_munmap", NULL, 0);
#endif
}

typedef void (* Renderer)(const Capture *, LPBYTE bufferin, const BYTE *stream);

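/* State of a single V4L1 capture device: negotiated frame geometry, the
 * device file descriptor, the mmap or read() capture buffers, and the worker
 * thread that delivers frames to the output pin. */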
struct _Capture
{
    UINT width, height, bitDepth, fps, outputwidth, outputheight;
    BOOL swresize;

    CRITICAL_SECTION CritSect;

    IPin *pOut;
    int fd, mmap;
    BOOL iscommitted, stopped;
    struct video_picture pict;
    int dbrightness, dhue, dcolour, dcontrast;

    /* mmap (V4L1) */
    struct video_mmap *grab_buf;
    struct video_mbuf gb_buffers;
    unsigned char *pmap;
    int buffers;

    /* read (V4L1) */
    int imagesize;
    char * grab_data;

    int curframe;

    HANDLE thread;
    Renderer renderer;
};

struct renderlist
{
    int depth;
    const char* name;
    Renderer renderer;
};

static void renderer_RGB(const Capture *capBox, LPBYTE bufferin, const BYTE *stream);
static void renderer_YUV(const Capture *capBox, LPBYTE bufferin, const BYTE *stream);

static const struct renderlist renderlist_V4l[] = {
    {  0, "NULL renderer",         NULL },
    {  8, "Gray scales",           NULL },         /* 1,  not supported */
    {  0, "High 240 cube (BT848)", NULL },         /* 2,  not supported */
    { 16, "16 bit RGB (565)",      NULL },         /* 3,  not supported */
    { 24, "24 bit RGB values",     renderer_RGB }, /* 4,  supported */
    { 32, "32 bit RGB values",     renderer_RGB }, /* 5,  supported */
    { 16, "15 bit RGB (555)",      NULL },         /* 6,  not supported */
    { 16, "YUV 422 (Not P)",       renderer_YUV }, /* 7,  supported */
    { 16, "YUYV (Not P)",          renderer_YUV }, /* 8,  supported */
    { 16, "UYVY (Not P)",          renderer_YUV }, /* 9,  supported */
    { 16, "YUV 420 (Not P)",       NULL },         /* 10, not supported; if I had to guess it's YYUYYV */
    { 12, "YUV 411 (Not P)",       renderer_YUV }, /* 11, supported */
    {  0, "Raw capturing (BT848)", NULL },         /* 12, not supported */
    { 16, "YUV 422 (Planar)",      renderer_YUV }, /* 13, supported */
    { 12, "YUV 411 (Planar)",      renderer_YUV }, /* 14, supported */
    { 12, "YUV 420 (Planar)",      renderer_YUV }, /* 15, supported */
    { 10, "YUV 410 (Planar)",      renderer_YUV }, /* 16, supported */
    /* FIXME: add YUV420 support */
    {  0, NULL,                    NULL },
};

static const int fallback_V4l[] = { 4, 5, 7, 8, 9, 13, 15, 14, 16, 11, -1 };
/* Fallback order: try the RGB formats first, then the YUV formats
   (perhaps YUV should be tried first?) */

/* static const Capture defbox; */

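/* ioctl() wrapper that retries the request when it is interrupted
 * by a signal (EINTR). */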
static int xioctl(int fd, int request, void * arg)
{
    int r;

    do {
        r = video_ioctl (fd, request, arg);
    } while (-1 == r && EINTR == errno);

    return r;
}

/* Prepare the capture buffers */
static HRESULT V4l_Prepare(Capture *capBox)
{
    TRACE("%p: Preparing for %dx%d resolution\n", capBox, capBox->width, capBox->height);

    /* Try mmap */
    capBox->mmap = 0;
    if (xioctl(capBox->fd, VIDIOCGMBUF, &capBox->gb_buffers) != -1 &&
        capBox->gb_buffers.frames)
    {
        /* Only a single buffer is used, even when the driver offers more */
        capBox->buffers = capBox->gb_buffers.frames;
        if (capBox->gb_buffers.frames > 1)
            capBox->buffers = 1;
        TRACE("%p: Using %d/%d buffers\n", capBox,
              capBox->buffers, capBox->gb_buffers.frames);

        capBox->pmap = video_mmap( 0, capBox->gb_buffers.size, PROT_READ|PROT_WRITE,
                                   MAP_SHARED, capBox->fd, 0 );
        if (capBox->pmap != MAP_FAILED)
        {
            int i;

            capBox->grab_buf = CoTaskMemAlloc(sizeof(struct video_mmap) * capBox->buffers);
            if (!capBox->grab_buf)
            {
                video_munmap(capBox->pmap, capBox->gb_buffers.size);
                return E_OUTOFMEMORY;
            }

            /* Set up the mmap capture buffers */
            for (i = 0; i < capBox->buffers; i++)
            {
                capBox->grab_buf[i].format = capBox->pict.palette;
                capBox->grab_buf[i].frame = i;
                capBox->grab_buf[i].width = capBox->width;
                capBox->grab_buf[i].height = capBox->height;
            }
            capBox->mmap = 1;
        }
    }
    if (!capBox->mmap)
    {
        /* Fall back to plain read() with a single image-sized buffer */
        capBox->buffers = 1;
        capBox->imagesize = renderlist_V4l[capBox->pict.palette].depth *
                            capBox->height * capBox->width / 8;
        capBox->grab_data = CoTaskMemAlloc(capBox->imagesize);
        if (!capBox->grab_data)
            return E_OUTOFMEMORY;
    }
    TRACE("Using mmap: %d\n", capBox->mmap);
    return S_OK;
}

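/* Undo V4l_Prepare(): sync any outstanding mmap frames and release the
 * capture buffers. */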
static void V4l_Unprepare(Capture *capBox)
{
    if (capBox->mmap)
    {
        for (capBox->curframe = 0; capBox->curframe < capBox->buffers; capBox->curframe++)
            xioctl(capBox->fd, VIDIOCSYNC, &capBox->grab_buf[capBox->curframe]);
        video_munmap(capBox->pmap, capBox->gb_buffers.size);
        CoTaskMemFree(capBox->grab_buf);
    }
    else
        CoTaskMemFree(capBox->grab_data);
}

HRESULT qcap_driver_destroy(Capture *capBox)
{
    TRACE("%p\n", capBox);

    if( capBox->fd != -1 )
        video_close(capBox->fd);
    capBox->CritSect.DebugInfo->Spare[0] = 0;
    DeleteCriticalSection(&capBox->CritSect);
    CoTaskMemFree(capBox);
    return S_OK;
}

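/* Only 24 bpp RGB media types are accepted.  The requested frame size is
 * applied with VIDIOCSWIN; if the driver rejects it, capture keeps its
 * current size and frames are scaled in software instead. */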
HRESULT qcap_driver_set_format(Capture *capBox, AM_MEDIA_TYPE * mT)
{
    int newheight, newwidth;
    struct video_window window;
    VIDEOINFOHEADER *format;

    TRACE("%p\n", capBox);

    format = (VIDEOINFOHEADER *) mT->pbFormat;
    if (format->bmiHeader.biBitCount != 24 ||
        format->bmiHeader.biCompression != BI_RGB)
    {
        FIXME("unsupported media type %d %d\n", format->bmiHeader.biBitCount,
              format->bmiHeader.biCompression );
        return VFW_E_INVALIDMEDIATYPE;
    }

    newwidth = format->bmiHeader.biWidth;
    newheight = format->bmiHeader.biHeight;

    TRACE("%p -> (%p) - %d %d\n", capBox, mT, newwidth, newheight);

    if (capBox->height == newheight && capBox->width == newwidth)
        return S_OK;

    if(-1 == xioctl(capBox->fd, VIDIOCGWIN, &window))
    {
        ERR("ioctl(VIDIOCGWIN) failed (%d)\n", errno);
        return E_FAIL;
    }
    window.width = newwidth;
    window.height = newheight;
    if (xioctl(capBox->fd, VIDIOCSWIN, &window) == -1)
    {
        TRACE("using software resize: %dx%d -> %dx%d\n",
              window.width, window.height, capBox->width, capBox->height);
        capBox->swresize = TRUE;
    }
    else
    {
        capBox->height = window.height;
        capBox->width = window.width;
        capBox->swresize = FALSE;
    }
    capBox->outputwidth = window.width;
    capBox->outputheight = window.height;
    return S_OK;
}

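/* Build an AM_MEDIA_TYPE describing the current RGB24 output format
 * (FORMAT_VideoInfo with a VIDEOINFOHEADER). */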
HRESULT qcap_driver_get_format(const Capture *capBox, AM_MEDIA_TYPE ** mT)
{
    VIDEOINFOHEADER *vi;

    mT[0] = CoTaskMemAlloc(sizeof(AM_MEDIA_TYPE));
    if (!mT[0])
        return E_OUTOFMEMORY;
    vi = CoTaskMemAlloc(sizeof(VIDEOINFOHEADER));
    mT[0]->cbFormat = sizeof(VIDEOINFOHEADER);
    if (!vi)
    {
        CoTaskMemFree(mT[0]);
        mT[0] = NULL;
        return E_OUTOFMEMORY;
    }
    mT[0]->majortype = MEDIATYPE_Video;
    mT[0]->subtype = MEDIASUBTYPE_RGB24;
    mT[0]->formattype = FORMAT_VideoInfo;
    mT[0]->bFixedSizeSamples = TRUE;
    mT[0]->bTemporalCompression = FALSE;
    mT[0]->pUnk = NULL;
    mT[0]->lSampleSize = capBox->outputwidth * capBox->outputheight * capBox->bitDepth / 8;
    TRACE("Output format: %dx%d - %d bits = %u KB\n", capBox->outputwidth,
          capBox->outputheight, capBox->bitDepth, mT[0]->lSampleSize/1024);
    vi->rcSource.left = 0; vi->rcSource.top = 0;
    vi->rcTarget.left = 0; vi->rcTarget.top = 0;
    vi->rcSource.right = capBox->width; vi->rcSource.bottom = capBox->height;
    vi->rcTarget.right = capBox->outputwidth; vi->rcTarget.bottom = capBox->outputheight;
    vi->dwBitRate = capBox->fps * mT[0]->lSampleSize;
    vi->dwBitErrorRate = 0;
    vi->AvgTimePerFrame = (LONGLONG)10000000.0 / (LONGLONG)capBox->fps;
    vi->bmiHeader.biSize = 40;
    vi->bmiHeader.biWidth = capBox->outputwidth;
    vi->bmiHeader.biHeight = capBox->outputheight;
    vi->bmiHeader.biPlanes = 1;
    vi->bmiHeader.biBitCount = 24;
    vi->bmiHeader.biCompression = BI_RGB;
    vi->bmiHeader.biSizeImage = mT[0]->lSampleSize;
    vi->bmiHeader.biClrUsed = vi->bmiHeader.biClrImportant = 0;
    vi->bmiHeader.biXPelsPerMeter = 100;
    vi->bmiHeader.biYPelsPerMeter = 100;
    mT[0]->pbFormat = (void *)vi;
    dump_AM_MEDIA_TYPE(mT[0]);
    return S_OK;
}

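/* The V4L1 picture controls (brightness, contrast, hue, saturation) are
 * exposed as VideoProcAmp properties.  V4L1 uses 16-bit control values, so
 * the same 0-65535 range and stepping are reported for every property. */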
HRESULT qcap_driver_get_prop_range( Capture *capBox,
        VideoProcAmpProperty Property, LONG *pMin, LONG *pMax,
        LONG *pSteppingDelta, LONG *pDefault, LONG *pCapsFlags )
{
    TRACE("%p -> %d %p %p %p %p %p\n", capBox, Property,
          pMin, pMax, pSteppingDelta, pDefault, pCapsFlags);

    switch (Property)
    {
    case VideoProcAmp_Brightness:
        *pDefault = capBox->dbrightness;
        break;
    case VideoProcAmp_Contrast:
        *pDefault = capBox->dcontrast;
        break;
    case VideoProcAmp_Hue:
        *pDefault = capBox->dhue;
        break;
    case VideoProcAmp_Saturation:
        *pDefault = capBox->dcolour;
        break;
    default:
        FIXME("Not implemented %d\n", Property);
        return E_NOTIMPL;
    }
    *pMin = 0;
    *pMax = 65535;
    *pSteppingDelta = 65536/256;
    *pCapsFlags = VideoProcAmp_Flags_Manual;
    return S_OK;
}

HRESULT qcap_driver_get_prop( Capture *capBox,
        VideoProcAmpProperty Property, LONG *lValue, LONG *Flags )
{
    TRACE("%p -> %d %p %p\n", capBox, Property, lValue, Flags);

    switch (Property)
    {
    case VideoProcAmp_Brightness:
        *lValue = capBox->pict.brightness;
        break;
    case VideoProcAmp_Contrast:
        *lValue = capBox->pict.contrast;
        break;
    case VideoProcAmp_Hue:
        *lValue = capBox->pict.hue;
        break;
    case VideoProcAmp_Saturation:
        *lValue = capBox->pict.colour;
        break;
    default:
        FIXME("Not implemented %d\n", Property);
        return E_NOTIMPL;
    }
    *Flags = VideoProcAmp_Flags_Manual;
    return S_OK;
}

HRESULT qcap_driver_set_prop(Capture *capBox, VideoProcAmpProperty Property,
        LONG lValue, LONG Flags)
{
    TRACE("%p -> %d %d %d\n", capBox, Property, lValue, Flags);

    switch (Property)
    {
    case VideoProcAmp_Brightness:
        capBox->pict.brightness = lValue;
        break;
    case VideoProcAmp_Contrast:
        capBox->pict.contrast = lValue;
        break;
    case VideoProcAmp_Hue:
        capBox->pict.hue = lValue;
        break;
    case VideoProcAmp_Saturation:
        capBox->pict.colour = lValue;
        break;
    default:
        FIXME("Not implemented %d\n", Property);
        return E_NOTIMPL;
    }

    if (xioctl(capBox->fd, VIDIOCSPICT, &capBox->pict) == -1)
    {
        ERR("ioctl(VIDIOCSPICT) failed (%d)\n",errno);
        return E_FAIL;
    }
    return S_OK;
}

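/* Copy a captured RGB frame into the intermediate RGB24 buffer.  24 bpp
 * frames are copied verbatim; 32 bpp frames are repacked to 24 bpp by
 * skipping the first byte of every four-byte pixel. */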
static void renderer_RGB(const Capture *capBox, LPBYTE bufferin, const BYTE *stream)
{
    int depth = renderlist_V4l[capBox->pict.palette].depth;
    int size = capBox->height * capBox->width * depth / 8;
    int pointer, offset;

    switch (depth)
    {
    case 24:
        memcpy(bufferin, stream, size);
        break;
    case 32:
        pointer = 0;
        offset = 1;
        while (pointer + offset <= size)
        {
            bufferin[pointer] = stream[pointer + offset];
            pointer++;
            bufferin[pointer] = stream[pointer + offset];
            pointer++;
            bufferin[pointer] = stream[pointer + offset];
            pointer++;
            offset++;
        }
        break;
    default:
        ERR("Unknown bit depth %d\n", depth);
        return;
    }
}

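/* Map the V4L1 palette id to the matching YUV_Format and let YUV_To_RGB24()
 * convert the frame into the intermediate RGB24 buffer. */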
static void renderer_YUV(const Capture *capBox, LPBYTE bufferin, const BYTE *stream)
{
    enum YUV_Format format;

    switch (capBox->pict.palette)
    {
    case 7: /* YUV422 - same as YUYV */
    case 8: /* YUYV */
        format = YUYV;
        break;
    case 9: /* UYVY */
        format = UYVY;
        break;
    case 11: /* YUV411 */
        format = UYYVYY;
        break;
    case 13: /* YUV422P */
        format = YUVP_421;
        break;
    case 14: /* YUV411P */
        format = YUVP_441;
        break;
    case 15: /* YUV420P */
        format = YUVP_422;
        break;
    case 16: /* YUV410P */
        format = YUVP_444;
        break;
    default:
        ERR("Unknown palette %d\n", capBox->pict.palette);
        return;
    }
    YUV_To_RGB24(format, bufferin, stream, capBox->width, capBox->height);
}

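/* Copy the rendered RGB24 frame into the output sample.  The image is always
 * flipped vertically because Windows DIBs are stored bottom-up; when software
 * resizing is enabled the frame is additionally scaled to the negotiated
 * output size with GDI's StretchBlt(). */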
static void Resize(const Capture * capBox, LPBYTE output, const BYTE *input)
{
    /* the whole image needs to be flipped vertically,
       because Windows DIBs are stored bottom-up */
    if (!capBox->swresize)
    {
        int depth = capBox->bitDepth / 8;
        int inoffset = 0, outoffset = capBox->height * capBox->width * depth;
        int ow = capBox->width * depth;
        while (outoffset > 0)
        {
            int x;
            outoffset -= ow;
            for (x = 0; x < ow; x++)
                output[outoffset + x] = input[inoffset + x];
            inoffset += ow;
        }
    }
    else
    {
        HDC dc_s, dc_d;
        HBITMAP bmp_s, bmp_d;
        int depth = capBox->bitDepth / 8;
        int inoffset = 0, outoffset = (capBox->outputheight) * capBox->outputwidth * depth;
        int ow = capBox->outputwidth * depth;
        LPBYTE myarray;

        /* FIXME: Improve software resizing: add error checks and optimize */

        myarray = CoTaskMemAlloc(capBox->outputwidth * capBox->outputheight * depth);
        dc_s = CreateCompatibleDC(NULL);
        dc_d = CreateCompatibleDC(NULL);
        bmp_s = CreateBitmap(capBox->width, capBox->height, 1, capBox->bitDepth, input);
        bmp_d = CreateBitmap(capBox->outputwidth, capBox->outputheight, 1, capBox->bitDepth, NULL);
        SelectObject(dc_s, bmp_s);
        SelectObject(dc_d, bmp_d);
        StretchBlt(dc_d, 0, 0, capBox->outputwidth, capBox->outputheight,
                   dc_s, 0, 0, capBox->width, capBox->height, SRCCOPY);
        GetBitmapBits(bmp_d, capBox->outputwidth * capBox->outputheight * depth, myarray);
        while (outoffset > 0)
        {
            int i;

            outoffset -= ow;
            for (i = 0; i < ow; i++)
                output[outoffset + i] = myarray[inoffset + i];
            inoffset += ow;
        }
        CoTaskMemFree(myarray);
        DeleteDC(dc_s);
        DeleteDC(dc_d);
        DeleteObject(bmp_s);
        DeleteObject(bmp_d);
    }
}

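/* Block until the current frame has been captured and return a pointer to its
 * data: the mmap'ed buffer for the current frame, or the read() buffer. */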
static void V4l_GetFrame(Capture * capBox, unsigned char ** pInput)
{
    if (capBox->mmap)
    {
        if (xioctl(capBox->fd, VIDIOCSYNC, &capBox->grab_buf[capBox->curframe]) == -1)
            WARN("Syncing ioctl failed: %d\n", errno);

        *pInput = capBox->pmap + capBox->gb_buffers.offsets[capBox->curframe];
    }
    else
    {
        int retval;
        while ((retval = video_read(capBox->fd, capBox->grab_data, capBox->imagesize)) == -1)
            if (errno != EAGAIN) break;
        if (retval == -1)
            WARN("Error occurred while reading from device: %s\n", strerror(errno));
        *pInput = (unsigned char*) capBox->grab_data;
    }
}

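/* Requeue the current frame for capture (mmap mode) and advance curframe to
 * the next buffer, wrapping around to 0. */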
static void V4l_FreeFrame(Capture * capBox)
{
    TRACE("\n");
    if (capBox->mmap)
    {
        if (xioctl(capBox->fd, VIDIOCMCAPTURE, &capBox->grab_buf[capBox->curframe]) == -1)
            ERR("Freeing frame for capture failed: %s\n", strerror(errno));
    }
    if (++capBox->curframe == capBox->buffers)
        capBox->curframe = 0;
}

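/* Capture worker thread: prepare the buffers, queue every frame once, then
 * loop fetching a delivery buffer from the output pin, grabbing a frame,
 * converting it to RGB24, resizing/flipping it into the sample and delivering
 * it downstream, until the filter is stopped or delivery fails. */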
static DWORD WINAPI ReadThread(LPVOID lParam)
{
    Capture * capBox = lParam;
    HRESULT hr;
    IMediaSample *pSample = NULL;
    ULONG framecount = 0;
    unsigned char *pTarget, *pInput, *pOutput;

    hr = V4l_Prepare(capBox);
    if (FAILED(hr))
    {
        ERR("Stop IFilterGraph: %x\n", hr);
        capBox->thread = 0;
        capBox->stopped = TRUE;
        return 0;
    }

    pOutput = CoTaskMemAlloc(capBox->width * capBox->height * capBox->bitDepth / 8);
    capBox->curframe = 0;
    do {
        V4l_FreeFrame(capBox);
    } while (capBox->curframe != 0);

    while (1)
    {
        EnterCriticalSection(&capBox->CritSect);
        if (capBox->stopped)
            break;
        hr = BaseOutputPinImpl_GetDeliveryBuffer((BaseOutputPin *)capBox->pOut, &pSample, NULL, NULL, 0);
        if (SUCCEEDED(hr))
        {
            int len;

            if (!capBox->swresize)
                len = capBox->height * capBox->width * capBox->bitDepth / 8;
            else
                len = capBox->outputheight * capBox->outputwidth * capBox->bitDepth / 8;
            IMediaSample_SetActualDataLength(pSample, len);

            len = IMediaSample_GetActualDataLength(pSample);
            TRACE("Data length: %d KB\n", len / 1024);

            IMediaSample_GetPointer(pSample, &pTarget);
            /* FIXME: Check return values.. */
            V4l_GetFrame(capBox, &pInput);
            capBox->renderer(capBox, pOutput, pInput);
            Resize(capBox, pTarget, pOutput);
            hr = BaseOutputPinImpl_Deliver((BaseOutputPin *)capBox->pOut, pSample);
            TRACE("%p -> Frame %u: %x\n", capBox, ++framecount, hr);
            IMediaSample_Release(pSample);
            V4l_FreeFrame(capBox);
        }
        if (FAILED(hr) && hr != VFW_E_NOT_CONNECTED)
        {
            TRACE("Return %x, stop IFilterGraph\n", hr);
            V4l_Unprepare(capBox);
            capBox->thread = 0;
            capBox->stopped = TRUE;
            break;
        }
        LeaveCriticalSection(&capBox->CritSect);
    }

    LeaveCriticalSection(&capBox->CritSect);
    CoTaskMemFree(pOutput);
    return 0;
}

HRESULT qcap_driver_run(Capture *capBox, FILTER_STATE *state)
{
    HANDLE thread;
    HRESULT hr;

    TRACE("%p -> (%p)\n", capBox, state);

    if (*state == State_Running) return S_OK;

    EnterCriticalSection(&capBox->CritSect);

    capBox->stopped = FALSE;

    if (*state == State_Stopped)
    {
        *state = State_Running;
        if (!capBox->iscommitted)
        {
            ALLOCATOR_PROPERTIES ap, actual;
            BaseOutputPin *out;

            capBox->iscommitted = TRUE;

            ap.cBuffers = 3;
            if (!capBox->swresize)
                ap.cbBuffer = capBox->width * capBox->height;
            else
                ap.cbBuffer = capBox->outputwidth * capBox->outputheight;
            ap.cbBuffer = (ap.cbBuffer * capBox->bitDepth) / 8;
            ap.cbAlign = 1;
            ap.cbPrefix = 0;

            out = (BaseOutputPin *)capBox->pOut;

            hr = IMemAllocator_SetProperties(out->pAllocator, &ap, &actual);

            if (SUCCEEDED(hr))
                hr = IMemAllocator_Commit(out->pAllocator);

            TRACE("Committing allocator: %x\n", hr);
        }

        thread = CreateThread(NULL, 0, ReadThread, capBox, 0, NULL);
        if (thread)
        {
            capBox->thread = thread;
            SetThreadPriority(thread, THREAD_PRIORITY_LOWEST);
            LeaveCriticalSection(&capBox->CritSect);
            return S_OK;
        }
715 ERR("Creating thread failed.. %u\n", GetLastError());
        LeaveCriticalSection(&capBox->CritSect);
        return E_FAIL;
    }

    ResumeThread(capBox->thread);
    *state = State_Running;
    LeaveCriticalSection(&capBox->CritSect);
    return S_OK;
}

HRESULT qcap_driver_pause(Capture *capBox, FILTER_STATE *state)
{
    TRACE("%p -> (%p)\n", capBox, state);

    if (*state == State_Paused)
        return S_OK;
    if (*state == State_Stopped)
        qcap_driver_run(capBox, state);

    EnterCriticalSection(&capBox->CritSect);
    *state = State_Paused;
    SuspendThread(capBox->thread);
    LeaveCriticalSection(&capBox->CritSect);

    return S_OK;
}

HRESULT qcap_driver_stop(Capture *capBox, FILTER_STATE *state)
{
    TRACE("%p -> (%p)\n", capBox, state);

    if (*state == State_Stopped)
        return S_OK;

    EnterCriticalSection(&capBox->CritSect);

    if (capBox->thread)
    {
        if (*state == State_Paused)
            ResumeThread(capBox->thread);
        capBox->stopped = TRUE;
        capBox->thread = 0;
        if (capBox->iscommitted)
        {
            BaseOutputPin *out;
            HRESULT hr;

            capBox->iscommitted = FALSE;

            out = (BaseOutputPin*)capBox->pOut;

            hr = IMemAllocator_Decommit(out->pAllocator);

            if (hr != S_OK && hr != VFW_E_NOT_COMMITTED)
                WARN("Decommitting allocator: %x\n", hr);
        }
        V4l_Unprepare(capBox);
    }

    *state = State_Stopped;
    LeaveCriticalSection(&capBox->CritSect);
    return S_OK;
}

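/* Open /dev/video<card> (e.g. /dev/video0 for card 0), check that it is a
 * capture device, pick a palette we can render (walking fallback_V4l if the
 * device's current palette is unsupported) and read the default capture
 * window.  Returns NULL on failure. */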
Capture * qcap_driver_init( IPin *pOut, USHORT card )
{
    Capture * capBox = NULL;
    char device[20];
    struct video_capability capa;
    struct video_picture pict;
    struct video_window window;

    YUV_Init();
    video_init();

    capBox = CoTaskMemAlloc(sizeof(Capture));
    if (!capBox)
        goto error;

    /* capBox->vtbl = &defboxVtbl; */

    InitializeCriticalSection( &capBox->CritSect );
    capBox->CritSect.DebugInfo->Spare[0] = (DWORD_PTR)(__FILE__ ": Capture.CritSect");

    sprintf(device, "/dev/video%i", card);
    TRACE("opening %s\n", device);
#ifdef O_CLOEXEC
    if ((capBox->fd = video_open(device, O_RDWR | O_NONBLOCK | O_CLOEXEC)) == -1 && errno == EINVAL)
#endif
        capBox->fd = video_open(device, O_RDWR | O_NONBLOCK);
    if (capBox->fd == -1)
    {
        WARN("open failed (%d)\n", errno);
        goto error;
    }
    fcntl( capBox->fd, F_SETFD, FD_CLOEXEC );  /* in case O_CLOEXEC isn't supported */

    memset(&capa, 0, sizeof(capa));

    if (xioctl(capBox->fd, VIDIOCGCAP, &capa) == -1)
    {
        WARN("ioctl(VIDIOCGCAP) failed (%d)\n", errno);
        goto error;
    }

    if (!(capa.type & VID_TYPE_CAPTURE))
    {
        WARN("not a video capture device\n");
        goto error;
    }

    TRACE("%d inputs on %s\n", capa.channels, capa.name );

    if (xioctl(capBox->fd, VIDIOCGPICT, &pict) == -1)
    {
        ERR("ioctl(VIDIOCGPICT) failed (%d)\n", errno );
        goto error;
    }

    TRACE("depth %d palette %d (%s) hue %d color %d contrast %d\n",
          pict.depth, pict.palette, renderlist_V4l[pict.palette].name,
          pict.hue, pict.colour, pict.contrast );

    capBox->dbrightness = pict.brightness;
    capBox->dcolour = pict.colour;
    capBox->dhue = pict.hue;
    capBox->dcontrast = pict.contrast;

    if (!renderlist_V4l[pict.palette].renderer)
    {
        int palet = pict.palette, i;

        TRACE("No renderer available for %s, falling back to defaults\n",
              renderlist_V4l[pict.palette].name);
        capBox->renderer = NULL;
        for (i = 0; fallback_V4l[i] >= 0; i++)
        {
            int n = fallback_V4l[i];

            if (renderlist_V4l[n].renderer == NULL)
                continue;

            pict.depth = renderlist_V4l[n].depth;
            pict.palette = n;
            if (xioctl(capBox->fd, VIDIOCSPICT, &pict) == -1)
            {
                TRACE("Could not render with %s (%d)\n",
                      renderlist_V4l[n].name, n);
                continue;
            }
            TRACE("using renderer %s (%d)\n",
                  renderlist_V4l[n].name, n);
            capBox->renderer = renderlist_V4l[n].renderer;
            break;
        }

        if (!capBox->renderer)
        {
            ERR("video format %s isn't available\n",
                renderlist_V4l[palet].name);
            goto error;
        }
    }
    else
    {
        TRACE("Using the suggested format\n");
        capBox->renderer = renderlist_V4l[pict.palette].renderer;
    }
    memcpy(&capBox->pict, &pict, sizeof(struct video_picture));

    memset(&window, 0, sizeof(window));
    if (xioctl(capBox->fd, VIDIOCGWIN, &window) == -1)
    {
        WARN("VIDIOCGWIN failed (%d)\n", errno);
        goto error;
    }

    capBox->height = capBox->outputheight = window.height;
    capBox->width = capBox->outputwidth = window.width;
    capBox->swresize = FALSE;
    capBox->bitDepth = 24;
    capBox->pOut = pOut;
    capBox->fps = 3;
    capBox->stopped = FALSE;
    capBox->curframe = 0;
    capBox->iscommitted = FALSE;

    TRACE("format: %d bits - %d x %d\n", capBox->bitDepth, capBox->width, capBox->height);

    return capBox;

error:
    if (capBox)
        qcap_driver_destroy( capBox );

    return NULL;
}

#else

Capture * qcap_driver_init( IPin *pOut, USHORT card )
{
    const char msg[] =
        "The v4l headers were not available at compile time,\n"
        "so video capture support is not available.\n";
    MESSAGE(msg);
    return NULL;
}

#define FAIL_WITH_ERR \
    ERR("v4l absent: shouldn't be called\n"); \
    return E_NOTIMPL

HRESULT qcap_driver_destroy(Capture *capBox)
{
    FAIL_WITH_ERR;
}

HRESULT qcap_driver_set_format(Capture *capBox, AM_MEDIA_TYPE * mT)
{
    FAIL_WITH_ERR;
}

HRESULT qcap_driver_get_format(const Capture *capBox, AM_MEDIA_TYPE ** mT)
{
    FAIL_WITH_ERR;
}

HRESULT qcap_driver_get_prop_range( Capture *capBox,
        VideoProcAmpProperty Property, LONG *pMin, LONG *pMax,
        LONG *pSteppingDelta, LONG *pDefault, LONG *pCapsFlags )
{
    FAIL_WITH_ERR;
}

HRESULT qcap_driver_get_prop(Capture *capBox,
        VideoProcAmpProperty Property, LONG *lValue, LONG *Flags)
{
    FAIL_WITH_ERR;
}

HRESULT qcap_driver_set_prop(Capture *capBox, VideoProcAmpProperty Property,
        LONG lValue, LONG Flags)
{
    FAIL_WITH_ERR;
}

HRESULT qcap_driver_run(Capture *capBox, FILTER_STATE *state)
{
    FAIL_WITH_ERR;
}

HRESULT qcap_driver_pause(Capture *capBox, FILTER_STATE *state)
{
    FAIL_WITH_ERR;
}

HRESULT qcap_driver_stop(Capture *capBox, FILTER_STATE *state)
{
    FAIL_WITH_ERR;
}

#endif /* defined(VIDIOCMCAPTURE) */