1 /*
2  *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
3  *
4  *  Use of this source code is governed by a BSD-style license
5  *  that can be found in the LICENSE file in the root of the source
6  *  tree. An additional intellectual property rights grant can be found
7  *  in the file PATENTS.  All contributing project authors may
8  *  be found in the AUTHORS file in the root of the source tree.
9  */
10 
11 #include <errno.h>
12 #include <fcntl.h>
13 #include <stdio.h>
14 #include <string.h>
15 #include <sys/ioctl.h>
16 #include <sys/mman.h>
17 #include <sys/stat.h>
18 #include <unistd.h>
19 
20 //v4l includes
21 #if defined(__NetBSD__) || defined(__OpenBSD__)
22 #include <sys/videoio.h>
23 #elif defined(__sun)
24 #include <sys/videodev2.h>
25 #else
26 #include <linux/videodev2.h>
27 #endif
28 #ifdef HAVE_LIBV4L2
29 #include <libv4l2.h>
30 #endif
31 
32 #include <new>
33 
34 #include "webrtc/modules/video_capture/linux/video_capture_linux.h"
35 #include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
36 #include "webrtc/system_wrappers/interface/ref_count.h"
37 #include "webrtc/system_wrappers/interface/trace.h"
38 
39 #ifdef HAVE_LIBV4L2
40 #define open	v4l2_open
41 #define close	v4l2_close
42 #define dup	v4l2_dup
43 #define ioctl	v4l2_ioctl
44 #define mmap	v4l2_mmap
45 #define munmap	v4l2_munmap
46 #endif
47 
48 namespace webrtc
49 {
50 namespace videocapturemodule
51 {
Create(const int32_t id,const char * deviceUniqueId)52 VideoCaptureModule* VideoCaptureImpl::Create(const int32_t id,
53                                              const char* deviceUniqueId)
54 {
55     RefCountImpl<videocapturemodule::VideoCaptureModuleV4L2>* implementation =
56         new RefCountImpl<videocapturemodule::VideoCaptureModuleV4L2>(id);
57 
58     if (!implementation || implementation->Init(deviceUniqueId) != 0)
59     {
60         delete implementation;
61         implementation = NULL;
62     }
63 
64     return implementation;
65 }
66 
// Constructs the module in an idle state: no device is opened here, and all
// handles carry sentinel values until Init()/StartCapture() fill them in.
VideoCaptureModuleV4L2::VideoCaptureModuleV4L2(const int32_t id)
    : VideoCaptureImpl(id),
      _captureCritSect(CriticalSectionWrapper::CreateCriticalSection()),
      _deviceId(-1),                  // /dev/video index, resolved in Init()
      _deviceFd(-1),                  // device file descriptor, opened in StartCapture()
      _buffersAllocatedByDevice(-1),  // mmap buffer count, set in AllocateVideoBuffers()
      _currentWidth(-1),
      _currentHeight(-1),
      _currentFrameRate(-1),
      _captureStarted(false),
      _captureVideoType(kVideoI420),
      _pool(NULL)                     // mmap'ed capture buffer pool
{
}
81 
Init(const char * deviceUniqueIdUTF8)82 int32_t VideoCaptureModuleV4L2::Init(const char* deviceUniqueIdUTF8)
83 {
84     int len = strlen((const char*) deviceUniqueIdUTF8);
85     _deviceUniqueId = new (std::nothrow) char[len + 1];
86     if (_deviceUniqueId)
87     {
88         memcpy(_deviceUniqueId, deviceUniqueIdUTF8, len + 1);
89     }
90 
91     int device_index;
92     if (sscanf(deviceUniqueIdUTF8,"fake_%d", &device_index) == 1)
93     {
94       _deviceId = device_index;
95       return 0;
96     }
97 
98     int fd;
99     char device[32];
100     bool found = false;
101 
102     /* detect /dev/video [0-63] entries */
103     int n;
104     for (n = 0; n < 64; n++)
105     {
106         sprintf(device, "/dev/video%d", n);
107         if ((fd = open(device, O_RDONLY)) != -1)
108         {
109             // query device capabilities
110             struct v4l2_capability cap;
111             if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == 0)
112             {
113                 if (cap.bus_info[0] != 0)
114                 {
115                     if (strncmp((const char*) cap.bus_info,
116                                 (const char*) deviceUniqueIdUTF8,
117                                 strlen((const char*) deviceUniqueIdUTF8)) == 0) //match with device id
118                     {
119                         close(fd);
120                         found = true;
121                         break; // fd matches with device unique id supplied
122                     }
123                 }
124             }
125             close(fd); // close since this is not the matching device
126         }
127     }
128     if (!found)
129     {
130         WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, "no matching device found");
131         return -1;
132     }
133     _deviceId = n; //store the device id
134     return 0;
135 }
136 
~VideoCaptureModuleV4L2()137 VideoCaptureModuleV4L2::~VideoCaptureModuleV4L2()
138 {
139     StopCapture();
140     if (_captureCritSect)
141     {
142         delete _captureCritSect;
143     }
144     if (_deviceFd != -1)
145       close(_deviceFd);
146 }
147 
StartCapture(const VideoCaptureCapability & capability)148 int32_t VideoCaptureModuleV4L2::StartCapture(
149     const VideoCaptureCapability& capability)
150 {
151     if (_captureStarted)
152     {
153         if (capability.width == _currentWidth &&
154             capability.height == _currentHeight &&
155             _captureVideoType == capability.rawType)
156         {
157             return 0;
158         }
159         else
160         {
161             StopCapture();
162         }
163     }
164 
165     CriticalSectionScoped cs(_captureCritSect);
166     //first open /dev/video device
167     char device[20];
168     sprintf(device, "/dev/video%d", (int) _deviceId);
169 
170     if ((_deviceFd = open(device, O_RDWR | O_NONBLOCK, 0)) < 0)
171     {
172         WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
173                    "error in opening %s errono = %d", device, errno);
174         return -1;
175     }
176 
177     // Supported video formats in preferred order.
178     // If the requested resolution is larger than VGA, we prefer MJPEG. Go for
179     // I420 otherwise.
180     const int nFormats = 5;
181     unsigned int fmts[nFormats];
182     if (capability.width > 640 || capability.height > 480) {
183         fmts[0] = V4L2_PIX_FMT_MJPEG;
184         fmts[1] = V4L2_PIX_FMT_YUV420;
185         fmts[2] = V4L2_PIX_FMT_YUYV;
186         fmts[3] = V4L2_PIX_FMT_UYVY;
187         fmts[4] = V4L2_PIX_FMT_JPEG;
188     } else {
189         fmts[0] = V4L2_PIX_FMT_YUV420;
190         fmts[1] = V4L2_PIX_FMT_YUYV;
191         fmts[2] = V4L2_PIX_FMT_UYVY;
192         fmts[3] = V4L2_PIX_FMT_MJPEG;
193         fmts[4] = V4L2_PIX_FMT_JPEG;
194     }
195 
196     // Enumerate image formats.
197     struct v4l2_fmtdesc fmt;
198     int fmtsIdx = nFormats;
199     memset(&fmt, 0, sizeof(fmt));
200     fmt.index = 0;
201     fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
202     WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
203                  "Video Capture enumerats supported image formats:");
204     while (ioctl(_deviceFd, VIDIOC_ENUM_FMT, &fmt) == 0) {
205         WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
206                      "  { pixelformat = %c%c%c%c, description = '%s' }",
207                      fmt.pixelformat & 0xFF, (fmt.pixelformat>>8) & 0xFF,
208                      (fmt.pixelformat>>16) & 0xFF, (fmt.pixelformat>>24) & 0xFF,
209                      fmt.description);
210         // Match the preferred order.
211         for (int i = 0; i < nFormats; i++) {
212             if (fmt.pixelformat == fmts[i] && i < fmtsIdx)
213                 fmtsIdx = i;
214         }
215         // Keep enumerating.
216         fmt.index++;
217     }
218 
219     if (fmtsIdx == nFormats)
220     {
221         WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
222                      "no supporting video formats found");
223         return -1;
224     } else {
225         WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
226                      "We prefer format %c%c%c%c",
227                      fmts[fmtsIdx] & 0xFF, (fmts[fmtsIdx]>>8) & 0xFF,
228                      (fmts[fmtsIdx]>>16) & 0xFF, (fmts[fmtsIdx]>>24) & 0xFF);
229     }
230 
231     struct v4l2_format video_fmt;
232     memset(&video_fmt, 0, sizeof(struct v4l2_format));
233     video_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
234     video_fmt.fmt.pix.sizeimage = 0;
235     video_fmt.fmt.pix.width = capability.width;
236     video_fmt.fmt.pix.height = capability.height;
237     video_fmt.fmt.pix.pixelformat = fmts[fmtsIdx];
238 
239     if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV)
240         _captureVideoType = kVideoYUY2;
241     else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUV420)
242         _captureVideoType = kVideoI420;
243     else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_UYVY)
244         _captureVideoType = kVideoUYVY;
245     else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG ||
246              video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_JPEG)
247         _captureVideoType = kVideoMJPEG;
248 
249     //set format and frame size now
250     if (ioctl(_deviceFd, VIDIOC_S_FMT, &video_fmt) < 0)
251     {
252         WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
253                    "error in VIDIOC_S_FMT, errno = %d", errno);
254         return -1;
255     }
256 
257     // initialize current width and height
258     _currentWidth = video_fmt.fmt.pix.width;
259     _currentHeight = video_fmt.fmt.pix.height;
260     _captureDelay = 120;
261 
262     // Trying to set frame rate, before check driver capability.
263     bool driver_framerate_support = true;
264     struct v4l2_streamparm streamparms;
265     memset(&streamparms, 0, sizeof(streamparms));
266     streamparms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
267     if (ioctl(_deviceFd, VIDIOC_G_PARM, &streamparms) < 0) {
268         WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
269                    "error in VIDIOC_G_PARM errno = %d", errno);
270         driver_framerate_support = false;
271       // continue
272     } else {
273       // check the capability flag is set to V4L2_CAP_TIMEPERFRAME.
274       if (streamparms.parm.capture.capability == V4L2_CAP_TIMEPERFRAME) {
275         // driver supports the feature. Set required framerate.
276         memset(&streamparms, 0, sizeof(streamparms));
277         streamparms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
278         streamparms.parm.capture.timeperframe.numerator = 1;
279         streamparms.parm.capture.timeperframe.denominator = capability.maxFPS;
280         if (ioctl(_deviceFd, VIDIOC_S_PARM, &streamparms) < 0) {
281           WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
282                    "Failed to set the framerate. errno=%d", errno);
283           driver_framerate_support = false;
284         } else {
285           _currentFrameRate = capability.maxFPS;
286         }
287       }
288     }
289     // If driver doesn't support framerate control, need to hardcode.
290     // Hardcoding the value based on the frame size.
291     if (!driver_framerate_support) {
292       if(_currentWidth >= 800 && _captureVideoType != kVideoMJPEG) {
293         _currentFrameRate = 15;
294       } else {
295         _currentFrameRate = 30;
296       }
297     }
298 
299     if (!AllocateVideoBuffers())
300     {
301         WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
302                    "failed to allocate video capture buffers");
303         return -1;
304     }
305 
306     //start capture thread;
307     if (!_captureThread)
308     {
309         _captureThread = ThreadWrapper::CreateThread(
310             VideoCaptureModuleV4L2::CaptureThread, this, "CaptureThread");
311         _captureThread->Start();
312         _captureThread->SetPriority(kHighPriority);
313     }
314 
315     // Needed to start UVC camera - from the uvcview application
316     enum v4l2_buf_type type;
317     type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
318     if (ioctl(_deviceFd, VIDIOC_STREAMON, &type) == -1)
319     {
320         WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
321                      "Failed to turn on stream");
322         return -1;
323     }
324 
325     _captureStarted = true;
326     return 0;
327 }
328 
StopCapture()329 int32_t VideoCaptureModuleV4L2::StopCapture()
330 {
331     if (_captureThread) {
332         // Make sure the capture thread stop stop using the critsect.
333         _captureThread->Stop();
334         _captureThread.reset();
335     }
336 
337     CriticalSectionScoped cs(_captureCritSect);
338     if (_captureStarted)
339     {
340         _captureStarted = false;
341 
342         DeAllocateVideoBuffers();
343         close(_deviceFd);
344         _deviceFd = -1;
345     }
346 
347     return 0;
348 }
349 
350 //critical section protected by the caller
351 
AllocateVideoBuffers()352 bool VideoCaptureModuleV4L2::AllocateVideoBuffers()
353 {
354     struct v4l2_requestbuffers rbuffer;
355     memset(&rbuffer, 0, sizeof(v4l2_requestbuffers));
356 
357     rbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
358     rbuffer.memory = V4L2_MEMORY_MMAP;
359     rbuffer.count = kNoOfV4L2Bufffers;
360 
361     if (ioctl(_deviceFd, VIDIOC_REQBUFS, &rbuffer) < 0)
362     {
363         WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
364                    "Could not get buffers from device. errno = %d", errno);
365         return false;
366     }
367 
368     if (rbuffer.count > kNoOfV4L2Bufffers)
369         rbuffer.count = kNoOfV4L2Bufffers;
370 
371     _buffersAllocatedByDevice = rbuffer.count;
372 
373     //Map the buffers
374     _pool = new Buffer[rbuffer.count];
375 
376     for (unsigned int i = 0; i < rbuffer.count; i++)
377     {
378         struct v4l2_buffer buffer;
379         memset(&buffer, 0, sizeof(v4l2_buffer));
380         buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
381         buffer.memory = V4L2_MEMORY_MMAP;
382         buffer.index = i;
383 
384         if (ioctl(_deviceFd, VIDIOC_QUERYBUF, &buffer) < 0)
385         {
386             return false;
387         }
388 
389         _pool[i].start = mmap(NULL, buffer.length, PROT_READ | PROT_WRITE, MAP_SHARED,
390                               _deviceFd, buffer.m.offset);
391 
392         if (MAP_FAILED == _pool[i].start)
393         {
394             for (unsigned int j = 0; j < i; j++)
395                 munmap(_pool[j].start, _pool[j].length);
396             return false;
397         }
398 
399         _pool[i].length = buffer.length;
400 
401         if (ioctl(_deviceFd, VIDIOC_QBUF, &buffer) < 0)
402         {
403             return false;
404         }
405     }
406     return true;
407 }
408 
DeAllocateVideoBuffers()409 bool VideoCaptureModuleV4L2::DeAllocateVideoBuffers()
410 {
411     // unmap buffers
412     for (int i = 0; i < _buffersAllocatedByDevice; i++)
413         munmap(_pool[i].start, _pool[i].length);
414 
415     delete[] _pool;
416 
417     // turn off stream
418     enum v4l2_buf_type type;
419     type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
420     if (ioctl(_deviceFd, VIDIOC_STREAMOFF, &type) < 0)
421     {
422         WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
423                    "VIDIOC_STREAMOFF error. errno: %d", errno);
424     }
425 
426     return true;
427 }
428 
// Returns true while a capture session is active, i.e. between a successful
// StartCapture() and the matching StopCapture().
bool VideoCaptureModuleV4L2::CaptureStarted()
{
    return _captureStarted;
}
433 
CaptureThread(void * obj)434 bool VideoCaptureModuleV4L2::CaptureThread(void* obj)
435 {
436     return static_cast<VideoCaptureModuleV4L2*> (obj)->CaptureProcess();
437 }
// One pass of the capture loop: waits (up to 1s) for the device fd to become
// readable, dequeues a filled buffer, forwards the frame via IncomingFrame()
// and requeues the buffer. Returns false only on a fatal select() error so
// the thread wrapper stops; any other outcome returns true to keep looping.
// NOTE(review): the critical section is held across the blocking select(),
// so StopCapture() can wait up to a second for it — confirm this is intended
// before restructuring the locking.
bool VideoCaptureModuleV4L2::CaptureProcess()
{
    int retVal = 0;
    fd_set rSet;
    struct timeval timeout;

    _captureCritSect->Enter();

    FD_ZERO(&rSet);
    FD_SET(_deviceFd, &rSet);
    timeout.tv_sec = 1;
    timeout.tv_usec = 0;

    // Wait for a frame, a timeout, or an error on the device fd.
    retVal = select(_deviceFd + 1, &rSet, NULL, NULL, &timeout);
    if (retVal < 0 && errno != EINTR) // continue if interrupted
    {
        // select failed: stop the capture thread
        _captureCritSect->Leave();
        return false;
    }
    else if (retVal == 0)
    {
        // select timed out: nothing to read this pass
        _captureCritSect->Leave();
        return true;
    }
    else if (!FD_ISSET(_deviceFd, &rSet))
    {
        // no event on camera handle
        _captureCritSect->Leave();
        return true;
    }

    if (_captureStarted)
    {
        struct v4l2_buffer buf;
        memset(&buf, 0, sizeof(struct v4l2_buffer));
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        // dequeue a buffer - repeat until dequeued properly!
        // (EINTR is the only retried error; anything else skips this frame)
        while (ioctl(_deviceFd, VIDIOC_DQBUF, &buf) < 0)
        {
            if (errno != EINTR)
            {
                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                           "could not sync on a buffer on device %s", strerror(errno));
                _captureCritSect->Leave();
                return true;
            }
        }
        // Describe the frame using the format negotiated in StartCapture().
        VideoCaptureCapability frameInfo;
        frameInfo.width = _currentWidth;
        frameInfo.height = _currentHeight;
        frameInfo.rawType = _captureVideoType;

        // Deliver to the base class, which converts to I420 if needed.
        IncomingFrame((unsigned char*) _pool[buf.index].start,
                      buf.bytesused, frameInfo);
        // enqueue the buffer again so the driver can refill it
        if (ioctl(_deviceFd, VIDIOC_QBUF, &buf) == -1)
        {
            WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
                       "Failed to enqueue capture buffer");
        }
    }
    _captureCritSect->Leave();
    // Yield briefly so other threads (notably StopCapture) can take the lock.
    usleep(0);
    return true;
}
507 
CaptureSettings(VideoCaptureCapability & settings)508 int32_t VideoCaptureModuleV4L2::CaptureSettings(VideoCaptureCapability& settings)
509 {
510     settings.width = _currentWidth;
511     settings.height = _currentHeight;
512     settings.maxFPS = _currentFrameRate;
513     settings.rawType=_captureVideoType;
514 
515     return 0;
516 }
517 }  // namespace videocapturemodule
518 }  // namespace webrtc
519