1 /*
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
3 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
11 #include "modules/video_capture/linux/video_capture_linux.h"
12
13 #include <errno.h>
14 #include <fcntl.h>
15 #include <linux/videodev2.h>
16 #include <stdio.h>
17 #include <string.h>
18 #include <sys/ioctl.h>
19 #include <sys/mman.h>
20 #include <sys/select.h>
21 #include <time.h>
22 #include <unistd.h>
23
24 #include <new>
25 #include <string>
26
27 #include "api/scoped_refptr.h"
28 #include "media/base/video_common.h"
29 #include "modules/video_capture/video_capture.h"
30 #include "rtc_base/logging.h"
31 #include "rtc_base/ref_counted_object.h"
32
33 namespace webrtc {
34 namespace videocapturemodule {
Create(const char * deviceUniqueId)35 rtc::scoped_refptr<VideoCaptureModule> VideoCaptureImpl::Create(
36 const char* deviceUniqueId) {
37 rtc::scoped_refptr<VideoCaptureModuleV4L2> implementation(
38 new rtc::RefCountedObject<VideoCaptureModuleV4L2>());
39
40 if (implementation->Init(deviceUniqueId) != 0)
41 return nullptr;
42
43 return implementation;
44 }
45
// Initializes every member to a "not configured" sentinel (-1 / false /
// NULL). Real device setup happens later in Init() and StartCapture().
VideoCaptureModuleV4L2::VideoCaptureModuleV4L2()
    : VideoCaptureImpl(),
      _deviceId(-1),
      _deviceFd(-1),
      _buffersAllocatedByDevice(-1),
      _currentWidth(-1),
      _currentHeight(-1),
      _currentFrameRate(-1),
      _captureStarted(false),
      _captureVideoType(VideoType::kI420),
      _pool(NULL) {}
57
Init(const char * deviceUniqueIdUTF8)58 int32_t VideoCaptureModuleV4L2::Init(const char* deviceUniqueIdUTF8) {
59 int len = strlen((const char*)deviceUniqueIdUTF8);
60 _deviceUniqueId = new (std::nothrow) char[len + 1];
61 if (_deviceUniqueId) {
62 memcpy(_deviceUniqueId, deviceUniqueIdUTF8, len + 1);
63 }
64
65 int fd;
66 char device[32];
67 bool found = false;
68
69 /* detect /dev/video [0-63] entries */
70 int n;
71 for (n = 0; n < 64; n++) {
72 sprintf(device, "/dev/video%d", n);
73 if ((fd = open(device, O_RDONLY)) != -1) {
74 // query device capabilities
75 struct v4l2_capability cap;
76 if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == 0) {
77 if (cap.bus_info[0] != 0) {
78 if (strncmp((const char*)cap.bus_info,
79 (const char*)deviceUniqueIdUTF8,
80 strlen((const char*)deviceUniqueIdUTF8)) ==
81 0) // match with device id
82 {
83 close(fd);
84 found = true;
85 break; // fd matches with device unique id supplied
86 }
87 }
88 }
89 close(fd); // close since this is not the matching device
90 }
91 }
92 if (!found) {
93 RTC_LOG(LS_INFO) << "no matching device found";
94 return -1;
95 }
96 _deviceId = n; // store the device id
97 return 0;
98 }
99
~VideoCaptureModuleV4L2()100 VideoCaptureModuleV4L2::~VideoCaptureModuleV4L2() {
101 StopCapture();
102 if (_deviceFd != -1)
103 close(_deviceFd);
104 }
105
StartCapture(const VideoCaptureCapability & capability)106 int32_t VideoCaptureModuleV4L2::StartCapture(
107 const VideoCaptureCapability& capability) {
108 if (_captureStarted) {
109 if (capability.width == _currentWidth &&
110 capability.height == _currentHeight &&
111 _captureVideoType == capability.videoType) {
112 return 0;
113 } else {
114 StopCapture();
115 }
116 }
117
118 rtc::CritScope cs(&_captureCritSect);
119 // first open /dev/video device
120 char device[20];
121 sprintf(device, "/dev/video%d", (int)_deviceId);
122
123 if ((_deviceFd = open(device, O_RDWR | O_NONBLOCK, 0)) < 0) {
124 RTC_LOG(LS_INFO) << "error in opening " << device << " errono = " << errno;
125 return -1;
126 }
127
128 // Supported video formats in preferred order.
129 // If the requested resolution is larger than VGA, we prefer MJPEG. Go for
130 // I420 otherwise.
131 const int nFormats = 5;
132 unsigned int fmts[nFormats];
133 if (capability.width > 640 || capability.height > 480) {
134 fmts[0] = V4L2_PIX_FMT_MJPEG;
135 fmts[1] = V4L2_PIX_FMT_YUV420;
136 fmts[2] = V4L2_PIX_FMT_YUYV;
137 fmts[3] = V4L2_PIX_FMT_UYVY;
138 fmts[4] = V4L2_PIX_FMT_JPEG;
139 } else {
140 fmts[0] = V4L2_PIX_FMT_YUV420;
141 fmts[1] = V4L2_PIX_FMT_YUYV;
142 fmts[2] = V4L2_PIX_FMT_UYVY;
143 fmts[3] = V4L2_PIX_FMT_MJPEG;
144 fmts[4] = V4L2_PIX_FMT_JPEG;
145 }
146
147 // Enumerate image formats.
148 struct v4l2_fmtdesc fmt;
149 int fmtsIdx = nFormats;
150 memset(&fmt, 0, sizeof(fmt));
151 fmt.index = 0;
152 fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
153 RTC_LOG(LS_INFO) << "Video Capture enumerats supported image formats:";
154 while (ioctl(_deviceFd, VIDIOC_ENUM_FMT, &fmt) == 0) {
155 RTC_LOG(LS_INFO) << " { pixelformat = "
156 << cricket::GetFourccName(fmt.pixelformat)
157 << ", description = '" << fmt.description << "' }";
158 // Match the preferred order.
159 for (int i = 0; i < nFormats; i++) {
160 if (fmt.pixelformat == fmts[i] && i < fmtsIdx)
161 fmtsIdx = i;
162 }
163 // Keep enumerating.
164 fmt.index++;
165 }
166
167 if (fmtsIdx == nFormats) {
168 RTC_LOG(LS_INFO) << "no supporting video formats found";
169 return -1;
170 } else {
171 RTC_LOG(LS_INFO) << "We prefer format "
172 << cricket::GetFourccName(fmts[fmtsIdx]);
173 }
174
175 struct v4l2_format video_fmt;
176 memset(&video_fmt, 0, sizeof(struct v4l2_format));
177 video_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
178 video_fmt.fmt.pix.sizeimage = 0;
179 video_fmt.fmt.pix.width = capability.width;
180 video_fmt.fmt.pix.height = capability.height;
181 video_fmt.fmt.pix.pixelformat = fmts[fmtsIdx];
182
183 if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV)
184 _captureVideoType = VideoType::kYUY2;
185 else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUV420)
186 _captureVideoType = VideoType::kI420;
187 else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_UYVY)
188 _captureVideoType = VideoType::kUYVY;
189 else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG ||
190 video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_JPEG)
191 _captureVideoType = VideoType::kMJPEG;
192
193 // set format and frame size now
194 if (ioctl(_deviceFd, VIDIOC_S_FMT, &video_fmt) < 0) {
195 RTC_LOG(LS_INFO) << "error in VIDIOC_S_FMT, errno = " << errno;
196 return -1;
197 }
198
199 // initialize current width and height
200 _currentWidth = video_fmt.fmt.pix.width;
201 _currentHeight = video_fmt.fmt.pix.height;
202
203 // Trying to set frame rate, before check driver capability.
204 bool driver_framerate_support = true;
205 struct v4l2_streamparm streamparms;
206 memset(&streamparms, 0, sizeof(streamparms));
207 streamparms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
208 if (ioctl(_deviceFd, VIDIOC_G_PARM, &streamparms) < 0) {
209 RTC_LOG(LS_INFO) << "error in VIDIOC_G_PARM errno = " << errno;
210 driver_framerate_support = false;
211 // continue
212 } else {
213 // check the capability flag is set to V4L2_CAP_TIMEPERFRAME.
214 if (streamparms.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) {
215 // driver supports the feature. Set required framerate.
216 memset(&streamparms, 0, sizeof(streamparms));
217 streamparms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
218 streamparms.parm.capture.timeperframe.numerator = 1;
219 streamparms.parm.capture.timeperframe.denominator = capability.maxFPS;
220 if (ioctl(_deviceFd, VIDIOC_S_PARM, &streamparms) < 0) {
221 RTC_LOG(LS_INFO) << "Failed to set the framerate. errno=" << errno;
222 driver_framerate_support = false;
223 } else {
224 _currentFrameRate = capability.maxFPS;
225 }
226 }
227 }
228 // If driver doesn't support framerate control, need to hardcode.
229 // Hardcoding the value based on the frame size.
230 if (!driver_framerate_support) {
231 if (_currentWidth >= 800 && _captureVideoType != VideoType::kMJPEG) {
232 _currentFrameRate = 15;
233 } else {
234 _currentFrameRate = 30;
235 }
236 }
237
238 if (!AllocateVideoBuffers()) {
239 RTC_LOG(LS_INFO) << "failed to allocate video capture buffers";
240 return -1;
241 }
242
243 // start capture thread;
244 if (!_captureThread) {
245 quit_ = false;
246 _captureThread.reset(
247 new rtc::PlatformThread(VideoCaptureModuleV4L2::CaptureThread, this,
248 "CaptureThread", rtc::kHighPriority));
249 _captureThread->Start();
250 }
251
252 // Needed to start UVC camera - from the uvcview application
253 enum v4l2_buf_type type;
254 type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
255 if (ioctl(_deviceFd, VIDIOC_STREAMON, &type) == -1) {
256 RTC_LOG(LS_INFO) << "Failed to turn on stream";
257 return -1;
258 }
259
260 _captureStarted = true;
261 return 0;
262 }
263
StopCapture()264 int32_t VideoCaptureModuleV4L2::StopCapture() {
265 if (_captureThread) {
266 {
267 rtc::CritScope cs(&_captureCritSect);
268 quit_ = true;
269 }
270 // Make sure the capture thread stop stop using the critsect.
271 _captureThread->Stop();
272 _captureThread.reset();
273 }
274
275 rtc::CritScope cs(&_captureCritSect);
276 if (_captureStarted) {
277 _captureStarted = false;
278
279 DeAllocateVideoBuffers();
280 close(_deviceFd);
281 _deviceFd = -1;
282 }
283
284 return 0;
285 }
286
287 // critical section protected by the caller
288
AllocateVideoBuffers()289 bool VideoCaptureModuleV4L2::AllocateVideoBuffers() {
290 struct v4l2_requestbuffers rbuffer;
291 memset(&rbuffer, 0, sizeof(v4l2_requestbuffers));
292
293 rbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
294 rbuffer.memory = V4L2_MEMORY_MMAP;
295 rbuffer.count = kNoOfV4L2Bufffers;
296
297 if (ioctl(_deviceFd, VIDIOC_REQBUFS, &rbuffer) < 0) {
298 RTC_LOG(LS_INFO) << "Could not get buffers from device. errno = " << errno;
299 return false;
300 }
301
302 if (rbuffer.count > kNoOfV4L2Bufffers)
303 rbuffer.count = kNoOfV4L2Bufffers;
304
305 _buffersAllocatedByDevice = rbuffer.count;
306
307 // Map the buffers
308 _pool = new Buffer[rbuffer.count];
309
310 for (unsigned int i = 0; i < rbuffer.count; i++) {
311 struct v4l2_buffer buffer;
312 memset(&buffer, 0, sizeof(v4l2_buffer));
313 buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
314 buffer.memory = V4L2_MEMORY_MMAP;
315 buffer.index = i;
316
317 if (ioctl(_deviceFd, VIDIOC_QUERYBUF, &buffer) < 0) {
318 return false;
319 }
320
321 _pool[i].start = mmap(NULL, buffer.length, PROT_READ | PROT_WRITE,
322 MAP_SHARED, _deviceFd, buffer.m.offset);
323
324 if (MAP_FAILED == _pool[i].start) {
325 for (unsigned int j = 0; j < i; j++)
326 munmap(_pool[j].start, _pool[j].length);
327 return false;
328 }
329
330 _pool[i].length = buffer.length;
331
332 if (ioctl(_deviceFd, VIDIOC_QBUF, &buffer) < 0) {
333 return false;
334 }
335 }
336 return true;
337 }
338
DeAllocateVideoBuffers()339 bool VideoCaptureModuleV4L2::DeAllocateVideoBuffers() {
340 // unmap buffers
341 for (int i = 0; i < _buffersAllocatedByDevice; i++)
342 munmap(_pool[i].start, _pool[i].length);
343
344 delete[] _pool;
345
346 // turn off stream
347 enum v4l2_buf_type type;
348 type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
349 if (ioctl(_deviceFd, VIDIOC_STREAMOFF, &type) < 0) {
350 RTC_LOG(LS_INFO) << "VIDIOC_STREAMOFF error. errno: " << errno;
351 }
352
353 return true;
354 }
355
CaptureStarted()356 bool VideoCaptureModuleV4L2::CaptureStarted() {
357 return _captureStarted;
358 }
359
CaptureThread(void * obj)360 void VideoCaptureModuleV4L2::CaptureThread(void* obj) {
361 VideoCaptureModuleV4L2* capture = static_cast<VideoCaptureModuleV4L2*>(obj);
362 while (capture->CaptureProcess()) {
363 }
364 }
// One iteration of the capture loop. Waits up to 1 s for the device fd to
// become readable, dequeues a filled V4L2 buffer, hands it to
// IncomingFrame() and re-queues it. Returns true to keep looping, false to
// stop the capture thread (quit_ set, or select() failed).
bool VideoCaptureModuleV4L2::CaptureProcess() {
  int retVal = 0;
  fd_set rSet;
  struct timeval timeout;

  FD_ZERO(&rSet);
  FD_SET(_deviceFd, &rSet);
  timeout.tv_sec = 1;
  timeout.tv_usec = 0;

  // _deviceFd written only in StartCapture, when this thread isn't running.
  retVal = select(_deviceFd + 1, &rSet, NULL, NULL, &timeout);
  if (retVal < 0 && errno != EINTR)  // continue if interrupted
  {
    // select failed
    return false;
  } else if (retVal == 0) {
    // select timed out
    return true;
  } else if (!FD_ISSET(_deviceFd, &rSet)) {
    // not event on camera handle
    return true;
  }

  {
    // Lock is held for the whole dequeue/deliver/requeue sequence so
    // StopCapture() cannot unmap the pool while a frame is in flight.
    rtc::CritScope cs(&_captureCritSect);

    if (quit_) {
      return false;
    }

    if (_captureStarted) {
      struct v4l2_buffer buf;
      memset(&buf, 0, sizeof(struct v4l2_buffer));
      buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
      buf.memory = V4L2_MEMORY_MMAP;
      // dequeue a buffer - repeat until dequeued properly!
      while (ioctl(_deviceFd, VIDIOC_DQBUF, &buf) < 0) {
        if (errno != EINTR) {
          // Non-fatal: skip this wakeup and try again on the next one.
          RTC_LOG(LS_INFO) << "could not sync on a buffer on device "
                           << strerror(errno);
          return true;
        }
      }
      VideoCaptureCapability frameInfo;
      frameInfo.width = _currentWidth;
      frameInfo.height = _currentHeight;
      frameInfo.videoType = _captureVideoType;

      // convert to to I420 if needed
      IncomingFrame((unsigned char*)_pool[buf.index].start, buf.bytesused,
                    frameInfo);
      // enqueue the buffer again
      if (ioctl(_deviceFd, VIDIOC_QBUF, &buf) == -1) {
        RTC_LOG(LS_INFO) << "Failed to enqueue capture buffer";
      }
    }
  }
  // Yield so StopCapture() has a chance to grab the critsect between frames.
  usleep(0);
  return true;
}
426
CaptureSettings(VideoCaptureCapability & settings)427 int32_t VideoCaptureModuleV4L2::CaptureSettings(
428 VideoCaptureCapability& settings) {
429 settings.width = _currentWidth;
430 settings.height = _currentHeight;
431 settings.maxFPS = _currentFrameRate;
432 settings.videoType = _captureVideoType;
433
434 return 0;
435 }
436 } // namespace videocapturemodule
437 } // namespace webrtc
438