/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS. All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "modules/video_capture/linux/video_capture_v4l2.h"

#include <errno.h>
#include <fcntl.h>
#include <linux/videodev2.h>
#include <stdio.h>
#include <string.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/select.h>
#include <time.h>
#include <unistd.h>

#include <new>
#include <string>

#include "api/scoped_refptr.h"
#include "media/base/video_common.h"
#include "modules/video_capture/video_capture.h"
#include "rtc_base/logging.h"

namespace webrtc {
namespace videocapturemodule {
VideoCaptureModuleV4L2::VideoCaptureModuleV4L2()
    : VideoCaptureImpl(),
      _deviceId(-1),
      _deviceFd(-1),
      _buffersAllocatedByDevice(-1),
      _currentWidth(-1),
      _currentHeight(-1),
      _currentFrameRate(-1),
      _captureStarted(false),
      _captureVideoType(VideoType::kI420),
      _pool(NULL) {}

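// Finds the /dev/video* node whose V4L2 bus_info matches the supplied unique
// ID and remembers its index so StartCapture() can open the same device.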
int32_t VideoCaptureModuleV4L2::Init(const char* deviceUniqueIdUTF8) {
  int len = strlen((const char*)deviceUniqueIdUTF8);
  _deviceUniqueId = new (std::nothrow) char[len + 1];
  if (_deviceUniqueId) {
    memcpy(_deviceUniqueId, deviceUniqueIdUTF8, len + 1);
  }

  int fd;
  char device[32];
  bool found = false;

  /* detect /dev/video [0-63] entries */
  int n;
  for (n = 0; n < 64; n++) {
    snprintf(device, sizeof(device), "/dev/video%d", n);
    if ((fd = open(device, O_RDONLY)) != -1) {
      // query device capabilities
      struct v4l2_capability cap;
      if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == 0) {
        if (cap.bus_info[0] != 0) {
          if (strncmp((const char*)cap.bus_info,
                      (const char*)deviceUniqueIdUTF8,
                      strlen((const char*)deviceUniqueIdUTF8)) ==
              0) {  // match with device id
            close(fd);
            found = true;
            break;  // fd matches with device unique id supplied
          }
        }
      }
      close(fd);  // close since this is not the matching device
    }
  }
  if (!found) {
    RTC_LOG(LS_INFO) << "no matching device found";
    return -1;
  }
  _deviceId = n;  // store the device id
  return 0;
}

VideoCaptureModuleV4L2::~VideoCaptureModuleV4L2() {
  StopCapture();
  if (_deviceFd != -1)
    close(_deviceFd);
}

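// Opens the device node, negotiates pixel format and frame rate, maps the
// capture buffers, spawns the capture thread, and turns streaming on.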
int32_t VideoCaptureModuleV4L2::StartCapture(
    const VideoCaptureCapability& capability) {
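  // If capture is already running with the requested size and type there is
  // nothing to do; otherwise stop and reconfigure.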
  if (_captureStarted) {
    if (capability.width == _currentWidth &&
        capability.height == _currentHeight &&
        _captureVideoType == capability.videoType) {
      return 0;
    } else {
      StopCapture();
    }
  }

  MutexLock lock(&capture_lock_);
  // First open the /dev/video device.
  char device[20];
  snprintf(device, sizeof(device), "/dev/video%d", _deviceId);

  if ((_deviceFd = open(device, O_RDWR | O_NONBLOCK, 0)) < 0) {
    RTC_LOG(LS_INFO) << "error in opening " << device << " errno = " << errno;
    return -1;
  }

  // Supported video formats in preferred order.
  // If the requested resolution is larger than VGA, we prefer MJPEG. Go for
  // I420 otherwise.
  const int nFormats = 6;
  unsigned int fmts[nFormats];
  if (capability.width > 640 || capability.height > 480) {
    fmts[0] = V4L2_PIX_FMT_MJPEG;
    fmts[1] = V4L2_PIX_FMT_YUV420;
    fmts[2] = V4L2_PIX_FMT_YUYV;
    fmts[3] = V4L2_PIX_FMT_UYVY;
    fmts[4] = V4L2_PIX_FMT_NV12;
    fmts[5] = V4L2_PIX_FMT_JPEG;
  } else {
    fmts[0] = V4L2_PIX_FMT_YUV420;
    fmts[1] = V4L2_PIX_FMT_YUYV;
    fmts[2] = V4L2_PIX_FMT_UYVY;
    fmts[3] = V4L2_PIX_FMT_NV12;
    fmts[4] = V4L2_PIX_FMT_MJPEG;
    fmts[5] = V4L2_PIX_FMT_JPEG;
  }

  // Enumerate image formats and pick the most preferred one the driver
  // supports.
  struct v4l2_fmtdesc fmt;
  int fmtsIdx = nFormats;
  memset(&fmt, 0, sizeof(fmt));
  fmt.index = 0;
  fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  RTC_LOG(LS_INFO) << "Video Capture enumerates supported image formats:";
  while (ioctl(_deviceFd, VIDIOC_ENUM_FMT, &fmt) == 0) {
    RTC_LOG(LS_INFO) << " { pixelformat = "
                     << cricket::GetFourccName(fmt.pixelformat)
                     << ", description = '" << fmt.description << "' }";
    // Match the preferred order.
    for (int i = 0; i < nFormats; i++) {
      if (fmt.pixelformat == fmts[i] && i < fmtsIdx)
        fmtsIdx = i;
    }
    // Keep enumerating.
    fmt.index++;
  }

  if (fmtsIdx == nFormats) {
    RTC_LOG(LS_INFO) << "no supported video formats found";
    return -1;
  } else {
    RTC_LOG(LS_INFO) << "We prefer format "
                     << cricket::GetFourccName(fmts[fmtsIdx]);
  }

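  // Ask the driver for the chosen pixel format at the requested resolution.
  // The driver may adjust the size, so the negotiated width and height are
  // read back after VIDIOC_S_FMT succeeds.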
  struct v4l2_format video_fmt;
  memset(&video_fmt, 0, sizeof(struct v4l2_format));
  video_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  video_fmt.fmt.pix.sizeimage = 0;
  video_fmt.fmt.pix.width = capability.width;
  video_fmt.fmt.pix.height = capability.height;
  video_fmt.fmt.pix.pixelformat = fmts[fmtsIdx];

  if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV)
    _captureVideoType = VideoType::kYUY2;
  else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUV420)
    _captureVideoType = VideoType::kI420;
  else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_UYVY)
    _captureVideoType = VideoType::kUYVY;
  else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_NV12)
    _captureVideoType = VideoType::kNV12;
  else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG ||
           video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_JPEG)
    _captureVideoType = VideoType::kMJPEG;

  // Set the format and frame size now.
  if (ioctl(_deviceFd, VIDIOC_S_FMT, &video_fmt) < 0) {
    RTC_LOG(LS_INFO) << "error in VIDIOC_S_FMT, errno = " << errno;
    return -1;
  }

  // Initialize current width and height with what the driver accepted.
  _currentWidth = video_fmt.fmt.pix.width;
  _currentHeight = video_fmt.fmt.pix.height;

  // Try to set the frame rate, but first check whether the driver supports it.
  bool driver_framerate_support = true;
  struct v4l2_streamparm streamparms;
  memset(&streamparms, 0, sizeof(streamparms));
  streamparms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  if (ioctl(_deviceFd, VIDIOC_G_PARM, &streamparms) < 0) {
    RTC_LOG(LS_INFO) << "error in VIDIOC_G_PARM errno = " << errno;
    driver_framerate_support = false;
    // continue
  } else {
    // Check that the V4L2_CAP_TIMEPERFRAME capability flag is set.
    if (streamparms.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) {
      // The driver supports the feature. Set the required frame rate.
      memset(&streamparms, 0, sizeof(streamparms));
      streamparms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
      streamparms.parm.capture.timeperframe.numerator = 1;
      streamparms.parm.capture.timeperframe.denominator = capability.maxFPS;
      if (ioctl(_deviceFd, VIDIOC_S_PARM, &streamparms) < 0) {
        RTC_LOG(LS_INFO) << "Failed to set the framerate. errno=" << errno;
        driver_framerate_support = false;
      } else {
        _currentFrameRate = capability.maxFPS;
      }
    }
  }
  // If the driver doesn't support frame rate control, fall back to a
  // hardcoded value based on the frame size.
  if (!driver_framerate_support) {
    if (_currentWidth >= 800 && _captureVideoType != VideoType::kMJPEG) {
      _currentFrameRate = 15;
    } else {
      _currentFrameRate = 30;
    }
  }

  if (!AllocateVideoBuffers()) {
    RTC_LOG(LS_INFO) << "failed to allocate video capture buffers";
    return -1;
  }

  // Start the capture thread.
  if (_captureThread.empty()) {
    quit_ = false;
    _captureThread = rtc::PlatformThread::SpawnJoinable(
        [this] {
          while (CaptureProcess()) {
          }
        },
        "CaptureThread",
        rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kHigh));
  }

  // Needed to start the UVC camera - from the uvcview application.
  enum v4l2_buf_type type;
  type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  if (ioctl(_deviceFd, VIDIOC_STREAMON, &type) == -1) {
    RTC_LOG(LS_INFO) << "Failed to turn on stream";
    return -1;
  }

  _captureStarted = true;
  return 0;
}

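// Signals the capture thread to quit and joins it, then releases the capture
// buffers and closes the device.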
int32_t VideoCaptureModuleV4L2::StopCapture() {
  if (!_captureThread.empty()) {
    {
      MutexLock lock(&capture_lock_);
      quit_ = true;
    }
    // Make sure the capture thread stops using the mutex.
    _captureThread.Finalize();
  }

  MutexLock lock(&capture_lock_);
  if (_captureStarted) {
    _captureStarted = false;

    DeAllocateVideoBuffers();
    close(_deviceFd);
    _deviceFd = -1;
  }

  return 0;
}

// Critical section protected by the caller.

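// Sets up V4L2 memory-mapped streaming I/O: buffers are requested with
// VIDIOC_REQBUFS, queried with VIDIOC_QUERYBUF, mapped into this process with
// mmap(), and finally queued with VIDIOC_QBUF so the driver can fill them.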
bool VideoCaptureModuleV4L2::AllocateVideoBuffers() {
  struct v4l2_requestbuffers rbuffer;
  memset(&rbuffer, 0, sizeof(v4l2_requestbuffers));

  rbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  rbuffer.memory = V4L2_MEMORY_MMAP;
  rbuffer.count = kNoOfV4L2Bufffers;

  if (ioctl(_deviceFd, VIDIOC_REQBUFS, &rbuffer) < 0) {
    RTC_LOG(LS_INFO) << "Could not get buffers from device. errno = " << errno;
    return false;
  }

  if (rbuffer.count > kNoOfV4L2Bufffers)
    rbuffer.count = kNoOfV4L2Bufffers;

  _buffersAllocatedByDevice = rbuffer.count;

  // Map the buffers.
  _pool = new Buffer[rbuffer.count];

  for (unsigned int i = 0; i < rbuffer.count; i++) {
    struct v4l2_buffer buffer;
    memset(&buffer, 0, sizeof(v4l2_buffer));
    buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buffer.memory = V4L2_MEMORY_MMAP;
    buffer.index = i;

    if (ioctl(_deviceFd, VIDIOC_QUERYBUF, &buffer) < 0) {
      return false;
    }

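    // Map the driver-allocated buffer into our address space so captured
    // frames can be read directly, without an extra copy.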
    _pool[i].start = mmap(NULL, buffer.length, PROT_READ | PROT_WRITE,
                          MAP_SHARED, _deviceFd, buffer.m.offset);

    if (MAP_FAILED == _pool[i].start) {
      for (unsigned int j = 0; j < i; j++)
        munmap(_pool[j].start, _pool[j].length);
      return false;
    }

    _pool[i].length = buffer.length;

    if (ioctl(_deviceFd, VIDIOC_QBUF, &buffer) < 0) {
      return false;
    }
  }
  return true;
}

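// Unmaps and frees the capture buffers and turns streaming off with
// VIDIOC_STREAMOFF.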
bool VideoCaptureModuleV4L2::DeAllocateVideoBuffers() {
  // Unmap the buffers.
  for (int i = 0; i < _buffersAllocatedByDevice; i++)
    munmap(_pool[i].start, _pool[i].length);

  delete[] _pool;

  // Turn off the stream.
  enum v4l2_buf_type type;
  type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  if (ioctl(_deviceFd, VIDIOC_STREAMOFF, &type) < 0) {
    RTC_LOG(LS_INFO) << "VIDIOC_STREAMOFF error. errno: " << errno;
  }

  return true;
}

bool VideoCaptureModuleV4L2::CaptureStarted() {
  return _captureStarted;
}

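// Runs one iteration of the capture loop: waits (with a one second timeout)
// for the device fd to become readable, dequeues a filled buffer with
// VIDIOC_DQBUF, delivers it through IncomingFrame(), and queues the buffer
// again. Returns false when the thread should exit.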
bool VideoCaptureModuleV4L2::CaptureProcess() {
  int retVal = 0;
  fd_set rSet;
  struct timeval timeout;

  FD_ZERO(&rSet);
  FD_SET(_deviceFd, &rSet);
  timeout.tv_sec = 1;
  timeout.tv_usec = 0;

  // _deviceFd written only in StartCapture, when this thread isn't running.
  retVal = select(_deviceFd + 1, &rSet, NULL, NULL, &timeout);

  {
    MutexLock lock(&capture_lock_);

    if (quit_) {
      return false;
    }

    if (retVal < 0 && errno != EINTR) {  // continue if interrupted
      // select failed
      return false;
    } else if (retVal == 0) {
      // select timed out
      return true;
    } else if (!FD_ISSET(_deviceFd, &rSet)) {
      // no event on the camera handle
      return true;
    }

    if (_captureStarted) {
      struct v4l2_buffer buf;
      memset(&buf, 0, sizeof(struct v4l2_buffer));
      buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
      buf.memory = V4L2_MEMORY_MMAP;
      // Dequeue a buffer - repeat until dequeued properly.
      while (ioctl(_deviceFd, VIDIOC_DQBUF, &buf) < 0) {
        if (errno != EINTR) {
          RTC_LOG(LS_INFO) << "could not sync on a buffer on device "
                           << strerror(errno);
          return true;
        }
      }
      VideoCaptureCapability frameInfo;
      frameInfo.width = _currentWidth;
      frameInfo.height = _currentHeight;
      frameInfo.videoType = _captureVideoType;

      // Convert to I420 if needed.
      IncomingFrame(reinterpret_cast<uint8_t*>(_pool[buf.index].start),
                    buf.bytesused, frameInfo);
      // Enqueue the buffer again.
      if (ioctl(_deviceFd, VIDIOC_QBUF, &buf) == -1) {
        RTC_LOG(LS_INFO) << "Failed to enqueue capture buffer";
      }
    }
  }
  usleep(0);
  return true;
}

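// Reports the width, height, frame rate, and video type that capture is
// currently running with.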
int32_t VideoCaptureModuleV4L2::CaptureSettings(
    VideoCaptureCapability& settings) {
  settings.width = _currentWidth;
  settings.height = _currentHeight;
  settings.maxFPS = _currentFrameRate;
  settings.videoType = _captureVideoType;

  return 0;
}
}  // namespace videocapturemodule
}  // namespace webrtc