1 // Copyright (c) 2020- PPSSPP Project.
2 
3 // This program is free software: you can redistribute it and/or modify
4 // it under the terms of the GNU General Public License as published by
5 // the Free Software Foundation, version 2.0 or later versions.
6 
7 // This program is distributed in the hope that it will be useful,
8 // but WITHOUT ANY WARRANTY; without even the implied warranty of
9 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
10 // GNU General Public License 2.0 for more details.
11 
12 // A copy of the GPL 2.0 should have been included with the program.
13 // If not, see http://www.gnu.org/licenses/
14 
15 // Official git repository and contact information can be found at
16 // https://github.com/hrydgard/ppsspp and http://www.ppsspp.org/.
17 
18 #include "ppsspp_config.h"
19 #include "Camera.h"
20 #include "Core/Config.h"
21 
convert_frame(int inw,int inh,unsigned char * inData,AVPixelFormat inFormat,int outw,int outh,unsigned char ** outData,int * outLen)22 void convert_frame(int inw, int inh, unsigned char *inData, AVPixelFormat inFormat,
23 					int outw, int outh, unsigned char **outData, int *outLen) {
24 
25 	struct SwsContext *sws_context = sws_getContext(
26 				inw, inh, inFormat,
27 				outw, outh, AV_PIX_FMT_RGB24,
28 				SWS_BICUBIC, NULL, NULL, NULL);
29 
30 	// resize
31 	uint8_t *src[4] = {0};
32 	uint8_t *dst[4] = {0};
33 	int srcStride[4], dstStride[4];
34 
35 	unsigned char *rgbData = (unsigned char*)malloc(outw * outh * 4);
36 
37 	av_image_fill_linesizes(srcStride, inFormat,         inw);
38 	av_image_fill_linesizes(dstStride, AV_PIX_FMT_RGB24, outw);
39 
40 	av_image_fill_pointers(src, inFormat,         inh,  inData,  srcStride);
41 	av_image_fill_pointers(dst, AV_PIX_FMT_RGB24, outh, rgbData, dstStride);
42 
43 	sws_scale(sws_context,
44 		src, srcStride, 0, inh,
45 		dst, dstStride);
46 
47 	// compress jpeg
48 	*outLen = outw * outh * 2;
49 	*outData = (unsigned char*)malloc(*outLen);
50 
51 	jpge::params params;
52 	params.m_quality = 60;
53 	params.m_subsampling = jpge::H2V2;
54 	params.m_two_pass_flag = false;
55 	jpge::compress_image_to_jpeg_file_in_memory(
56 		*outData, *outLen, outw, outh, 3, rgbData, params);
57 	free(rgbData);
58 }
59 
__cameraDummyImage(int width,int height,unsigned char ** outData,int * outLen)60 void __cameraDummyImage(int width, int height, unsigned char** outData, int* outLen) {
61 	unsigned char* rgbData = (unsigned char*)malloc(3 * width * height);
62 	for (int y = 0; y < height; y++) {
63 		for (int x = 0; x < width; x++) {
64 			rgbData[3 * (y * width + x) + 0] = x*255/width;
65 			rgbData[3 * (y * width + x) + 1] = x*255/width;
66 			rgbData[3 * (y * width + x) + 2] = y*255/height;
67 		}
68 	}
69 
70 	*outLen = width * height * 2;
71 	*outData = (unsigned char*)malloc(*outLen);
72 
73 	jpge::params params;
74 	params.m_quality = 60;
75 	params.m_subsampling = jpge::H2V2;
76 	params.m_two_pass_flag = false;
77 	jpge::compress_image_to_jpeg_file_in_memory(
78 		*outData, *outLen, width, height, 3, rgbData, params);
79 	free(rgbData);
80 }
81 
82 
83 #if defined(USING_QT_UI)
84 
__qt_getDeviceList()85 std::vector<std::string> __qt_getDeviceList() {
86 	std::vector<std::string> deviceList;
87 	const QList<QCameraInfo> cameras = QCameraInfo::availableCameras();
88 	for (const QCameraInfo &cameraInfo : cameras) {
89 		deviceList.push_back(cameraInfo.deviceName().toStdString()
90 			+ " (" + cameraInfo.description().toStdString() + ")");
91 	}
92 	return deviceList;
93 }
94 
supportedPixelFormats(QAbstractVideoBuffer::HandleType handleType) const95 QList<QVideoFrame::PixelFormat> MyViewfinder::supportedPixelFormats(QAbstractVideoBuffer::HandleType handleType) const {
96 	Q_UNUSED(handleType);
97 	// Return the formats you will support
98 	return QList<QVideoFrame::PixelFormat>()
99 		<< QVideoFrame::Format_RGB24
100 		<< QVideoFrame::Format_YUYV
101 		;
102 }
103 
present(const QVideoFrame & frame)104 bool MyViewfinder::present(const QVideoFrame &frame) {
105 	if (frame.isValid()) {
106 		QVideoFrame cloneFrame(frame);
107 		cloneFrame.map(QAbstractVideoBuffer::ReadOnly);
108 
109 		unsigned char *jpegData = nullptr;
110 		int jpegLen = 0;
111 
112 		QVideoFrame::PixelFormat frameFormat = cloneFrame.pixelFormat();
113 		if (frameFormat == QVideoFrame::Format_RGB24) {
114 			convert_frame(cloneFrame.size().width(), cloneFrame.size().height(),
115 				(unsigned char*)cloneFrame.bits(), AV_PIX_FMT_RGB24,
116 				qtc_ideal_width, qtc_ideal_height, &jpegData, &jpegLen);
117 
118 		} else if (frameFormat == QVideoFrame::Format_YUYV) {
119 			convert_frame(cloneFrame.size().width(), cloneFrame.size().height(),
120 				(unsigned char*)cloneFrame.bits(), AV_PIX_FMT_YUYV422,
121 				qtc_ideal_width, qtc_ideal_height, &jpegData, &jpegLen);
122 		}
123 
124 		if (jpegData) {
125 			Camera::pushCameraImage(jpegLen, jpegData);
126 			free(jpegData);
127 			jpegData = nullptr;
128 		}
129 
130 		cloneFrame.unmap();
131 		return true;
132 	}
133 	return false;
134 }
135 
__qt_startCapture(int width,int height)136 int __qt_startCapture(int width, int height) {
137 	if (qt_camera != nullptr) {
138 		ERROR_LOG(HLE, "camera already started");
139 		return -1;
140 	}
141 
142 	char selectedCamera[80];
143 	sscanf(g_Config.sCameraDevice.c_str(), "%80s ", &selectedCamera[0]);
144 
145 	const QList<QCameraInfo> availableCameras = QCameraInfo::availableCameras();
146 	if (availableCameras.size() < 1) {
147 		delete qt_camera;
148 		qt_camera = nullptr;
149 		ERROR_LOG(HLE, "no camera found");
150 		return -1;
151 	}
152 	for (const QCameraInfo &cameraInfo : availableCameras) {
153 		if (cameraInfo.deviceName() == selectedCamera) {
154 			qt_camera = new QCamera(cameraInfo);
155 		}
156 	}
157 	if (qt_camera == nullptr) {
158 		qt_camera = new QCamera();
159 		if (qt_camera == nullptr) {
160 			ERROR_LOG(HLE, "cannot open camera");
161 			return -1;
162 		}
163 	}
164 
165 	qtc_ideal_width = width;
166 	qtc_ideal_height = height;
167 
168 	qt_viewfinder = new MyViewfinder;
169 
170 	QCameraViewfinderSettings viewfinderSettings = qt_camera->viewfinderSettings();
171 	viewfinderSettings.setResolution(640, 480);
172 	viewfinderSettings.setMinimumFrameRate(15.0);
173 	viewfinderSettings.setMaximumFrameRate(15.0);
174 
175 	qt_camera->setViewfinderSettings(viewfinderSettings);
176 	qt_camera->setViewfinder(qt_viewfinder);
177 	qt_camera->start();
178 
179 	return 0;
180 }
181 
__qt_stopCapture()182 int __qt_stopCapture() {
183 	if (qt_camera != nullptr) {
184 		qt_camera->stop();
185 		qt_camera->unload();
186 		delete qt_camera;
187 		delete qt_viewfinder;
188 		qt_camera = nullptr;
189 	}
190 	return 0;
191 }
192 
// end of USING_QT_UI section
194 #elif PPSSPP_PLATFORM(LINUX) && !PPSSPP_PLATFORM(ANDROID)
195 
__v4l_getDeviceList()196 std::vector<std::string> __v4l_getDeviceList() {
197 	std::vector<std::string> deviceList;
198 	for (int i = 0; i < 64; i++) {
199 		char path[256];
200 		snprintf(path, sizeof(path), "/dev/video%d", i);
201 		if (access(path, F_OK) < 0) {
202 			break;
203 		}
204 		int fd = -1;
205 		if((fd = open(path, O_RDONLY)) < 0) {
206 			ERROR_LOG(HLE, "Cannot open '%s'; errno=%d(%s)", path, errno, strerror(errno));
207 			continue;
208 		}
209 		struct v4l2_capability video_cap;
210 		if(ioctl(fd, VIDIOC_QUERYCAP, &video_cap) < 0) {
211 			ERROR_LOG(HLE, "VIDIOC_QUERYCAP");
212 			goto cont;
213 		} else {
214 			char device[256];
215 			snprintf(device, sizeof(device), "%d:%s", i, video_cap.card);
216 			deviceList.push_back(device);
217 		}
218 cont:
219 		close(fd);
220 		fd = -1;
221 	}
222 	return deviceList;
223 }
224 
v4l_loop(void * data)225 void *v4l_loop(void *data) {
226 	SetCurrentThreadName("v4l_loop");
227 	while (v4l_fd >= 0) {
228 		struct v4l2_buffer buf;
229 		memset(&buf, 0, sizeof(buf));
230 		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
231 		buf.memory = V4L2_MEMORY_MMAP;
232 
233 		if (ioctl(v4l_fd, VIDIOC_DQBUF, &buf) == -1) {
234 			ERROR_LOG(HLE, "VIDIOC_DQBUF; errno=%d(%s)", errno, strerror(errno));
235 			switch (errno) {
236 			case EAGAIN:
237 				continue;
238 			default:
239 				return nullptr;
240 			}
241 		}
242 
243 		unsigned char *jpegData = nullptr;
244 		int jpegLen = 0;
245 
246 		if (v4l_format == V4L2_PIX_FMT_YUYV) {
247 			convert_frame(v4l_hw_width, v4l_hw_height, (unsigned char*)v4l_buffers[buf.index].start, AV_PIX_FMT_YUYV422,
248 				v4l_ideal_width, v4l_ideal_height, &jpegData, &jpegLen);
249 		} else if (v4l_format == V4L2_PIX_FMT_JPEG
250 				|| v4l_format == V4L2_PIX_FMT_MJPEG) {
251 			// decompress jpeg
252 			int width, height, req_comps;
253 			unsigned char *rgbData = jpgd::decompress_jpeg_image_from_memory(
254 				(unsigned char*)v4l_buffers[buf.index].start, buf.bytesused, &width, &height, &req_comps, 3);
255 
256 			convert_frame(v4l_hw_width, v4l_hw_height, (unsigned char*)rgbData, AV_PIX_FMT_RGB24,
257 				v4l_ideal_width, v4l_ideal_height, &jpegData, &jpegLen);
258 			free(rgbData);
259 		}
260 
261 		if (jpegData) {
262 			Camera::pushCameraImage(jpegLen, jpegData);
263 			free(jpegData);
264 			jpegData = nullptr;
265 		}
266 
267 		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
268 		buf.memory = V4L2_MEMORY_MMAP;
269 		if (ioctl(v4l_fd, VIDIOC_QBUF, &buf) == -1) {
270 			ERROR_LOG(HLE, "VIDIOC_QBUF");
271 			return nullptr;
272 		}
273 	}
274 	return nullptr;
275 }
276 
__v4l_startCapture(int ideal_width,int ideal_height)277 int __v4l_startCapture(int ideal_width, int ideal_height) {
278 	if (v4l_fd >= 0) {
279 		__v4l_stopCapture();
280 	}
281 	v4l_ideal_width  = ideal_width;
282 	v4l_ideal_height = ideal_height;
283 
284 	int dev_index = 0;
285 	char dev_name[64];
286 	sscanf(g_Config.sCameraDevice.c_str(), "%d:", &dev_index);
287 	snprintf(dev_name, sizeof(dev_name), "/dev/video%d", dev_index);
288 
289 	if ((v4l_fd = open(dev_name, O_RDWR)) == -1) {
290 		ERROR_LOG(HLE, "Cannot open '%s'; errno=%d(%s)", dev_name, errno, strerror(errno));
291 		return -1;
292 	}
293 
294 	struct v4l2_capability cap;
295 	memset(&cap, 0, sizeof(cap));
296 	if (ioctl(v4l_fd, VIDIOC_QUERYCAP, &cap) == -1) {
297 		ERROR_LOG(HLE, "VIDIOC_QUERYCAP");
298 		return -1;
299 	}
300 	if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
301 		ERROR_LOG(HLE, "V4L2_CAP_VIDEO_CAPTURE");
302 		return -1;
303 	}
304 	if (!(cap.capabilities & V4L2_CAP_STREAMING)) {
305 		ERROR_LOG(HLE, "V4L2_CAP_STREAMING");
306 		return -1;
307 	}
308 
309 	struct v4l2_format fmt;
310 	memset(&fmt, 0, sizeof(fmt));
311 	fmt.type                = V4L2_BUF_TYPE_VIDEO_CAPTURE;
312 	fmt.fmt.pix.pixelformat = 0;
313 
314 	// select a pixel format
315 	struct v4l2_fmtdesc desc;
316 	memset(&desc, 0, sizeof(desc));
317 	desc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
318 	while (ioctl(v4l_fd, VIDIOC_ENUM_FMT, &desc) == 0) {
319 		desc.index++;
320 		INFO_LOG(HLE, "V4L2: pixel format supported: %s", desc.description);
321 		if (fmt.fmt.pix.pixelformat != 0) {
322 			continue;
323 		} else if (desc.pixelformat == V4L2_PIX_FMT_YUYV
324 				|| desc.pixelformat == V4L2_PIX_FMT_JPEG
325 				|| desc.pixelformat == V4L2_PIX_FMT_MJPEG) {
326 			INFO_LOG(HLE, "V4L2: %s selected", desc.description);
327 			fmt.fmt.pix.pixelformat = desc.pixelformat;
328 			v4l_format              = desc.pixelformat;
329 		}
330 	}
331 	if (fmt.fmt.pix.pixelformat == 0) {
332 		ERROR_LOG(HLE, "V4L2: No supported format found");
333 		return -1;
334 	}
335 
336 	// select a frame size
337 	fmt.fmt.pix.width  = 0;
338 	fmt.fmt.pix.height = 0;
339 	struct v4l2_frmsizeenum frmsize;
340 	memset(&frmsize, 0, sizeof(frmsize));
341 	frmsize.pixel_format = fmt.fmt.pix.pixelformat;
342 	while (ioctl(v4l_fd, VIDIOC_ENUM_FRAMESIZES, &frmsize) == 0) {
343 		frmsize.index++;
344 		if (frmsize.type == V4L2_FRMSIZE_TYPE_DISCRETE) {
345 			INFO_LOG(HLE, "V4L2: frame size supported: %dx%d", frmsize.discrete.width, frmsize.discrete.height);
346 			bool matchesIdeal = frmsize.discrete.width >= ideal_width && frmsize.discrete.height >= ideal_height;
347 			bool zeroPix = fmt.fmt.pix.width == 0 && fmt.fmt.pix.height == 0;
348 			bool pixLarger = frmsize.discrete.width < fmt.fmt.pix.width && frmsize.discrete.height < fmt.fmt.pix.height;
349 			if (matchesIdeal && (zeroPix || pixLarger)) {
350 				fmt.fmt.pix.width  = frmsize.discrete.width;
351 				fmt.fmt.pix.height = frmsize.discrete.height;
352 			}
353 		}
354 	}
355 
356 	if (fmt.fmt.pix.width == 0 && fmt.fmt.pix.height == 0) {
357 		fmt.fmt.pix.width  = ideal_width;
358 		fmt.fmt.pix.height = ideal_height;
359 	}
360 	INFO_LOG(HLE, "V4L2: asking for   %dx%d", fmt.fmt.pix.width, fmt.fmt.pix.height);
361 	if (ioctl(v4l_fd, VIDIOC_S_FMT, &fmt) == -1) {
362 		ERROR_LOG(HLE, "VIDIOC_S_FMT");
363 		return -1;
364 	}
365 	v4l_hw_width  = fmt.fmt.pix.width;
366 	v4l_hw_height = fmt.fmt.pix.height;
367 	INFO_LOG(HLE, "V4L2: will receive %dx%d", v4l_hw_width, v4l_hw_height);
368 	v4l_height_fixed_aspect = v4l_hw_width * ideal_height / ideal_width;
369 	INFO_LOG(HLE, "V4L2: will use     %dx%d", v4l_hw_width, v4l_height_fixed_aspect);
370 
371 	struct v4l2_requestbuffers req;
372 	memset(&req, 0, sizeof(req));
373 	req.count  = 1;
374 	req.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
375 	req.memory = V4L2_MEMORY_MMAP;
376 	if (ioctl(v4l_fd, VIDIOC_REQBUFS, &req) == -1) {
377 		ERROR_LOG(HLE, "VIDIOC_REQBUFS");
378 		return -1;
379 	}
380 	v4l_buffer_count = req.count;
381 	INFO_LOG(HLE, "V4L2: buffer count: %d", v4l_buffer_count);
382 	v4l_buffers = (v4l_buf_t*) calloc(v4l_buffer_count, sizeof(v4l_buf_t));
383 
384 	for (int buf_id = 0; buf_id < v4l_buffer_count; buf_id++) {
385 		struct v4l2_buffer buf;
386 		memset(&buf, 0, sizeof(buf));
387 		buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
388 		buf.memory = V4L2_MEMORY_MMAP;
389 		buf.index  = buf_id;
390 		if (ioctl(v4l_fd, VIDIOC_QUERYBUF, &buf) == -1) {
391 			ERROR_LOG(HLE, "VIDIOC_QUERYBUF");
392 			return -1;
393 		}
394 
395 		v4l_buffers[buf_id].length = buf.length;
396 		v4l_buffers[buf_id].start = mmap(NULL,
397 				buf.length,
398 				PROT_READ | PROT_WRITE,
399 				MAP_SHARED,
400 				v4l_fd, buf.m.offset);
401 		if (v4l_buffers[buf_id].start == MAP_FAILED) {
402 			ERROR_LOG(HLE, "MAP_FAILED");
403 			return -1;
404 		}
405 
406 		memset(&buf, 0, sizeof(buf));
407 		buf.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
408 		buf.memory = V4L2_MEMORY_MMAP;
409 		buf.index  = buf_id;
410 		if (ioctl(v4l_fd, VIDIOC_QBUF, &buf) == -1) {
411 			ERROR_LOG(HLE, "VIDIOC_QBUF");
412 			return -1;
413 		}
414 	}
415 
416 	enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
417 	if (ioctl(v4l_fd, VIDIOC_STREAMON, &type) == -1) {
418 		ERROR_LOG(HLE, "VIDIOC_STREAMON");
419 		return -1;
420 	}
421 
422 	pthread_create(&v4l_thread, NULL, v4l_loop, NULL);
423 
424 	return 0;
425 }
426 
__v4l_stopCapture()427 int __v4l_stopCapture() {
428 	enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
429 
430 	if (v4l_fd < 0) {
431 		goto exit;
432 	}
433 
434 	if (ioctl(v4l_fd, VIDIOC_STREAMOFF, &type) == -1) {
435 		ERROR_LOG(HLE, "VIDIOC_STREAMOFF");
436 		goto exit;
437 	}
438 
439 	for (int buf_id = 0; buf_id < v4l_buffer_count; buf_id++) {
440 		if (munmap(v4l_buffers[buf_id].start, v4l_buffers[buf_id].length) == -1) {
441 			ERROR_LOG(HLE, "munmap");
442 			goto exit;
443 		}
444 	}
445 
446 	if (close(v4l_fd) == -1) {
447 		ERROR_LOG(HLE, "close");
448 		goto exit;
449 	}
450 
451 	v4l_fd = -1;
452 	//pthread_join(v4l_thread, NULL);
453 
454 exit:
455 	v4l_fd = -1;
456 	return 0;
457 }
458 
459 #endif // PPSSPP_PLATFORM(LINUX) && !PPSSPP_PLATFORM(ANDROID)
460