1 // Copyright (c) 2012- PPSSPP Project.
2
3 // This program is free software: you can redistribute it and/or modify
4 // it under the terms of the GNU General Public License as published by
5 // the Free Software Foundation, version 2.0 or later versions.
6
7 // This program is distributed in the hope that it will be useful,
8 // but WITHOUT ANY WARRANTY; without even the implied warranty of
9 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 // GNU General Public License 2.0 for more details.
11
12 // A copy of the GPL 2.0 should have been included with the program.
13 // If not, see http://www.gnu.org/licenses/
14
15 // Official git repository and contact information can be found at
16 // https://github.com/hrydgard/ppsspp and http://www.ppsspp.org/.
17
18 #include "Common/Serialize/SerializeFuncs.h"
19 #include "Core/Config.h"
20 #include "Core/Debugger/MemBlockInfo.h"
21 #include "Core/HW/MediaEngine.h"
22 #include "Core/MemMap.h"
23 #include "Core/MIPS/MIPS.h"
24 #include "Core/Reporting.h"
25 #include "GPU/GPUState.h" // Used by TextureDecoder.h when templates get instanced
26 #include "GPU/Common/TextureDecoder.h"
27 #include "GPU/GPUInterface.h"
28 #include "Core/HW/SimpleAudioDec.h"
29
30 #include <algorithm>
31
32 #ifdef USE_FFMPEG
33
34 extern "C" {
35
36 #include "libavcodec/avcodec.h"
37 #include "libavformat/avformat.h"
38 #include "libavutil/imgutils.h"
39 #include "libswscale/swscale.h"
40
41 }
42 #endif // USE_FFMPEG
43
44 #ifdef USE_FFMPEG
getSwsFormat(int pspFormat)45 static AVPixelFormat getSwsFormat(int pspFormat)
46 {
47 switch (pspFormat)
48 {
49 case GE_CMODE_16BIT_BGR5650:
50 return AV_PIX_FMT_BGR565LE;
51 case GE_CMODE_16BIT_ABGR5551:
52 return AV_PIX_FMT_BGR555LE;
53 case GE_CMODE_16BIT_ABGR4444:
54 return AV_PIX_FMT_BGR444LE;
55 case GE_CMODE_32BIT_ABGR8888:
56 return AV_PIX_FMT_RGBA;
57 default:
58 ERROR_LOG(ME, "Unknown pixel format");
59 return (AVPixelFormat)0;
60 }
61 }
62
ffmpeg_logger(void *,int level,const char * format,va_list va_args)63 void ffmpeg_logger(void *, int level, const char *format, va_list va_args) {
64 // We're still called even if the level doesn't match.
65 if (level > av_log_get_level())
66 return;
67
68 char tmp[1024];
69 vsnprintf(tmp, sizeof(tmp), format, va_args);
70 tmp[sizeof(tmp) - 1] = '\0';
71
72 // Strip off any trailing newline.
73 size_t len = strlen(tmp);
74 if (tmp[len - 1] == '\n')
75 tmp[len - 1] = '\0';
76
77 if (!strcmp(tmp, "GHA Phase shifting")) {
78 Reporting::ReportMessage("Atrac3+: GHA phase shifting");
79 }
80
81 // Let's color the log line appropriately.
82 if (level <= AV_LOG_PANIC) {
83 ERROR_LOG(ME, "FF: %s", tmp);
84 } else if (level >= AV_LOG_VERBOSE) {
85 DEBUG_LOG(ME, "FF: %s", tmp);
86 } else {
87 INFO_LOG(ME, "FF: %s", tmp);
88 }
89 }
90
// One-time FFmpeg logging setup: verbose in debug builds, warnings-only in
// release, with output routed through ffmpeg_logger. Always returns true.
bool InitFFmpeg() {
#ifdef _DEBUG
	av_log_set_level(AV_LOG_VERBOSE);
#else
	av_log_set_level(AV_LOG_WARNING);
#endif
	av_log_set_callback(&ffmpeg_logger);

	return true;
}
101 #endif
102
// Returns the bytes-per-pixel for a PSP GE pixel mode (GE_CMODE_*).
// Unknown modes log an error and fall back to 4 bytes.
static int getPixelFormatBytes(int pspFormat)
{
	switch (pspFormat)
	{
	case GE_CMODE_16BIT_BGR5650:
	case GE_CMODE_16BIT_ABGR5551:
	case GE_CMODE_16BIT_ABGR4444:
		return 2;
	case GE_CMODE_32BIT_ABGR8888:
		return 4;

	default:
		ERROR_LOG(ME, "Unknown pixel format");
		return 4;
	}
}
119
// Initializes all members to an "unloaded" state; real setup happens in
// loadStream() / openContext().
MediaEngine::MediaEngine(): m_pdata(0) {
#ifdef USE_FFMPEG
	m_pFormatCtx = 0;
	m_pCodecCtxs.clear();
	m_pFrame = 0;
	m_pFrameRGB = 0;
	m_pIOContext = 0;
	m_sws_ctx = 0;
#endif
	m_sws_fmt = 0;
	m_buffer = 0;

	// -1 = no stream selected yet.
	m_videoStream = -1;
	m_audioStream = -1;

	m_expectedVideoStreams = 0;

	m_desWidth = 0;
	m_desHeight = 0;
	m_decodingsize = 0;
	m_bufSize = 0x2000;
	m_pdata = 0;
	m_demux = 0;
	m_audioContext = 0;
	m_audiopts = 0;

	m_firstTimeStamp = 0;
	m_lastTimeStamp = 0;
	m_isVideoEnd = false;

	m_ringbuffersize = 0;
	m_mpegheaderReadPos = 0;
	m_mpegheaderSize = sizeof(m_mpegheader);
	m_audioType = PSP_CODEC_AT3PLUS; // in movie, we use only AT3+ audio
}
155
// Releases all decoder/demuxer/buffer resources via closeMedia().
MediaEngine::~MediaEngine() {
	closeMedia();
}
159
closeMedia()160 void MediaEngine::closeMedia() {
161 closeContext();
162 if (m_pdata)
163 delete m_pdata;
164 if (m_demux)
165 delete m_demux;
166 m_pdata = 0;
167 m_demux = 0;
168 AudioClose(&m_audioContext);
169 m_isVideoEnd = false;
170 }
171
// Savestate serialization. The section version (1..7) gates fields added
// over time; loading an older save falls back to explicit defaults.
// Field order must never change - it defines the on-disk layout.
void MediaEngine::DoState(PointerWrap &p) {
	auto s = p.Section("MediaEngine", 1, 7);
	if (!s)
		return;

	Do(p, m_videoStream);
	Do(p, m_audioStream);

	DoArray(p, m_mpegheader, sizeof(m_mpegheader));
	if (s >= 4) {
		Do(p, m_mpegheaderSize);
	} else {
		m_mpegheaderSize = sizeof(m_mpegheader);
	}
	if (s >= 5) {
		Do(p, m_mpegheaderReadPos);
	} else {
		// Older saves: assume the header was fully consumed.
		m_mpegheaderReadPos = m_mpegheaderSize;
	}
	if (s >= 6) {
		Do(p, m_expectedVideoStreams);
	} else {
		m_expectedVideoStreams = 0;
	}

	Do(p, m_ringbuffersize);

	u32 hasloadStream = m_pdata != NULL;
	Do(p, hasloadStream);
	// On load, rebuild the ring buffer/demuxer from the serialized header.
	if (hasloadStream && p.mode == p.MODE_READ)
		reloadStream();
#ifdef USE_FFMPEG
	u32 hasopencontext = m_pFormatCtx != NULL;
#else
	u32 hasopencontext = false;
#endif
	Do(p, hasopencontext);
	if (m_pdata)
		m_pdata->DoState(p);
	if (m_demux)
		m_demux->DoState(p);

	Do(p, m_videopts);
	if (s >= 7) {
		Do(p, m_lastPts);
	} else {
		m_lastPts = m_videopts;
	}
	Do(p, m_audiopts);

	if (s >= 2) {
		Do(p, m_firstTimeStamp);
		Do(p, m_lastTimeStamp);
	}

	if (hasopencontext && p.mode == p.MODE_READ) {
		// Re-open FFmpeg, keeping the current header read position.
		openContext(true);
	}

	Do(p, m_isVideoEnd);
	// Placeholder serialized for layout compatibility; value is unused
	// (presumably a removed field - see its name).
	bool noAudioDataRemoved;
	Do(p, noAudioDataRemoved);
	if (s >= 3) {
		Do(p, m_audioType);
	} else {
		m_audioType = PSP_CODEC_AT3PLUS;
	}
}
240
// AVIOContext read callback: serves FFmpeg bytes first from the cached
// PSMF header, then from the game-fed ring buffer (m_pdata).
// Returns the number of bytes produced (may be 0 when the buffer is empty).
static int MpegReadbuffer(void *opaque, uint8_t *buf, int buf_size) {
	MediaEngine *mpeg = (MediaEngine *)opaque;

	int size = buf_size;
	if (mpeg->m_mpegheaderReadPos < mpeg->m_mpegheaderSize) {
		// Still draining the cached header.
		size = std::min(buf_size, mpeg->m_mpegheaderSize - mpeg->m_mpegheaderReadPos);
		memcpy(buf, mpeg->m_mpegheader + mpeg->m_mpegheaderReadPos, size);
		mpeg->m_mpegheaderReadPos += size;
	} else {
		size = mpeg->m_pdata->pop_front(buf, buf_size);
		// Track how much was handed to the decoder (used by getRemainSize()).
		if (size > 0)
			mpeg->m_decodingsize = size;
	}
	return size;
}
256
// Registers video streams with FFmpeg directly from the PSMF header instead
// of letting avformat_find_stream_info() probe (which reads too much data).
// Returns false if the header looks invalid so the caller can fall back.
bool MediaEngine::SetupStreams() {
#ifdef USE_FFMPEG
	const u32 magic = *(u32_le *)&m_mpegheader[0];
	if (magic != PSMF_MAGIC) {
		WARN_LOG_REPORT(ME, "Could not setup streams, bad magic: %08x", magic);
		return false;
	}
	// Stream count lives at offset 0x80 in the PSMF header (big-endian u16).
	int numStreams = *(u16_be *)&m_mpegheader[0x80];
	if (numStreams <= 0 || numStreams > 8) {
		// Looks crazy. Let's bail out and let FFmpeg handle it.
		WARN_LOG_REPORT(ME, "Could not setup streams, unexpected stream count: %d", numStreams);
		return false;
	}

	// Looking good. Let's add those streams.
	int videoStreamNum = -1;
	for (int i = 0; i < numStreams; i++) {
		// Each stream descriptor is 16 bytes, starting at 0x82.
		const u8 *const currentStreamAddr = m_mpegheader + 0x82 + i * 16;
		int streamId = currentStreamAddr[0];

		// We only set video streams. We demux the audio stream separately.
		if ((streamId & PSMF_VIDEO_STREAM_ID) == PSMF_VIDEO_STREAM_ID) {
			++videoStreamNum;
			addVideoStream(videoStreamNum, streamId);
		}
	}
	// Add the streams to meet the expectation.
	for (int i = videoStreamNum + 1; i < m_expectedVideoStreams; i++) {
		addVideoStream(i);
	}
#endif

	return true;
}
291
// Creates the FFmpeg demux/decode context over our custom AVIO callback.
// keepReadPos preserves m_mpegheaderReadPos (used when restoring from a
// savestate); otherwise header reading restarts from the top.
// Returns false if a context already exists, no data is loaded, or setup fails.
bool MediaEngine::openContext(bool keepReadPos) {
#ifdef USE_FFMPEG
	InitFFmpeg();

	if (m_pFormatCtx || !m_pdata)
		return false;
	if (!keepReadPos) {
		m_mpegheaderReadPos = 0;
	}
	m_decodingsize = 0;

	// The AVIO buffer must be able to hold at least the whole header.
	m_bufSize = std::max(m_bufSize, m_mpegheaderSize);
	u8 *tempbuf = (u8*)av_malloc(m_bufSize);

	m_pFormatCtx = avformat_alloc_context();
	// avio takes ownership of tempbuf; it's freed via m_pIOContext->buffer
	// in closeContext().
	m_pIOContext = avio_alloc_context(tempbuf, m_bufSize, 0, (void*)this, &MpegReadbuffer, nullptr, nullptr);
	m_pFormatCtx->pb = m_pIOContext;

	// Open video file
	// Limit probing to the header so FFmpeg doesn't consume stream data.
	AVDictionary *open_opt = nullptr;
	av_dict_set_int(&open_opt, "probesize", m_mpegheaderSize, 0);
	if (avformat_open_input((AVFormatContext**)&m_pFormatCtx, nullptr, nullptr, &open_opt) != 0) {
		av_dict_free(&open_opt);
		return false;
	}
	av_dict_free(&open_opt);

	if (!SetupStreams()) {
		// Fallback to old behavior. Reads too much and corrupts when game doesn't read fast enough.
		// SetupStreams sometimes work for newer FFmpeg 3.1+ now, but sometimes framerate is missing.
		WARN_LOG_REPORT_ONCE(setupStreams, ME, "Failed to read valid video stream data from header");
		if (avformat_find_stream_info(m_pFormatCtx, nullptr) < 0) {
			closeContext();
			return false;
		}
	}

	// A previously serialized stream index may be stale; validate it.
	if (m_videoStream >= (int)m_pFormatCtx->nb_streams) {
		WARN_LOG_REPORT(ME, "Bad video stream %d", m_videoStream);
		m_videoStream = -1;
	}

	if (m_videoStream == -1) {
		// Find the first video stream
		for (int i = 0; i < (int)m_pFormatCtx->nb_streams; i++) {
			const AVStream *s = m_pFormatCtx->streams[i];
#if LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(57, 33, 100)
			AVMediaType type = s->codecpar->codec_type;
#else
			AVMediaType type = s->codec->codec_type;
#endif
			if (type == AVMEDIA_TYPE_VIDEO) {
				m_videoStream = i;
				break;
			}
		}
		if (m_videoStream == -1)
			return false;
	}

	if (!setVideoStream(m_videoStream, true))
		return false;

	setVideoDim();
	// Movies always carry AT3+ audio; decode at 44.1kHz stereo.
	m_audioContext = new SimpleAudio(m_audioType, 44100, 2);
	m_isVideoEnd = false;
#endif // USE_FFMPEG
	return true;
}
361
// Frees all FFmpeg-side resources in dependency order: frames/buffers,
// the AVIO buffer and context, codec contexts, then the format context.
// Safe to call when nothing is allocated.
void MediaEngine::closeContext()
{
#ifdef USE_FFMPEG
	if (m_buffer)
		av_free(m_buffer);
	if (m_pFrameRGB)
		av_frame_free(&m_pFrameRGB);
	if (m_pFrame)
		av_frame_free(&m_pFrame);
	// The AVIO buffer is owned by the IO context (allocated in openContext).
	if (m_pIOContext && m_pIOContext->buffer)
		av_free(m_pIOContext->buffer);
	if (m_pIOContext)
		av_free(m_pIOContext);
	for (auto it : m_pCodecCtxs) {
#if LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(57, 33, 100)
		avcodec_free_context(&it.second);
#else
		// Old API: contexts belong to the streams, just close them.
		avcodec_close(it.second);
#endif
	}
	m_pCodecCtxs.clear();
	if (m_pFormatCtx)
		avformat_close_input(&m_pFormatCtx);
	sws_freeContext(m_sws_ctx);
	m_sws_ctx = NULL;
	m_pIOContext = 0;
#endif
	m_buffer = 0;
}
391
// Resets playback and loads the initial chunk of a PSMF stream.
// buffer must start with the PSMF header (timestamps and the stream offset
// are read from fixed header offsets - assumes readSize covers at least the
// 2048-byte header; callers appear to guarantee this, TODO confirm).
bool MediaEngine::loadStream(const u8 *buffer, int readSize, int RingbufferSize)
{
	closeMedia();

	m_videopts = 0;
	m_lastPts = -1;
	m_audiopts = 0;
	m_ringbuffersize = RingbufferSize;
	// Slightly oversized so a full ring plus partial packet fits.
	m_pdata = new BufferQueue(RingbufferSize + 2048);
	m_pdata->push(buffer, readSize);
	m_firstTimeStamp = getMpegTimeStamp(buffer + PSMF_FIRST_TIMESTAMP_OFFSET);
	m_lastTimeStamp = getMpegTimeStamp(buffer + PSMF_LAST_TIMESTAMP_OFFSET);
	// Offset to the MPEG data, stored big-endian at header offset 8.
	int mpegoffset = (int)(*(s32_be*)(buffer + 8));
	m_demux = new MpegDemux(RingbufferSize + 2048, mpegoffset);
	m_demux->addStreamData(buffer, readSize);
	return true;
}
409
// Rebuilds the stream state from the cached 2048-byte PSMF header
// (used when restoring a savestate).
bool MediaEngine::reloadStream()
{
	return loadStream(m_mpegheader, 2048, m_ringbuffersize);
}
414
// Registers an H.264 video stream with the FFmpeg format context.
// streamId == -1 derives the MPEG stream id from streamNum.
// Always bumps m_expectedVideoStreams so savestate restore can re-add
// streams even when FFmpeg registration fails. Returns true on success
// (or if the stream already exists).
bool MediaEngine::addVideoStream(int streamNum, int streamId) {
#ifdef USE_FFMPEG
	if (m_pFormatCtx) {
		// no need to add an existing stream.
		if ((u32)streamNum < m_pFormatCtx->nb_streams)
			return true;
		const AVCodec *h264_codec = avcodec_find_decoder(AV_CODEC_ID_H264);
		if (!h264_codec)
			return false;
		AVStream *stream = avformat_new_stream(m_pFormatCtx, h264_codec);
		if (stream) {
			// Reference ISO/IEC 13818-1.
			if (streamId == -1)
				streamId = PSMF_VIDEO_STREAM_ID | streamNum;

			// PES video stream ids live in the 0x1xx range.
			stream->id = 0x00000100 | streamId;
#if LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(57, 33, 100)
			stream->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
			stream->codecpar->codec_id = AV_CODEC_ID_H264;
#else
			stream->request_probe = 0;
#endif
			stream->need_parsing = AVSTREAM_PARSE_FULL;
			// We could set the width here, but we don't need to.
			if (streamNum >= m_expectedVideoStreams) {
				++m_expectedVideoStreams;
			}
			return true;
		}
	}
#endif
	if (streamNum >= m_expectedVideoStreams) {
		++m_expectedVideoStreams;
	}
	return false;
}
451
// Appends game-provided MPEG data to the ring buffer and demuxer.
// Once 2048 bytes (one full PSMF header sector) are queued and no FFmpeg
// context exists yet, caches the header and opens the decode context.
// Returns the number of bytes accepted (0 if the ring buffer was full).
int MediaEngine::addStreamData(const u8 *buffer, int addSize) {
	int size = addSize;
	if (size > 0 && m_pdata) {
		if (!m_pdata->push(buffer, size))
			size = 0;
		if (m_demux) {
			m_demux->addStreamData(buffer, addSize);
		}
#ifdef USE_FFMPEG
		if (!m_pFormatCtx && m_pdata->getQueueSize() >= 2048) {
			m_mpegheaderSize = m_pdata->get_front(m_mpegheader, sizeof(m_mpegheader));
			// Big-endian stream offset at header offset 8 delimits the header.
			int streamOffset = (int)(*(s32_be *)(m_mpegheader + 8));
			if (streamOffset <= m_mpegheaderSize) {
				m_mpegheaderSize = streamOffset;
				// Drop the header bytes from the queue; MpegReadbuffer serves
				// them from m_mpegheader instead.
				m_pdata->pop_front(0, m_mpegheaderSize);
				openContext();
			}
		}
#endif // USE_FFMPEG

		// We added data, so... not the end anymore?
		m_isVideoEnd = false;
	}
	return size;
}
477
// Seeks forward by decoding (and discarding) frames until the video pts
// reaches the target, keeping audio within ~2 frames of video so they stay
// in sync. The shared timeout bounds total work; hitting it returns true
// (best effort) rather than failing the seek.
bool MediaEngine::seekTo(s64 timestamp, int videoPixelMode) {
	if (timestamp <= 0) {
		return true;
	}

	// Just doing it the not so great way to be sure audio is in sync.
	int timeout = 1000;
	// 3003 = one frame at 29.97fps (90kHz clock); 4180 = one audio frame.
	while (getVideoTimeStamp() < timestamp - 3003) {
		if (getAudioTimeStamp() < getVideoTimeStamp() - 4180 * 2) {
			getNextAudioFrame(NULL, NULL, NULL);
		}
		// skipFrame=true: decode without color conversion.
		if (!stepVideo(videoPixelMode, true)) {
			return false;
		}
		if (--timeout <= 0) {
			return true;
		}
	}

	// Catch the audio clock up to the (now seeked) video clock.
	while (getAudioTimeStamp() < getVideoTimeStamp() - 4180 * 2) {
		if (getNextAudioFrame(NULL, NULL, NULL) == 0) {
			return false;
		}
		if (--timeout <= 0) {
			return true;
		}
	}

	return true;
}
508
setVideoStream(int streamNum,bool force)509 bool MediaEngine::setVideoStream(int streamNum, bool force) {
510 if (m_videoStream == streamNum && !force) {
511 // Yay, nothing to do.
512 return true;
513 }
514
515 #ifdef USE_FFMPEG
516 if (m_pFormatCtx && m_pCodecCtxs.find(streamNum) == m_pCodecCtxs.end()) {
517 // Get a pointer to the codec context for the video stream
518 if ((u32)streamNum >= m_pFormatCtx->nb_streams) {
519 return false;
520 }
521
522 AVStream *stream = m_pFormatCtx->streams[streamNum];
523 #if LIBAVFORMAT_VERSION_INT >= AV_VERSION_INT(57, 33, 100)
524 AVCodec *pCodec = avcodec_find_decoder(stream->codecpar->codec_id);
525 if (!pCodec) {
526 WARN_LOG_REPORT(ME, "Could not find decoder for %d", (int)stream->codecpar->codec_id);
527 return false;
528 }
529 AVCodecContext *m_pCodecCtx = avcodec_alloc_context3(pCodec);
530 int paramResult = avcodec_parameters_to_context(m_pCodecCtx, stream->codecpar);
531 if (paramResult < 0) {
532 WARN_LOG_REPORT(ME, "Failed to prepare context parameters: %08x", paramResult);
533 return false;
534 }
535 #else
536 AVCodecContext *m_pCodecCtx = stream->codec;
537 // Find the decoder for the video stream
538 AVCodec *pCodec = avcodec_find_decoder(m_pCodecCtx->codec_id);
539 if (pCodec == nullptr) {
540 return false;
541 }
542 #endif
543
544 m_pCodecCtx->flags |= AV_CODEC_FLAG_OUTPUT_CORRUPT | AV_CODEC_FLAG_LOW_DELAY;
545
546 AVDictionary *opt = nullptr;
547 // Allow ffmpeg to use any number of threads it wants. Without this, it doesn't use threads.
548 av_dict_set(&opt, "threads", "0", 0);
549 int openResult = avcodec_open2(m_pCodecCtx, pCodec, &opt);
550 av_dict_free(&opt);
551 if (openResult < 0) {
552 return false;
553 }
554
555 m_pCodecCtxs[streamNum] = m_pCodecCtx;
556 }
557 #endif
558 m_videoStream = streamNum;
559
560 return true;
561 }
562
setVideoDim(int width,int height)563 bool MediaEngine::setVideoDim(int width, int height)
564 {
565 #ifdef USE_FFMPEG
566 auto codecIter = m_pCodecCtxs.find(m_videoStream);
567 if (codecIter == m_pCodecCtxs.end())
568 return false;
569 AVCodecContext *m_pCodecCtx = codecIter->second;
570
571 if (width == 0 && height == 0)
572 {
573 // use the orignal video size
574 m_desWidth = m_pCodecCtx->width;
575 m_desHeight = m_pCodecCtx->height;
576 }
577 else
578 {
579 m_desWidth = width;
580 m_desHeight = height;
581 }
582
583 // Allocate video frame
584 if (!m_pFrame) {
585 m_pFrame = av_frame_alloc();
586 }
587
588 sws_freeContext(m_sws_ctx);
589 m_sws_ctx = NULL;
590 m_sws_fmt = -1;
591
592 if (m_desWidth == 0 || m_desHeight == 0) {
593 // Can't setup SWS yet, so stop for now.
594 return false;
595 }
596
597 updateSwsFormat(GE_CMODE_32BIT_ABGR8888);
598
599 // Allocate video frame for RGB24
600 m_pFrameRGB = av_frame_alloc();
601 #if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(57, 12, 100)
602 int numBytes = av_image_get_buffer_size((AVPixelFormat)m_sws_fmt, m_desWidth, m_desHeight, 1);
603 #else
604 int numBytes = avpicture_get_size((AVPixelFormat)m_sws_fmt, m_desWidth, m_desHeight);
605 #endif
606 m_buffer = (u8*)av_malloc(numBytes * sizeof(uint8_t));
607
608 // Assign appropriate parts of buffer to image planes in m_pFrameRGB
609 #if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(57, 12, 100)
610 av_image_fill_arrays(m_pFrameRGB->data, m_pFrameRGB->linesize, m_buffer, (AVPixelFormat)m_sws_fmt, m_desWidth, m_desHeight, 1);
611 #else
612 avpicture_fill((AVPicture *)m_pFrameRGB, m_buffer, (AVPixelFormat)m_sws_fmt, m_desWidth, m_desHeight);
613 #endif
614 #endif // USE_FFMPEG
615 return true;
616 }
617
// Rebuilds the swscale context if the requested PSP pixel mode differs from
// the current one, and forces full-range color in/out. No-op without a
// codec context or when the format already matches.
void MediaEngine::updateSwsFormat(int videoPixelMode) {
#ifdef USE_FFMPEG
	auto codecIter = m_pCodecCtxs.find(m_videoStream);
	AVCodecContext *m_pCodecCtx = codecIter == m_pCodecCtxs.end() ? 0 : codecIter->second;

	AVPixelFormat swsDesired = getSwsFormat(videoPixelMode);
	if (swsDesired != m_sws_fmt && m_pCodecCtx != 0) {
		m_sws_fmt = swsDesired;
		// Reuses the existing context when parameters match.
		m_sws_ctx = sws_getCachedContext
			(
				m_sws_ctx,
				m_pCodecCtx->width,
				m_pCodecCtx->height,
				m_pCodecCtx->pix_fmt,
				m_desWidth,
				m_desHeight,
				(AVPixelFormat)m_sws_fmt,
				SWS_BILINEAR,
				NULL,
				NULL,
				NULL
			);

		int *inv_coefficients;
		int *coefficients;
		int srcRange, dstRange;
		int brightness, contrast, saturation;

		// Force limited-range (MPEG) YUV on both ends; -1 means the context
		// doesn't support colorspace details.
		if (sws_getColorspaceDetails(m_sws_ctx, &inv_coefficients, &srcRange, &coefficients, &dstRange, &brightness, &contrast, &saturation) != -1) {
			srcRange = 0;
			dstRange = 0;
			sws_setColorspaceDetails(m_sws_ctx, inv_coefficients, srcRange, coefficients, dstRange, brightness, contrast, saturation);
		}
	}
#endif
}
654
// Decodes the next video frame from the selected stream, converting it to
// the requested PSP pixel format unless skipFrame is set. Advances
// m_videopts by the frame duration (or 29.97fps worth when unknown).
// Returns true when a frame was produced; sets m_isVideoEnd at stream end.
bool MediaEngine::stepVideo(int videoPixelMode, bool skipFrame) {
#ifdef USE_FFMPEG
	auto codecIter = m_pCodecCtxs.find(m_videoStream);
	AVCodecContext *m_pCodecCtx = codecIter == m_pCodecCtxs.end() ? 0 : codecIter->second;

	if (!m_pFormatCtx)
		return false;
	if (!m_pCodecCtx)
		return false;
	if (!m_pFrame)
		return false;

	AVPacket packet;
	av_init_packet(&packet);
	int frameFinished;
	bool bGetFrame = false;
	while (!bGetFrame) {
		bool dataEnd = av_read_frame(m_pFormatCtx, &packet) < 0;
		// Even if we've read all frames, some may have been re-ordered frames at the end.
		// Still need to decode those, so keep calling avcodec_decode_video2() / avcodec_receive_frame().
		if (dataEnd || packet.stream_index == m_videoStream) {
			// avcodec_decode_video2() / avcodec_send_packet() gives us the re-ordered frames with a NULL packet.
#if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(57, 12, 100)
			if (dataEnd)
				av_packet_unref(&packet);
#else
			if (dataEnd)
				av_free_packet(&packet);
#endif

#if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(57, 48, 101)
			// New send/receive API: an empty packet flushes out delayed frames.
			if (packet.size != 0)
				avcodec_send_packet(m_pCodecCtx, &packet);
			int result = avcodec_receive_frame(m_pCodecCtx, m_pFrame);
			if (result == 0) {
				result = m_pFrame->pkt_size;
				frameFinished = 1;
			} else if (result == AVERROR(EAGAIN)) {
				// Decoder needs more input; not an error.
				result = 0;
				frameFinished = 0;
			} else {
				frameFinished = 0;
			}
#else
			int result = avcodec_decode_video2(m_pCodecCtx, m_pFrame, &frameFinished, &packet);
#endif
			if (frameFinished) {
				if (!m_pFrameRGB) {
					// First decoded frame: set up the RGB destination lazily.
					setVideoDim();
				}
				if (m_pFrameRGB && !skipFrame) {
					updateSwsFormat(videoPixelMode);
					// TODO: Technically we could set this to frameWidth instead of m_desWidth for better perf.
					// Update the linesize for the new format too.  We started with the largest size, so it should fit.
					m_pFrameRGB->linesize[0] = getPixelFormatBytes(videoPixelMode) * m_desWidth;

					sws_scale(m_sws_ctx, m_pFrame->data, m_pFrame->linesize, 0,
						m_pCodecCtx->height, m_pFrameRGB->data, m_pFrameRGB->linesize);
				}

#if LIBAVUTIL_VERSION_INT >= AV_VERSION_INT(55, 58, 100)
				int64_t bestPts = m_pFrame->best_effort_timestamp;
				int64_t ptsDuration = m_pFrame->pkt_duration;
#else
				int64_t bestPts = av_frame_get_best_effort_timestamp(m_pFrame);
				int64_t ptsDuration = av_frame_get_pkt_duration(m_pFrame);
#endif
				// Advance the video clock: prefer real pts + duration, fall
				// back to a fixed 3003-tick (29.97fps) step when unknown or
				// when pts hasn't moved since last frame.
				if (ptsDuration == 0) {
					if (m_lastPts == bestPts - m_firstTimeStamp || bestPts == AV_NOPTS_VALUE) {
						// TODO: Assuming 29.97 if missing.
						m_videopts += 3003;
					} else {
						m_videopts = bestPts - m_firstTimeStamp;
						m_lastPts = m_videopts;
					}
				} else if (bestPts != AV_NOPTS_VALUE) {
					m_videopts = bestPts + ptsDuration - m_firstTimeStamp;
					m_lastPts = m_videopts;
				} else {
					m_videopts += ptsDuration;
					m_lastPts = m_videopts;
				}
				bGetFrame = true;
			}
			if (result <= 0 && dataEnd) {
				// Sometimes, m_readSize is less than m_streamSize at the end, but not by much.
				// This is kinda a hack, but the ringbuffer would have to be prematurely empty too.
				m_isVideoEnd = !bGetFrame && (m_pdata->getQueueSize() == 0);
				if (m_isVideoEnd)
					m_decodingsize = 0;
				break;
			}
		}
#if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(57, 12, 100)
		av_packet_unref(&packet);
#else
		av_free_packet(&packet);
#endif
	}
	return bGetFrame;
#else
	// If video engine is not available, just add to the timestamp at least.
	m_videopts += 3003;
	return true;
#endif // USE_FFMPEG
}
761
762 // Helpers that null out alpha (which seems to be the case on the PSP.)
763 // Some games depend on this, for example Sword Art Online (doesn't clear A's from buffer.)
writeVideoLineRGBA(void * destp,const void * srcp,int width)764 inline void writeVideoLineRGBA(void *destp, const void *srcp, int width) {
765 // TODO: Use SSE/NEON, investigate why AV_PIX_FMT_RGB0 does not work.
766 u32_le *dest = (u32_le *)destp;
767 const u32_le *src = (u32_le *)srcp;
768
769 const u32 mask = 0x00FFFFFF;
770 for (int i = 0; i < width; ++i) {
771 dest[i] = src[i] & mask;
772 }
773 }
774
// 5650 has no alpha bit, so the scanline can be copied verbatim.
inline void writeVideoLineABGR5650(void *destp, const void *srcp, int width) {
	memcpy(destp, srcp, width * sizeof(u16));
}
778
writeVideoLineABGR5551(void * destp,const void * srcp,int width)779 inline void writeVideoLineABGR5551(void *destp, const void *srcp, int width) {
780 // TODO: Use SSE/NEON.
781 u16_le *dest = (u16_le *)destp;
782 const u16_le *src = (u16_le *)srcp;
783
784 const u16 mask = 0x7FFF;
785 for (int i = 0; i < width; ++i) {
786 dest[i] = src[i] & mask;
787 }
788 }
789
writeVideoLineABGR4444(void * destp,const void * srcp,int width)790 inline void writeVideoLineABGR4444(void *destp, const void *srcp, int width) {
791 // TODO: Use SSE/NEON.
792 u16_le *dest = (u16_le *)destp;
793 const u16_le *src = (u16_le *)srcp;
794
795 const u16 mask = 0x0FFF;
796 for (int i = 0; i < width; ++i) {
797 dest[i] = src[i] & mask;
798 }
799 }
800
// Copies the current decoded frame into guest memory at bufferPtr, one
// scanline at a time (stride = frameWidth pixels), nulling alpha per the
// PSP's behavior. Frames written to the swizzled VRAM mirror are staged in
// a temp buffer and swizzled on the way out. Returns bytes written (0 on
// invalid input or when no frame is available).
int MediaEngine::writeVideoImage(u32 bufferPtr, int frameWidth, int videoPixelMode) {
	if (!Memory::IsValidAddress(bufferPtr) || frameWidth > 2048) {
		// Clearly invalid values.  Let's just not.
		ERROR_LOG_REPORT(ME, "Ignoring invalid video decode address %08x/%x", bufferPtr, frameWidth);
		return 0;
	}

	u8 *buffer = Memory::GetPointer(bufferPtr);

#ifdef USE_FFMPEG
	if (!m_pFrame || !m_pFrameRGB)
		return 0;

	// lock the image size
	int height = m_desHeight;
	int width = m_desWidth;
	u8 *imgbuf = buffer;
	const u8 *data = m_pFrameRGB->data[0];

	// Destination stride in bytes, per the target pixel mode.
	// Unknown modes leave this 0, so the copy below writes nothing.
	int videoLineSize = 0;
	switch (videoPixelMode) {
	case GE_CMODE_32BIT_ABGR8888:
		videoLineSize = frameWidth * sizeof(u32);
		break;
	case GE_CMODE_16BIT_BGR5650:
	case GE_CMODE_16BIT_ABGR5551:
	case GE_CMODE_16BIT_ABGR4444:
		videoLineSize = frameWidth * sizeof(u16);
		break;
	}

	int videoImageSize = videoLineSize * height;

	// The 0x00200000 mirror of VRAM is the swizzled view.
	bool swizzle = Memory::IsVRAMAddress(bufferPtr) && (bufferPtr & 0x00200000) == 0x00200000;
	if (swizzle) {
		imgbuf = new u8[videoImageSize];
	}

	switch (videoPixelMode) {
	case GE_CMODE_32BIT_ABGR8888:
		for (int y = 0; y < height; y++) {
			writeVideoLineRGBA(imgbuf + videoLineSize * y, data, width);
			data += width * sizeof(u32);
		}
		break;

	case GE_CMODE_16BIT_BGR5650:
		for (int y = 0; y < height; y++) {
			writeVideoLineABGR5650(imgbuf + videoLineSize * y, data, width);
			data += width * sizeof(u16);
		}
		break;

	case GE_CMODE_16BIT_ABGR5551:
		for (int y = 0; y < height; y++) {
			writeVideoLineABGR5551(imgbuf + videoLineSize * y, data, width);
			data += width * sizeof(u16);
		}
		break;

	case GE_CMODE_16BIT_ABGR4444:
		for (int y = 0; y < height; y++) {
			writeVideoLineABGR4444(imgbuf + videoLineSize * y, data, width);
			data += width * sizeof(u16);
		}
		break;

	default:
		ERROR_LOG_REPORT(ME, "Unsupported video pixel format %d", videoPixelMode);
		break;
	}

	if (swizzle) {
		const int bxc = videoLineSize / 16;
		int byc = (height + 7) / 8;
		if (byc == 0)
			byc = 1;

		DoSwizzleTex16((const u32 *)imgbuf, buffer, bxc, byc, videoLineSize);
		delete [] imgbuf;
	}

	NotifyMemInfo(MemBlockFlags::WRITE, bufferPtr, videoImageSize, "VideoDecode");

	return videoImageSize;
#endif // USE_FFMPEG
	return 0;
}
889
// Like writeVideoImage(), but copies only the sub-rectangle
// (xpos, ypos, width, height) of the decoded frame. width/height are
// clamped to the frame bounds. NOTE(review): videoImageSize (and the temp
// swizzle buffer / NotifyMemInfo size) is computed from the UNclamped
// height, so a clamped copy leaves part of that region unwritten - looks
// intentional for reporting the requested size, but verify against callers.
int MediaEngine::writeVideoImageWithRange(u32 bufferPtr, int frameWidth, int videoPixelMode,
	                             int xpos, int ypos, int width, int height) {
	if (!Memory::IsValidAddress(bufferPtr) || frameWidth > 2048) {
		// Clearly invalid values.  Let's just not.
		ERROR_LOG_REPORT(ME, "Ignoring invalid video decode address %08x/%x", bufferPtr, frameWidth);
		return 0;
	}

	u8 *buffer = Memory::GetPointer(bufferPtr);

#ifdef USE_FFMPEG
	if (!m_pFrame || !m_pFrameRGB)
		return 0;

	// lock the image size
	u8 *imgbuf = buffer;
	const u8 *data = m_pFrameRGB->data[0];

	// Destination stride in bytes; 0 for unknown modes (nothing written).
	int videoLineSize = 0;
	switch (videoPixelMode) {
	case GE_CMODE_32BIT_ABGR8888:
		videoLineSize = frameWidth * sizeof(u32);
		break;
	case GE_CMODE_16BIT_BGR5650:
	case GE_CMODE_16BIT_ABGR5551:
	case GE_CMODE_16BIT_ABGR4444:
		videoLineSize = frameWidth * sizeof(u16);
		break;
	}

	int videoImageSize = videoLineSize * height;
	// The 0x00200000 mirror of VRAM is the swizzled view.
	bool swizzle = Memory::IsVRAMAddress(bufferPtr) && (bufferPtr & 0x00200000) == 0x00200000;
	if (swizzle) {
		imgbuf = new u8[videoImageSize];
	}

	// Clamp the requested rectangle to the frame.
	if (width > m_desWidth - xpos)
		width = m_desWidth - xpos;
	if (height > m_desHeight - ypos)
		height = m_desHeight - ypos;

	switch (videoPixelMode) {
	case GE_CMODE_32BIT_ABGR8888:
		data += (ypos * m_desWidth + xpos) * sizeof(u32);
		for (int y = 0; y < height; y++) {
			writeVideoLineRGBA(imgbuf, data, width);
			data += m_desWidth * sizeof(u32);
			imgbuf += videoLineSize;
		}
		break;

	case GE_CMODE_16BIT_BGR5650:
		data += (ypos * m_desWidth + xpos) * sizeof(u16);
		for (int y = 0; y < height; y++) {
			writeVideoLineABGR5650(imgbuf, data, width);
			data += m_desWidth * sizeof(u16);
			imgbuf += videoLineSize;
		}
		break;

	case GE_CMODE_16BIT_ABGR5551:
		data += (ypos * m_desWidth + xpos) * sizeof(u16);
		for (int y = 0; y < height; y++) {
			writeVideoLineABGR5551(imgbuf, data, width);
			data += m_desWidth * sizeof(u16);
			imgbuf += videoLineSize;
		}
		break;

	case GE_CMODE_16BIT_ABGR4444:
		data += (ypos * m_desWidth + xpos) * sizeof(u16);
		for (int y = 0; y < height; y++) {
			writeVideoLineABGR4444(imgbuf, data, width);
			data += m_desWidth * sizeof(u16);
			imgbuf += videoLineSize;
		}
		break;

	default:
		ERROR_LOG_REPORT(ME, "Unsupported video pixel format %d", videoPixelMode);
		break;
	}

	if (swizzle) {
		WARN_LOG_REPORT_ONCE(vidswizzle, ME, "Swizzling Video with range");

		// Note: imgbuf has advanced past the copied lines; the swizzle reads
		// from the start of the temp buffer allocated above.
		const int bxc = videoLineSize / 16;
		int byc = (height + 7) / 8;
		if (byc == 0)
			byc = 1;

		DoSwizzleTex16((const u32 *)imgbuf, buffer, bxc, byc, videoLineSize);
		delete [] imgbuf;
	}
	NotifyMemInfo(MemBlockFlags::WRITE, bufferPtr, videoImageSize, "VideoDecodeRange");

	return videoImageSize;
#endif // USE_FFMPEG
	return 0;
}
990
getFrameImage()991 u8 *MediaEngine::getFrameImage() {
992 #ifdef USE_FFMPEG
993 return m_pFrameRGB->data[0];
994 #else
995 return NULL;
996 #endif
997 }
998
getRemainSize()999 int MediaEngine::getRemainSize() {
1000 if (!m_pdata)
1001 return 0;
1002 return std::max(m_pdata->getRemainSize() - m_decodingsize - 2048, 0);
1003 }
1004
getAudioRemainSize()1005 int MediaEngine::getAudioRemainSize() {
1006 if (!m_demux) {
1007 // No audio, so it can't be full, return video instead.
1008 return getRemainSize();
1009 }
1010
1011 return m_demux->getRemainSize();
1012 }
1013
// Demuxes and returns the next audio frame for the selected stream.
// Advances m_audiopts by one frame (4180 ticks), resyncing to the real pts
// when the demuxer provides one. Returns the frame size from the demuxer.
// NOTE(review): assumes m_demux is non-null - callers must check first.
int MediaEngine::getNextAudioFrame(u8 **buf, int *headerCode1, int *headerCode2) {
	// When getting a frame, increment pts
	m_audiopts += 4180;

	// Demux now (rather than on add data) so that we select the right stream.
	m_demux->demux(m_audioStream);

	s64 pts = 0;
	int result = m_demux->getNextAudioFrame(buf, headerCode1, headerCode2, &pts);
	if (pts != 0) {
		// m_audiopts is supposed to be after the returned frame.
		m_audiopts = pts - m_firstTimeStamp + 4180;
	}
	return result;
}
1029
getAudioSamples(u32 bufferPtr)1030 int MediaEngine::getAudioSamples(u32 bufferPtr) {
1031 if (!Memory::IsValidAddress(bufferPtr)) {
1032 ERROR_LOG_REPORT(ME, "Ignoring bad audio decode address %08x during video playback", bufferPtr);
1033 }
1034
1035 u8 *buffer = Memory::GetPointer(bufferPtr);
1036 if (!m_demux) {
1037 return 0;
1038 }
1039
1040 u8 *audioFrame = 0;
1041 int headerCode1, headerCode2;
1042 int frameSize = getNextAudioFrame(&audioFrame, &headerCode1, &headerCode2);
1043 if (frameSize == 0) {
1044 return 0;
1045 }
1046 int outbytes = 0;
1047
1048 if (m_audioContext != nullptr) {
1049 if (headerCode1 == 0x24) {
1050 // This means mono audio - tell the decoder to expect it before the first frame.
1051 // Note that it will always send us back stereo audio.
1052 m_audioContext->SetChannels(1);
1053 }
1054
1055 if (!m_audioContext->Decode(audioFrame, frameSize, buffer, &outbytes)) {
1056 ERROR_LOG(ME, "Audio (%s) decode failed during video playback", GetCodecName(m_audioType));
1057 }
1058
1059 NotifyMemInfo(MemBlockFlags::WRITE, bufferPtr, outbytes, "VideoDecodeAudio");
1060 }
1061
1062 return 0x2000;
1063 }
1064
// Returns true when no audio frame is currently available. Demuxes first
// so the answer reflects the latest stream data.
bool MediaEngine::IsNoAudioData() {
	if (!m_demux) {
		return true;
	}

	// Let's double check.  Here should be a safe enough place to demux.
	m_demux->demux(m_audioStream);
	return !m_demux->hasNextAudioFrame(NULL, NULL, NULL, NULL);
}
1074
// True once at least one audio frame has been consumed (getAudioTimeStamp()
// returns -1 until the demuxer exists and pts has advanced).
bool MediaEngine::IsActuallyPlayingAudio() {
	return getAudioTimeStamp() >= 0;
}
1078
// Current video clock, in 90kHz ticks relative to the first timestamp.
s64 MediaEngine::getVideoTimeStamp() {
	return m_videopts;
}
1082
getAudioTimeStamp()1083 s64 MediaEngine::getAudioTimeStamp() {
1084 return m_demux ? m_audiopts - 4180 : -1;
1085 }
1086
getLastTimeStamp()1087 s64 MediaEngine::getLastTimeStamp() {
1088 if (!m_pdata)
1089 return 0;
1090 return m_lastTimeStamp - m_firstTimeStamp;
1091 }
1092