/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this file,
 * You can obtain one at http://mozilla.org/MPL/2.0/. */

#include <cstdio>
#include <queue>

#include "common/browser_logging/CSFLog.h"
#include "nspr.h"

#include "JavaCallbacksSupport.h"
#include "MediaCodec.h"
#include "WebrtcMediaCodecVP8VideoCodec.h"
#include "mozilla/ArrayUtils.h"
#include "nsThreadUtils.h"
#include "mozilla/Monitor.h"
#include "transport/runnable_utils.h"
#include "MediaResult.h"

#include "AudioConduit.h"
#include "VideoConduit.h"
#include "libyuv/convert_from.h"
#include "libyuv/convert.h"
#include "libyuv/row.h"

#include "webrtc/modules/video_coding/include/video_error_codes.h"

#include "webrtc/api/video/i420_buffer.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"

using namespace mozilla;

static const int32_t DECODER_TIMEOUT = 10 * PR_USEC_PER_MSEC;  // 10ms
static const char MEDIACODEC_VIDEO_MIME_VP8[] = "video/x-vnd.on2.vp8";

namespace mozilla {

static const char* wmcLogTag = "WebrtcMediaCodecVP8VideoCodec";
#ifdef LOGTAG
#  undef LOGTAG
#endif
#define LOGTAG wmcLogTag
43
44 class CallbacksSupport final : public JavaCallbacksSupport {
45 public:
CallbacksSupport(webrtc::EncodedImageCallback * aCallback)46 explicit CallbacksSupport(webrtc::EncodedImageCallback* aCallback)
47 : mCallback(aCallback), mPictureId(0) {
48 CSFLogDebug(LOGTAG, "%s %p", __FUNCTION__, this);
49 memset(&mEncodedImage, 0, sizeof(mEncodedImage));
50 }
51
~CallbacksSupport()52 ~CallbacksSupport() {
53 CSFLogDebug(LOGTAG, "%s %p", __FUNCTION__, this);
54 if (mEncodedImage._size) {
55 delete[] mEncodedImage._buffer;
56 mEncodedImage._buffer = nullptr;
57 mEncodedImage._size = 0;
58 }
59 }
60
VerifyAndAllocate(const uint32_t minimumSize)61 void VerifyAndAllocate(const uint32_t minimumSize) {
62 CSFLogDebug(LOGTAG, "%s %p", __FUNCTION__, this);
63 if (minimumSize > mEncodedImage._size) {
64 uint8_t* newBuffer = new uint8_t[minimumSize];
65 MOZ_RELEASE_ASSERT(newBuffer);
66
67 if (mEncodedImage._buffer) {
68 delete[] mEncodedImage._buffer;
69 }
70 mEncodedImage._buffer = newBuffer;
71 mEncodedImage._size = minimumSize;
72 }
73 }
74
HandleInput(jlong aTimestamp,bool aProcessed)75 void HandleInput(jlong aTimestamp, bool aProcessed) override {
76 CSFLogDebug(LOGTAG, "%s %p", __FUNCTION__, this);
77 }
78
HandleOutputFormatChanged(java::sdk::MediaFormat::Param aFormat)79 void HandleOutputFormatChanged(
80 java::sdk::MediaFormat::Param aFormat) override {
81 CSFLogDebug(LOGTAG, "%s %p", __FUNCTION__, this);
82 }
83
HandleOutput(java::Sample::Param aSample,java::SampleBuffer::Param aBuffer)84 void HandleOutput(java::Sample::Param aSample,
85 java::SampleBuffer::Param aBuffer) override {
86 CSFLogDebug(LOGTAG, "%s %p", __FUNCTION__, this);
87 java::sdk::BufferInfo::LocalRef info = aSample->Info();
88
89 int32_t size;
90 bool ok = NS_SUCCEEDED(info->Size(&size));
91 MOZ_RELEASE_ASSERT(ok);
92
93 if (size > 0) {
94 rtc::CritScope lock(&mCritSect);
95 VerifyAndAllocate(size);
96
97 int64_t presentationTimeUs;
98 ok = NS_SUCCEEDED(info->PresentationTimeUs(&presentationTimeUs));
99 MOZ_RELEASE_ASSERT(ok);
100
101 mEncodedImage._timeStamp = presentationTimeUs / PR_USEC_PER_MSEC;
102 mEncodedImage.capture_time_ms_ = mEncodedImage._timeStamp;
103
104 int32_t flags;
105 ok = NS_SUCCEEDED(info->Flags(&flags));
106 MOZ_ASSERT(ok);
107
108 if (flags == java::sdk::MediaCodec::BUFFER_FLAG_SYNC_FRAME) {
109 mEncodedImage._frameType = webrtc::kVideoFrameKey;
110 } else {
111 mEncodedImage._frameType = webrtc::kVideoFrameDelta;
112 }
113 mEncodedImage._completeFrame = true;
114 mEncodedImage._length = size;
115
116 jni::ByteBuffer::LocalRef dest =
117 jni::ByteBuffer::New(mEncodedImage._buffer, size);
118 aBuffer->WriteToByteBuffer(dest, 0, size);
119
120 webrtc::CodecSpecificInfo info;
121 info.codecType = webrtc::kVideoCodecVP8;
122 info.codecSpecific.VP8.pictureId = mPictureId;
123 mPictureId = (mPictureId + 1) & 0x7FFF;
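      // The mask keeps mPictureId within the 15-bit PictureID field of the
      // VP8 RTP payload descriptor (RFC 7741), which wraps at 0x7FFF.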
      codecInfo.codecSpecific.VP8.tl0PicIdx = -1;
      codecInfo.codecSpecific.VP8.keyIdx = -1;
      codecInfo.codecSpecific.VP8.temporalIdx = 1;
      codecInfo.codecSpecific.VP8.simulcastIdx = 0;

      webrtc::RTPFragmentationHeader header;
      memset(&header, 0, sizeof(header));
      header.VerifyAndAllocateFragmentationHeader(1);
      header.fragmentationLength[0] = mEncodedImage._length;

      MOZ_RELEASE_ASSERT(mCallback);
      mCallback->OnEncodedImage(mEncodedImage, &codecInfo, &header);
    }
  }

  void HandleError(const MediaResult& aError) override {
    CSFLogDebug(LOGTAG, "%s %p", __FUNCTION__, this);
  }

  friend class WebrtcMediaCodecVP8VideoRemoteEncoder;

 private:
  webrtc::EncodedImageCallback* mCallback;
  Atomic<bool> mCanceled;
  webrtc::EncodedImage mEncodedImage;
  rtc::CriticalSection mCritSect;
  uint32_t mPictureId;
};

static java::sdk::MediaCodec::LocalRef CreateDecoder(const char* aMimeType) {
  if (!aMimeType) {
    return nullptr;
  }

  java::sdk::MediaCodec::LocalRef codec;
  java::sdk::MediaCodec::CreateDecoderByType(aMimeType, &codec);
  return codec;
}

static java::sdk::MediaCodec::LocalRef CreateEncoder(const char* aMimeType) {
  if (!aMimeType) {
    return nullptr;
  }

  java::sdk::MediaCodec::LocalRef codec;
  java::sdk::MediaCodec::CreateEncoderByType(aMimeType, &codec);
  return codec;
}

static void ShutdownThread(const nsCOMPtr<nsIThread>& aThread) {
  aThread->Shutdown();
}

// Base runnable class to repeatedly pull MediaCodec output buffers on a
// separate thread. How to use:
// - Implement DrainOutput() to get output. Return false to tell the drain
//   loop not to pop the input queue.
// - Call QueueInput() to schedule a run to drain output. The input, aFrame,
//   should contain the corresponding info, such as image size and timestamps,
//   that the DrainOutput() implementation needs to construct the data passed
//   to the encoded/decoded callbacks.
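//
// A minimal sketch of a subclass (names here are illustrative only; the
// concrete implementation in this file is WebrtcAndroidMediaCodec::OutputDrain
// further below):
//
//   class MyDrain : public MediaCodecOutputDrain {
//    protected:
//     bool DrainOutput() override {
//       // Pull one output buffer; pop mInputFrames (under mMonitor) only
//       // when a frame actually came out.
//       return DrainOneBuffer(mInputFrames, mMonitor);  // hypothetical helper
//     }
//   };
//
//   RefPtr<MyDrain> drain = new MyDrain();
//   drain->Start();
//   drain->QueueInput(frame);  // schedules DrainOutput() on the drain thread
//   ...
//   drain->Stop();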
class MediaCodecOutputDrain : public Runnable {
 public:
  void Start() {
    MonitorAutoLock lock(mMonitor);
    if (mThread == nullptr) {
      NS_NewNamedThread("OutputDrain", getter_AddRefs(mThread));
    }
    mEnding = false;
    mThread->Dispatch(this, NS_DISPATCH_NORMAL);
  }

  void Stop() {
    MonitorAutoLock lock(mMonitor);
    mEnding = true;
    lock.NotifyAll();  // In case Run() is waiting.

    if (mThread != nullptr) {
      MonitorAutoUnlock unlock(mMonitor);
      NS_DispatchToMainThread(
          WrapRunnableNM(&ShutdownThread, nsCOMPtr<nsIThread>(mThread)));
      mThread = nullptr;
    }
  }

  void QueueInput(const EncodedFrame& aFrame) {
    MonitorAutoLock lock(mMonitor);

    MOZ_ASSERT(mThread);

    mInputFrames.push(aFrame);
    // Notify Run() about queued input so it can start working.
    lock.NotifyAll();
  }

  NS_IMETHOD Run() override {
    MOZ_ASSERT(mThread);

    MonitorAutoLock lock(mMonitor);
    while (true) {
      if (mInputFrames.empty()) {
        // Wait for new input.
        lock.Wait();
      }

      if (mEnding) {
        // Stop draining.
        break;
      }

      MOZ_ASSERT(!mInputFrames.empty());
      {
        // Release the monitor while draining because it's blocking.
        MonitorAutoUnlock unlock(mMonitor);
        DrainOutput();
      }
    }

    return NS_OK;
  }

 protected:
  MediaCodecOutputDrain()
      : Runnable("MediaCodecOutputDrain"),
        mMonitor("MediaCodecOutputDrain monitor"),
        mEnding(false) {}

  // Drain output buffers for the input frame queue mInputFrames.
  // mInputFrames contains info such as size and time of the input frames.
  // We need a queue because the encoder may skip frames - we can feed 10
  // frames in and get only one back. NOTE: any access to mInputFrames MUST be
  // preceded by locking mMonitor!

  // Blocks waiting for decoded buffers, but only for a limited period,
  // because we need to check for shutdown.
  virtual bool DrainOutput() = 0;

 protected:
  // This monitor protects all things below it, and is also used to
  // wait/notify queued input.
  Monitor mMonitor;
  std::queue<EncodedFrame> mInputFrames;

 private:
  // also protected by mMonitor
  nsCOMPtr<nsIThread> mThread;
  bool mEnding;
};

class WebrtcAndroidMediaCodec {
 public:
  WebrtcAndroidMediaCodec()
      : mEncoderCallback(nullptr),
        mDecoderCallback(nullptr),
        mInputBuffers(nullptr),
        mOutputBuffers(nullptr),
        isStarted(false),
        mEnding(false) {
    CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
  }

  nsresult Configure(uint32_t width, uint32_t height, const jobject aSurface,
                     uint32_t flags, const char* mime, bool encoder) {
    CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
    nsresult res = NS_OK;

    if (!mCoder) {
      mWidth = width;
      mHeight = height;

      java::sdk::MediaFormat::LocalRef format;

      res = java::sdk::MediaFormat::CreateVideoFormat(nsCString(mime), mWidth,
                                                      mHeight, &format);

      if (NS_FAILED(res)) {
        CSFLogDebug(
            LOGTAG,
            "WebrtcAndroidMediaCodec::%s, CreateVideoFormat failed err = %d",
            __FUNCTION__, (int)res);
        return NS_ERROR_FAILURE;
      }

      if (encoder) {
        mCoder = CreateEncoder(mime);

        if (!mCoder) {
          CSFLogDebug(LOGTAG,
                      "WebrtcAndroidMediaCodec::%s, CreateEncoderByType failed",
                      __FUNCTION__);
          return NS_ERROR_FAILURE;
        }

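        // The magic values below match the stock Android MediaCodec
        // constants: bitrate mode 2 is BITRATE_MODE_CBR, color format 21 is
        // COLOR_FormatYUV420SemiPlanar (an NV12-style layout), and
        // KEY_I_FRAME_INTERVAL is expressed in seconds.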
        res = format->SetInteger(java::sdk::MediaFormat::KEY_BIT_RATE,
                                 1000 * 300);
        res = format->SetInteger(java::sdk::MediaFormat::KEY_BITRATE_MODE, 2);
        res = format->SetInteger(java::sdk::MediaFormat::KEY_COLOR_FORMAT, 21);
        res = format->SetInteger(java::sdk::MediaFormat::KEY_FRAME_RATE, 30);
        res = format->SetInteger(java::sdk::MediaFormat::KEY_I_FRAME_INTERVAL,
                                 100);

      } else {
        mCoder = CreateDecoder(mime);
        if (!mCoder) {
          CSFLogDebug(LOGTAG,
                      "WebrtcAndroidMediaCodec::%s, CreateDecoderByType failed",
                      __FUNCTION__);
          return NS_ERROR_FAILURE;
        }
      }
      res = mCoder->Configure(format, nullptr, nullptr, flags);
      if (NS_FAILED(res)) {
        CSFLogDebug(LOGTAG, "WebrtcAndroidMediaCodec::%s, err = %d",
                    __FUNCTION__, (int)res);
      }
    }

    return res;
  }

  nsresult Start() {
    CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);

    if (!mCoder) {
      return NS_ERROR_FAILURE;
    }

    mEnding = false;

    nsresult res;
    res = mCoder->Start();
    if (NS_FAILED(res)) {
      CSFLogDebug(
          LOGTAG,
          "WebrtcAndroidMediaCodec::%s, mCoder->start() return err = %d",
          __FUNCTION__, (int)res);
      return res;
    }
    isStarted = true;
    return NS_OK;
  }

  nsresult Stop() {
    CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
    mEnding = true;

    if (mOutputDrain != nullptr) {
      mOutputDrain->Stop();
      mOutputDrain = nullptr;
    }

    mCoder->Stop();
    mCoder->Release();
    isStarted = false;
    return NS_OK;
  }

  void GenerateVideoFrame(size_t width, size_t height, uint32_t timeStamp,
                          void* decoded, int color_format) {
    CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);

    // TODO: eliminate extra pixel copy/color conversion
    size_t widthUV = (width + 1) / 2;
    rtc::scoped_refptr<webrtc::I420Buffer> buffer;
    buffer = webrtc::I420Buffer::Create(width, height, width, widthUV, widthUV);

    uint8_t* src_nv12 = static_cast<uint8_t*>(decoded);
    int src_nv12_y_size = width * height;

    uint8_t* dstY = buffer->MutableDataY();
    uint8_t* dstU = buffer->MutableDataU();
    uint8_t* dstV = buffer->MutableDataV();

    libyuv::NV12ToI420(src_nv12, width, src_nv12 + src_nv12_y_size,
                       (width + 1) & ~1, dstY, width, dstU, (width + 1) / 2,
                       dstV, (width + 1) / 2, width, height);

    mVideoFrame.reset(
        new webrtc::VideoFrame(buffer, timeStamp, 0, webrtc::kVideoRotation_0));
  }

  int32_t FeedMediaCodecInput(const webrtc::EncodedImage& inputImage,
                              int64_t renderTimeMs) {
#ifdef WEBRTC_MEDIACODEC_DEBUG
    uint32_t time = PR_IntervalNow();
    CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
#endif

    int inputIndex = DequeueInputBuffer(DECODER_TIMEOUT);
    if (inputIndex == -1) {
      CSFLogError(LOGTAG, "%s dequeue input buffer failed", __FUNCTION__);
      return inputIndex;
    }

#ifdef WEBRTC_MEDIACODEC_DEBUG
    CSFLogDebug(LOGTAG, "%s dequeue input buffer took %u ms", __FUNCTION__,
                PR_IntervalToMilliseconds(PR_IntervalNow() - time));
    time = PR_IntervalNow();
#endif

    size_t size = inputImage._length;

    JNIEnv* const env = jni::GetEnvForThread();
    jobject buffer = env->GetObjectArrayElement(mInputBuffers, inputIndex);
    void* directBuffer = env->GetDirectBufferAddress(buffer);

    PodCopy((uint8_t*)directBuffer, inputImage._buffer, size);

    if (inputIndex >= 0) {
      CSFLogError(LOGTAG, "%s queue input buffer inputIndex = %d", __FUNCTION__,
                  inputIndex);
      QueueInputBuffer(inputIndex, 0, size, renderTimeMs, 0);

      {
        if (mOutputDrain == nullptr) {
          mOutputDrain = new OutputDrain(this);
          mOutputDrain->Start();
        }
        EncodedFrame frame;
        frame.width_ = mWidth;
        frame.height_ = mHeight;
        frame.timeStamp_ = inputImage._timeStamp;
        frame.decode_timestamp_ = renderTimeMs;
        mOutputDrain->QueueInput(frame);
      }
      env->DeleteLocalRef(buffer);
    }

    return inputIndex;
  }

  nsresult DrainOutput(std::queue<EncodedFrame>& aInputFrames,
                       Monitor& aMonitor) {
    MOZ_ASSERT(mCoder != nullptr);
    if (mCoder == nullptr) {
      return NS_ERROR_FAILURE;
    }

#ifdef WEBRTC_MEDIACODEC_DEBUG
    uint32_t time = PR_IntervalNow();
#endif
    nsresult res;
    java::sdk::BufferInfo::LocalRef bufferInfo;
    res = java::sdk::BufferInfo::New(&bufferInfo);
    if (NS_FAILED(res)) {
      CSFLogDebug(
          LOGTAG,
          "WebrtcAndroidMediaCodec::%s, BufferInfo::New return err = %d",
          __FUNCTION__, (int)res);
      return res;
    }
    int32_t outputIndex = DequeueOutputBuffer(bufferInfo);

    if (outputIndex == java::sdk::MediaCodec::INFO_TRY_AGAIN_LATER) {
      // Not an error: output not available yet. Try later.
      CSFLogDebug(LOGTAG, "%s dequeue output buffer try again:%d", __FUNCTION__,
                  outputIndex);
    } else if (outputIndex ==
               java::sdk::MediaCodec::INFO_OUTPUT_FORMAT_CHANGED) {
      // handle format change
      CSFLogDebug(LOGTAG, "%s dequeue output buffer format changed:%d",
                  __FUNCTION__, outputIndex);
    } else if (outputIndex ==
               java::sdk::MediaCodec::INFO_OUTPUT_BUFFERS_CHANGED) {
      CSFLogDebug(LOGTAG, "%s dequeue output buffer changed:%d", __FUNCTION__,
                  outputIndex);
      GetOutputBuffers();
    } else if (outputIndex < 0) {
      CSFLogDebug(LOGTAG, "%s dequeue output buffer unknown error:%d",
                  __FUNCTION__, outputIndex);
      MonitorAutoLock lock(aMonitor);
      aInputFrames.pop();
    } else {
#ifdef WEBRTC_MEDIACODEC_DEBUG
      CSFLogDebug(LOGTAG,
                  "%s dequeue output buffer return status is %d took %u ms",
                  __FUNCTION__, outputIndex,
                  PR_IntervalToMilliseconds(PR_IntervalNow() - time));
#endif
      EncodedFrame frame;
      {
        MonitorAutoLock lock(aMonitor);
        frame = aInputFrames.front();
        aInputFrames.pop();
      }

      if (mEnding) {
        ReleaseOutputBuffer(outputIndex, false);
        return NS_OK;
      }

      JNIEnv* const env = jni::GetEnvForThread();
      jobject buffer = env->GetObjectArrayElement(mOutputBuffers, outputIndex);
      if (buffer) {
        // The buffer will be null on Android L if we are decoding to a Surface.
        void* directBuffer = env->GetDirectBufferAddress(buffer);

        int color_format = 0;

        CSFLogDebug(
            LOGTAG,
            "%s generate video frame, width = %d, height = %d, timeStamp_ = %d",
            __FUNCTION__, frame.width_, frame.height_, frame.timeStamp_);
        GenerateVideoFrame(frame.width_, frame.height_, frame.timeStamp_,
                           directBuffer, color_format);
        mDecoderCallback->Decoded(*mVideoFrame);

        ReleaseOutputBuffer(outputIndex, false);
        env->DeleteLocalRef(buffer);
      }
    }
    return NS_OK;
  }

  int32_t DequeueInputBuffer(int64_t time) {
    nsresult res;
    int32_t inputIndex;
    res = mCoder->DequeueInputBuffer(time, &inputIndex);

    if (NS_FAILED(res)) {
      CSFLogDebug(LOGTAG,
                  "WebrtcAndroidMediaCodec::%s, mCoder->DequeueInputBuffer() "
                  "return err = %d",
                  __FUNCTION__, (int)res);
      return -1;
    }
    return inputIndex;
  }

  void QueueInputBuffer(int32_t inputIndex, int32_t offset, size_t size,
                        int64_t renderTimes, int32_t flags) {
    nsresult res = NS_OK;
    res =
        mCoder->QueueInputBuffer(inputIndex, offset, size, renderTimes, flags);

    if (NS_FAILED(res)) {
      CSFLogDebug(LOGTAG,
                  "WebrtcAndroidMediaCodec::%s, mCoder->QueueInputBuffer() "
                  "return err = %d",
                  __FUNCTION__, (int)res);
    }
  }

  int32_t DequeueOutputBuffer(java::sdk::BufferInfo::Param aInfo) {
    nsresult res;

    int32_t outputStatus;
    res = mCoder->DequeueOutputBuffer(aInfo, DECODER_TIMEOUT, &outputStatus);

    if (NS_FAILED(res)) {
      CSFLogDebug(LOGTAG,
                  "WebrtcAndroidMediaCodec::%s, mCoder->DequeueOutputBuffer() "
                  "return err = %d",
                  __FUNCTION__, (int)res);
      return -1;
    }

    return outputStatus;
  }

  void ReleaseOutputBuffer(int32_t index, bool flag) {
    mCoder->ReleaseOutputBuffer(index, flag);
  }

  jobjectArray GetInputBuffers() {
    JNIEnv* const env = jni::GetEnvForThread();

    if (mInputBuffers) {
      env->DeleteGlobalRef(mInputBuffers);
    }

    nsresult res;
    jni::ObjectArray::LocalRef inputBuffers;
    res = mCoder->GetInputBuffers(&inputBuffers);
    mInputBuffers = (jobjectArray)env->NewGlobalRef(inputBuffers.Get());
    if (NS_FAILED(res)) {
      CSFLogDebug(
          LOGTAG,
          "WebrtcAndroidMediaCodec::%s, GetInputBuffers return err = %d",
          __FUNCTION__, (int)res);
      return nullptr;
    }

    return mInputBuffers;
  }

  jobjectArray GetOutputBuffers() {
    JNIEnv* const env = jni::GetEnvForThread();

    if (mOutputBuffers) {
      env->DeleteGlobalRef(mOutputBuffers);
    }

    nsresult res;
    jni::ObjectArray::LocalRef outputBuffers;
    res = mCoder->GetOutputBuffers(&outputBuffers);
    mOutputBuffers = (jobjectArray)env->NewGlobalRef(outputBuffers.Get());
    if (NS_FAILED(res)) {
      CSFLogDebug(
          LOGTAG,
          "WebrtcAndroidMediaCodec::%s, GetOutputBuffers return err = %d",
          __FUNCTION__, (int)res);
      return nullptr;
    }

    return mOutputBuffers;
  }

  void SetDecoderCallback(webrtc::DecodedImageCallback* aCallback) {
    mDecoderCallback = aCallback;
  }

  void SetEncoderCallback(webrtc::EncodedImageCallback* aCallback) {
    mEncoderCallback = aCallback;
  }

 protected:
  virtual ~WebrtcAndroidMediaCodec() {}

 private:
  class OutputDrain : public MediaCodecOutputDrain {
   public:
    explicit OutputDrain(WebrtcAndroidMediaCodec* aMediaCodec)
        : MediaCodecOutputDrain(), mMediaCodec(aMediaCodec) {}

   protected:
    virtual bool DrainOutput() override {
      return (mMediaCodec->DrainOutput(mInputFrames, mMonitor) == NS_OK);
    }

   private:
    WebrtcAndroidMediaCodec* mMediaCodec;
  };

  friend class WebrtcMediaCodecVP8VideoEncoder;
  friend class WebrtcMediaCodecVP8VideoDecoder;

  java::sdk::MediaCodec::GlobalRef mCoder;
  webrtc::EncodedImageCallback* mEncoderCallback;
  webrtc::DecodedImageCallback* mDecoderCallback;
  std::unique_ptr<webrtc::VideoFrame> mVideoFrame;

  jobjectArray mInputBuffers;
  jobjectArray mOutputBuffers;

  RefPtr<OutputDrain> mOutputDrain;
  uint32_t mWidth;
  uint32_t mHeight;
  bool isStarted;
  bool mEnding;

  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(WebrtcAndroidMediaCodec)
};

static bool I420toNV12(uint8_t* dstY, uint16_t* dstUV,
                       const webrtc::VideoFrame& inputImage) {
  rtc::scoped_refptr<webrtc::I420BufferInterface> inputBuffer =
      inputImage.video_frame_buffer()->GetI420();

  uint8_t* buffer = dstY;
  uint8_t* dst_y = buffer;
  int dst_stride_y = inputBuffer->StrideY();
  uint8_t* dst_uv = buffer + inputBuffer->StrideY() * inputImage.height();
  int dst_stride_uv = inputBuffer->StrideU() * 2;

  // Why NV12? Because COLOR_FORMAT_YUV420_SEMIPLANAR. Most hardware is
  // NV12-friendly.
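  // For reference: NV12 is a full-resolution Y plane followed by a single
  // half-resolution plane of interleaved U/V pairs, which is why the UV
  // stride above is twice the I420 U stride.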
  bool converted = !libyuv::I420ToNV12(
      inputBuffer->DataY(), inputBuffer->StrideY(), inputBuffer->DataU(),
      inputBuffer->StrideU(), inputBuffer->DataV(), inputBuffer->StrideV(),
      dst_y, dst_stride_y, dst_uv, dst_stride_uv, inputImage.width(),
      inputImage.height());
  return converted;
}

// Encoder.
WebrtcMediaCodecVP8VideoEncoder::WebrtcMediaCodecVP8VideoEncoder()
    : mCallback(nullptr), mMediaCodecEncoder(nullptr) {
  CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);

  memset(&mEncodedImage, 0, sizeof(mEncodedImage));
}

bool WebrtcMediaCodecVP8VideoEncoder::ResetInputBuffers() {
  mInputBuffers = mMediaCodecEncoder->GetInputBuffers();

  if (!mInputBuffers) return false;

  return true;
}

bool WebrtcMediaCodecVP8VideoEncoder::ResetOutputBuffers() {
  mOutputBuffers = mMediaCodecEncoder->GetOutputBuffers();

  if (!mOutputBuffers) return false;

  return true;
}

int32_t WebrtcMediaCodecVP8VideoEncoder::VerifyAndAllocate(
    const uint32_t minimumSize) {
  if (minimumSize > mEncodedImage._size) {
    // create buffer of sufficient size
    uint8_t* newBuffer = new uint8_t[minimumSize];
    if (newBuffer == nullptr) {
      return -1;
    }
    if (mEncodedImage._buffer) {
      // copy old data
      memcpy(newBuffer, mEncodedImage._buffer, mEncodedImage._size);
      delete[] mEncodedImage._buffer;
    }
    mEncodedImage._buffer = newBuffer;
    mEncodedImage._size = minimumSize;
  }
  return 0;
}

int32_t WebrtcMediaCodecVP8VideoEncoder::InitEncode(
    const webrtc::VideoCodec* codecSettings, int32_t numberOfCores,
    size_t maxPayloadSize) {
  mMaxPayloadSize = maxPayloadSize;
  CSFLogDebug(LOGTAG, "%s, w = %d, h = %d", __FUNCTION__, codecSettings->width,
              codecSettings->height);

  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t WebrtcMediaCodecVP8VideoEncoder::Encode(
    const webrtc::VideoFrame& inputImage,
    const webrtc::CodecSpecificInfo* codecSpecificInfo,
    const std::vector<webrtc::FrameType>* frame_types) {
  CSFLogDebug(LOGTAG, "%s, w = %d, h = %d", __FUNCTION__, inputImage.width(),
              inputImage.height());

  if (!mMediaCodecEncoder) {
    mMediaCodecEncoder = new WebrtcAndroidMediaCodec();
  }

  if (!mMediaCodecEncoder->isStarted) {
    if (inputImage.width() == 0 || inputImage.height() == 0) {
      return WEBRTC_VIDEO_CODEC_ERROR;
    } else {
      mFrameWidth = inputImage.width();
      mFrameHeight = inputImage.height();
    }

    mMediaCodecEncoder->SetEncoderCallback(mCallback);
    nsresult res = mMediaCodecEncoder->Configure(
        mFrameWidth, mFrameHeight, nullptr,
        java::sdk::MediaCodec::CONFIGURE_FLAG_ENCODE, MEDIACODEC_VIDEO_MIME_VP8,
        true /* encoder */);

    if (res != NS_OK) {
      CSFLogDebug(LOGTAG, "%s, encoder configure return err = %d", __FUNCTION__,
                  (int)res);
      return WEBRTC_VIDEO_CODEC_ERROR;
    }

    res = mMediaCodecEncoder->Start();

    if (NS_FAILED(res)) {
      mMediaCodecEncoder->isStarted = false;
      CSFLogDebug(LOGTAG, "%s start encoder. err = %d", __FUNCTION__, (int)res);
      return WEBRTC_VIDEO_CODEC_ERROR;
    }

    bool retBool = ResetInputBuffers();
    if (!retBool) {
      CSFLogDebug(LOGTAG, "%s ResetInputBuffers failed.", __FUNCTION__);
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
    retBool = ResetOutputBuffers();
    if (!retBool) {
      CSFLogDebug(LOGTAG, "%s ResetOutputBuffers failed.", __FUNCTION__);
      return WEBRTC_VIDEO_CODEC_ERROR;
    }

    mMediaCodecEncoder->isStarted = true;
  }

#ifdef WEBRTC_MEDIACODEC_DEBUG
  uint32_t time = PR_IntervalNow();
#endif

  rtc::scoped_refptr<webrtc::I420BufferInterface> inputBuffer =
      inputImage.video_frame_buffer()->GetI420();
  size_t sizeY = inputImage.height() * inputBuffer->StrideY();
  size_t sizeUV = ((inputImage.height() + 1) / 2) * inputBuffer->StrideU();
  size_t size = sizeY + 2 * sizeUV;

  int inputIndex = mMediaCodecEncoder->DequeueInputBuffer(DECODER_TIMEOUT);
  if (inputIndex == -1) {
    CSFLogError(LOGTAG, "%s dequeue input buffer failed", __FUNCTION__);
    return inputIndex;
  }

#ifdef WEBRTC_MEDIACODEC_DEBUG
  CSFLogDebug(LOGTAG,
              "%s WebrtcMediaCodecVP8VideoEncoder::Encode() dequeue OMX input "
              "buffer took %u ms",
              __FUNCTION__, PR_IntervalToMilliseconds(PR_IntervalNow() - time));
#endif

  if (inputIndex >= 0) {
    JNIEnv* const env = jni::GetEnvForThread();
    jobject buffer = env->GetObjectArrayElement(mInputBuffers, inputIndex);
    void* directBuffer = env->GetDirectBufferAddress(buffer);

    uint8_t* dstY = static_cast<uint8_t*>(directBuffer);
    uint16_t* dstUV = reinterpret_cast<uint16_t*>(dstY + sizeY);

    bool converted = I420toNV12(dstY, dstUV, inputImage);
    if (!converted) {
      CSFLogError(LOGTAG,
                  "%s WebrtcMediaCodecVP8VideoEncoder::Encode() convert input "
                  "buffer to NV12 error.",
                  __FUNCTION__);
      return WEBRTC_VIDEO_CODEC_ERROR;
    }

    env->DeleteLocalRef(buffer);

#ifdef WEBRTC_MEDIACODEC_DEBUG
    time = PR_IntervalNow();
    CSFLogError(LOGTAG, "%s queue input buffer inputIndex = %d", __FUNCTION__,
                inputIndex);
#endif

    mMediaCodecEncoder->QueueInputBuffer(
        inputIndex, 0, size,
        inputImage.render_time_ms() * PR_USEC_PER_MSEC /* ms to us */, 0);
#ifdef WEBRTC_MEDIACODEC_DEBUG
    CSFLogDebug(LOGTAG,
                "%s WebrtcMediaCodecVP8VideoEncoder::Encode() queue input "
                "buffer took %u ms",
                __FUNCTION__,
                PR_IntervalToMilliseconds(PR_IntervalNow() - time));
#endif
    mEncodedImage._encodedWidth = inputImage.width();
    mEncodedImage._encodedHeight = inputImage.height();
    mEncodedImage._timeStamp = inputImage.timestamp();
    mEncodedImage.capture_time_ms_ = inputImage.timestamp();

    nsresult res;
    java::sdk::BufferInfo::LocalRef bufferInfo;
    res = java::sdk::BufferInfo::New(&bufferInfo);
    if (NS_FAILED(res)) {
      CSFLogDebug(LOGTAG,
                  "WebrtcMediaCodecVP8VideoEncoder::%s, BufferInfo::New return "
                  "err = %d",
                  __FUNCTION__, (int)res);
      return -1;
    }

    int32_t outputIndex = mMediaCodecEncoder->DequeueOutputBuffer(bufferInfo);

    if (outputIndex == java::sdk::MediaCodec::INFO_TRY_AGAIN_LATER) {
      // Not an error: output not available yet. Try later.
      CSFLogDebug(LOGTAG, "%s dequeue output buffer try again:%d", __FUNCTION__,
                  outputIndex);
    } else if (outputIndex ==
               java::sdk::MediaCodec::INFO_OUTPUT_FORMAT_CHANGED) {
      // handle format change
      CSFLogDebug(LOGTAG, "%s dequeue output buffer format changed:%d",
                  __FUNCTION__, outputIndex);
    } else if (outputIndex ==
               java::sdk::MediaCodec::INFO_OUTPUT_BUFFERS_CHANGED) {
      CSFLogDebug(LOGTAG, "%s dequeue output buffer changed:%d", __FUNCTION__,
                  outputIndex);
      mMediaCodecEncoder->GetOutputBuffers();
    } else if (outputIndex < 0) {
      CSFLogDebug(LOGTAG, "%s dequeue output buffer unknown error:%d",
                  __FUNCTION__, outputIndex);
    } else {
#ifdef WEBRTC_MEDIACODEC_DEBUG
      CSFLogDebug(LOGTAG,
                  "%s dequeue output buffer return status is %d took %u ms",
                  __FUNCTION__, outputIndex,
                  PR_IntervalToMilliseconds(PR_IntervalNow() - time));
#endif

      JNIEnv* const env = jni::GetEnvForThread();
      jobject buffer = env->GetObjectArrayElement(mOutputBuffers, outputIndex);
      if (buffer) {
        int32_t offset;
        bufferInfo->Offset(&offset);
        int32_t flags;
        bufferInfo->Flags(&flags);

        // The buffer will be null on Android L if we are decoding to a Surface.
        void* directBuffer =
            reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(buffer)) +
            offset;

        if (flags == java::sdk::MediaCodec::BUFFER_FLAG_SYNC_FRAME) {
          mEncodedImage._frameType = webrtc::kVideoFrameKey;
        } else {
          mEncodedImage._frameType = webrtc::kVideoFrameDelta;
        }
        mEncodedImage._completeFrame = true;

        int32_t size;
        bufferInfo->Size(&size);
#ifdef WEBRTC_MEDIACODEC_DEBUG
        CSFLogDebug(LOGTAG,
                    "%s dequeue output buffer ok, index:%d, buffer size = %d, "
                    "buffer offset = %d, flags = %d",
                    __FUNCTION__, outputIndex, size, offset, flags);
#endif

        if (VerifyAndAllocate(size) == -1) {
          CSFLogDebug(LOGTAG, "%s VerifyAndAllocate buffers failed",
                      __FUNCTION__);
          return WEBRTC_VIDEO_CODEC_ERROR;
        }

        mEncodedImage._length = size;

        // XXX It's too bad the MediaCodec API forces us to memcpy this...
        // We should find a way to 'hold' the buffer or transfer it from
        // inputImage (ping-pong buffers or select them from a small pool).
        memcpy(mEncodedImage._buffer, directBuffer, mEncodedImage._length);

        webrtc::CodecSpecificInfo info;
        info.codecType = webrtc::kVideoCodecVP8;
        info.codecSpecific.VP8.pictureId = -1;
        info.codecSpecific.VP8.tl0PicIdx = -1;
        info.codecSpecific.VP8.keyIdx = -1;
        info.codecSpecific.VP8.temporalIdx = 1;

        // Generate a header describing a single fragment.
        webrtc::RTPFragmentationHeader header;
        memset(&header, 0, sizeof(header));
        header.VerifyAndAllocateFragmentationHeader(1);
        header.fragmentationLength[0] = mEncodedImage._length;

        mCallback->OnEncodedImage(mEncodedImage, &info, &header);

        mMediaCodecEncoder->ReleaseOutputBuffer(outputIndex, false);
        env->DeleteLocalRef(buffer);
      }
    }
  }

  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t WebrtcMediaCodecVP8VideoEncoder::RegisterEncodeCompleteCallback(
    webrtc::EncodedImageCallback* callback) {
  CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
  mCallback = callback;

  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t WebrtcMediaCodecVP8VideoEncoder::Release() {
  CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
  delete mMediaCodecEncoder;
  mMediaCodecEncoder = nullptr;

  delete[] mEncodedImage._buffer;
  mEncodedImage._buffer = nullptr;
  mEncodedImage._size = 0;

  return WEBRTC_VIDEO_CODEC_OK;
}

WebrtcMediaCodecVP8VideoEncoder::~WebrtcMediaCodecVP8VideoEncoder() {
  CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
  Release();
}

int32_t WebrtcMediaCodecVP8VideoEncoder::SetChannelParameters(
    uint32_t packetLoss, int64_t rtt) {
  CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t WebrtcMediaCodecVP8VideoEncoder::SetRates(uint32_t newBitRate,
                                                  uint32_t frameRate) {
  CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
  if (!mMediaCodecEncoder) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  // XXX
  // 1. implement MediaCodec's setParameters method
  // 2. find a way to create a Java Bundle instance to pass to MediaCodec's
  //    setParameters method (mMediaCodecEncoder->setParameters).
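  //
  // On the Java side this is roughly (a sketch of the stock
  // android.media.MediaCodec API, not wired up here):
  //
  //   Bundle params = new Bundle();
  //   params.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE,
  //                 newBitRate * 1000);
  //   codec.setParameters(params);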

  return WEBRTC_VIDEO_CODEC_OK;
}

WebrtcMediaCodecVP8VideoRemoteEncoder::
    ~WebrtcMediaCodecVP8VideoRemoteEncoder() {
  CSFLogDebug(LOGTAG, "%s %p", __FUNCTION__, this);
  Release();
}

int32_t WebrtcMediaCodecVP8VideoRemoteEncoder::InitEncode(
    const webrtc::VideoCodec* codecSettings, int32_t numberOfCores,
    size_t maxPayloadSize) {
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t WebrtcMediaCodecVP8VideoRemoteEncoder::SetRates(uint32_t newBitRate,
                                                        uint32_t frameRate) {
  CSFLogDebug(LOGTAG, "%s, newBitRate: %d, frameRate: %d", __FUNCTION__,
              newBitRate, frameRate);
  if (!mJavaEncoder) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  mJavaEncoder->SetBitrate(newBitRate * 1000);
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t WebrtcMediaCodecVP8VideoRemoteEncoder::Encode(
    const webrtc::VideoFrame& inputImage,
    const webrtc::CodecSpecificInfo* codecSpecificInfo,
    const std::vector<webrtc::FrameType>* frame_types) {
  CSFLogDebug(LOGTAG, "%s, w = %d, h = %d", __FUNCTION__, inputImage.width(),
              inputImage.height());
  if (inputImage.width() == 0 || inputImage.height() == 0) {
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  if (!mJavaEncoder) {
    JavaCallbacksSupport::Init();
    mJavaCallbacks = java::CodecProxy::NativeCallbacks::New();

    JavaCallbacksSupport::AttachNative(
        mJavaCallbacks, mozilla::MakeUnique<CallbacksSupport>(mCallback));

    java::sdk::MediaFormat::LocalRef format;

    nsresult res = java::sdk::MediaFormat::CreateVideoFormat(
        nsCString(MEDIACODEC_VIDEO_MIME_VP8), inputImage.width(),
        inputImage.height(), &format);

    if (NS_FAILED(res)) {
      CSFLogDebug(LOGTAG, "%s, CreateVideoFormat failed err = %d", __FUNCTION__,
                  (int)res);
      return WEBRTC_VIDEO_CODEC_ERROR;
    }

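    // Same magic values as in WebrtcAndroidMediaCodec::Configure above:
    // bitrate mode 2 = BITRATE_MODE_CBR, color format 21 =
    // COLOR_FormatYUV420SemiPlanar, i-frame-interval in seconds.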
    res = format->SetInteger(nsCString("bitrate"), 300 * 1000);
    res = format->SetInteger(nsCString("bitrate-mode"), 2);
    res = format->SetInteger(nsCString("color-format"), 21);
    res = format->SetInteger(nsCString("frame-rate"), 30);
    res = format->SetInteger(nsCString("i-frame-interval"), 100);

    mJavaEncoder =
        java::CodecProxy::Create(true, format, nullptr, mJavaCallbacks, u""_ns);

    if (mJavaEncoder == nullptr) {
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
  }

  rtc::scoped_refptr<webrtc::I420BufferInterface> inputBuffer =
      inputImage.video_frame_buffer()->GetI420();
  size_t sizeY = inputImage.height() * inputBuffer->StrideY();
  size_t sizeUV = ((inputImage.height() + 1) / 2) * inputBuffer->StrideU();
  size_t size = sizeY + 2 * sizeUV;

  if (mConvertBuf == nullptr) {
    mConvertBuf = new uint8_t[size];
    mConvertBufsize = size;
  }

  uint8_t* dstY = mConvertBuf;
  uint16_t* dstUV = reinterpret_cast<uint16_t*>(dstY + sizeY);

  bool converted = I420toNV12(dstY, dstUV, inputImage);
  if (!converted) {
    CSFLogError(LOGTAG,
                "%s WebrtcMediaCodecVP8VideoRemoteEncoder::Encode() convert "
                "input buffer to NV12 error.",
                __FUNCTION__);
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  jni::ByteBuffer::LocalRef bytes = jni::ByteBuffer::New(mConvertBuf, size);

  java::sdk::BufferInfo::LocalRef bufferInfo;
  nsresult rv = java::sdk::BufferInfo::New(&bufferInfo);
  if (NS_FAILED(rv)) {
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  if ((*frame_types)[0] == webrtc::kVideoFrameKey) {
    bufferInfo->Set(0, size, inputImage.render_time_ms() * PR_USEC_PER_MSEC,
                    java::sdk::MediaCodec::BUFFER_FLAG_SYNC_FRAME);
  } else {
    bufferInfo->Set(0, size, inputImage.render_time_ms() * PR_USEC_PER_MSEC, 0);
  }

  mJavaEncoder->Input(bytes, bufferInfo, nullptr);

  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t WebrtcMediaCodecVP8VideoRemoteEncoder::RegisterEncodeCompleteCallback(
    webrtc::EncodedImageCallback* callback) {
  mCallback = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t WebrtcMediaCodecVP8VideoRemoteEncoder::Release() {
  CSFLogDebug(LOGTAG, "%s %p", __FUNCTION__, this);

  if (mJavaEncoder) {
    mJavaEncoder->Release();
    mJavaEncoder = nullptr;
  }

  if (mJavaCallbacks) {
    JavaCallbacksSupport::GetNative(mJavaCallbacks)->Cancel();
    JavaCallbacksSupport::DisposeNative(mJavaCallbacks);
    mJavaCallbacks = nullptr;
  }

  if (mConvertBuf) {
    delete[] mConvertBuf;
    mConvertBuf = nullptr;
  }

  return WEBRTC_VIDEO_CODEC_OK;
}

// Decoder.
WebrtcMediaCodecVP8VideoDecoder::WebrtcMediaCodecVP8VideoDecoder()
    : mCallback(nullptr),
      mFrameWidth(0),
      mFrameHeight(0),
      mMediaCodecDecoder(nullptr) {
  CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
}

bool WebrtcMediaCodecVP8VideoDecoder::ResetInputBuffers() {
  mInputBuffers = mMediaCodecDecoder->GetInputBuffers();

  if (!mInputBuffers) return false;

  return true;
}

bool WebrtcMediaCodecVP8VideoDecoder::ResetOutputBuffers() {
  mOutputBuffers = mMediaCodecDecoder->GetOutputBuffers();

  if (!mOutputBuffers) return false;

  return true;
}

int32_t WebrtcMediaCodecVP8VideoDecoder::InitDecode(
    const webrtc::VideoCodec* codecSettings, int32_t numberOfCores) {
  if (!mMediaCodecDecoder) {
    mMediaCodecDecoder = new WebrtcAndroidMediaCodec();
  }

  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t WebrtcMediaCodecVP8VideoDecoder::Decode(
    const webrtc::EncodedImage& inputImage, bool missingFrames,
    const webrtc::RTPFragmentationHeader* fragmentation,
    const webrtc::CodecSpecificInfo* codecSpecificInfo, int64_t renderTimeMs) {
  CSFLogDebug(LOGTAG, "%s, renderTimeMs = %" PRId64, __FUNCTION__,
              renderTimeMs);

  if (inputImage._length == 0 || !inputImage._buffer) {
    CSFLogDebug(LOGTAG, "%s, input image invalid. length = %" PRIdPTR,
                __FUNCTION__, inputImage._length);
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  if (inputImage._frameType == webrtc::kVideoFrameKey) {
    CSFLogDebug(LOGTAG, "%s, inputImage is a key frame", __FUNCTION__);
    mFrameWidth = inputImage._encodedWidth;
    mFrameHeight = inputImage._encodedHeight;
  }

  if (!mMediaCodecDecoder->isStarted) {
    if (mFrameWidth == 0 || mFrameHeight == 0) {
      return WEBRTC_VIDEO_CODEC_ERROR;
    }

    mMediaCodecDecoder->SetDecoderCallback(mCallback);
    nsresult res = mMediaCodecDecoder->Configure(
        mFrameWidth, mFrameHeight, nullptr, 0, MEDIACODEC_VIDEO_MIME_VP8,
        false /* decoder */);

    if (res != NS_OK) {
      CSFLogDebug(LOGTAG, "%s, decoder configure return err = %d", __FUNCTION__,
                  (int)res);
      return WEBRTC_VIDEO_CODEC_ERROR;
    }

    res = mMediaCodecDecoder->Start();

    if (NS_FAILED(res)) {
      mMediaCodecDecoder->isStarted = false;
      CSFLogDebug(LOGTAG, "%s start decoder. err = %d", __FUNCTION__, (int)res);
      return WEBRTC_VIDEO_CODEC_ERROR;
    }

    bool retBool = ResetInputBuffers();
    if (!retBool) {
      CSFLogDebug(LOGTAG, "%s ResetInputBuffers failed.", __FUNCTION__);
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
    retBool = ResetOutputBuffers();
    if (!retBool) {
      CSFLogDebug(LOGTAG, "%s ResetOutputBuffers failed.", __FUNCTION__);
      return WEBRTC_VIDEO_CODEC_ERROR;
    }

    mMediaCodecDecoder->isStarted = true;
  }
#ifdef WEBRTC_MEDIACODEC_DEBUG
  uint32_t time = PR_IntervalNow();
  CSFLogDebug(LOGTAG, "%s start decoder took %u ms", __FUNCTION__,
              PR_IntervalToMilliseconds(PR_IntervalNow() - time));
#endif

  bool feedFrame = true;
  int32_t ret = WEBRTC_VIDEO_CODEC_ERROR;

  while (feedFrame) {
    ret = mMediaCodecDecoder->FeedMediaCodecInput(inputImage, renderTimeMs);
    feedFrame = (ret == -1);
  }

  CSFLogDebug(LOGTAG, "%s end, ret = %d", __FUNCTION__, ret);

  return ret;
}

void WebrtcMediaCodecVP8VideoDecoder::DecodeFrame(EncodedFrame* frame) {
  CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
}

int32_t WebrtcMediaCodecVP8VideoDecoder::RegisterDecodeCompleteCallback(
    webrtc::DecodedImageCallback* callback) {
  CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);

  mCallback = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t WebrtcMediaCodecVP8VideoDecoder::Release() {
  CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);

  delete mMediaCodecDecoder;
  mMediaCodecDecoder = nullptr;

  return WEBRTC_VIDEO_CODEC_OK;
}

WebrtcMediaCodecVP8VideoDecoder::~WebrtcMediaCodecVP8VideoDecoder() {
  CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);

  Release();
}

}  // namespace mozilla