1 /* This Source Code Form is subject to the terms of the Mozilla Public
2  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
3  * You can obtain one at http://mozilla.org/MPL/2.0/. */
4 
5 #include <cstdio>
6 #include <queue>
7 
8 #include "CSFLog.h"
9 #include "nspr.h"
10 
11 #include "JavaCallbacksSupport.h"
12 #include "MediaCodec.h"
13 #include "WebrtcMediaCodecVP8VideoCodec.h"
14 #include "mozilla/ArrayUtils.h"
15 #include "nsThreadUtils.h"
16 #include "mozilla/Monitor.h"
17 #include "runnable_utils.h"
18 #include "MediaResult.h"
19 
20 #include "AudioConduit.h"
21 #include "VideoConduit.h"
22 #include "libyuv/convert_from.h"
23 #include "libyuv/convert.h"
24 #include "libyuv/row.h"
25 
26 #include "webrtc/modules/video_coding/include/video_error_codes.h"
27 #include "webrtc/system_wrappers/include/critical_section_wrapper.h"
28 
29 #include "webrtc/api/video/i420_buffer.h"
30 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
31 
32 using namespace mozilla;
33 using namespace mozilla::java;
34 using namespace mozilla::java::sdk;
35 
36 static const int32_t DECODER_TIMEOUT = 10 * PR_USEC_PER_MSEC;  // 10ms
37 static const char MEDIACODEC_VIDEO_MIME_VP8[] = "video/x-vnd.on2.vp8";
38 
39 namespace mozilla {
40 
41 static const char* wmcLogTag = "WebrtcMediaCodecVP8VideoCodec";
42 #ifdef LOGTAG
43 #undef LOGTAG
44 #endif
45 #define LOGTAG wmcLogTag
46 
47 class CallbacksSupport final : public JavaCallbacksSupport {
48  public:
49   CallbacksSupport(webrtc::EncodedImageCallback* aCallback)
50       : mCallback(aCallback),
51         mCritSect(webrtc::CriticalSectionWrapper::CreateCriticalSection()),
52         mPictureId(0) {
53     CSFLogDebug(LOGTAG, "%s %p", __FUNCTION__, this);
54     memset(&mEncodedImage, 0, sizeof(mEncodedImage));
55   }
56 
57   ~CallbacksSupport() {
58     CSFLogDebug(LOGTAG, "%s %p", __FUNCTION__, this);
59     if (mEncodedImage._size) {
60       delete[] mEncodedImage._buffer;
61       mEncodedImage._buffer = nullptr;
62       mEncodedImage._size = 0;
63     }
64   }
65 
66   void VerifyAndAllocate(const uint32_t minimumSize) {
67     CSFLogDebug(LOGTAG, "%s %p", __FUNCTION__, this);
68     if (minimumSize > mEncodedImage._size) {
69       uint8_t* newBuffer = new uint8_t[minimumSize];
70       MOZ_RELEASE_ASSERT(newBuffer);
71 
72       if (mEncodedImage._buffer) {
73         delete[] mEncodedImage._buffer;
74       }
75       mEncodedImage._buffer = newBuffer;
76       mEncodedImage._size = minimumSize;
77     }
78   }
79 
80   void HandleInput(jlong aTimestamp, bool aProcessed) override {
81     CSFLogDebug(LOGTAG, "%s %p", __FUNCTION__, this);
82   }
83 
84   void HandleOutputFormatChanged(MediaFormat::Param aFormat) override {
85     CSFLogDebug(LOGTAG, "%s %p", __FUNCTION__, this);
86   }
87 
88   void HandleOutput(Sample::Param aSample) override {
89     CSFLogDebug(LOGTAG, "%s %p", __FUNCTION__, this);
90     BufferInfo::LocalRef info = aSample->Info();
91 
92     int32_t size;
93     bool ok = NS_SUCCEEDED(info->Size(&size));
94     MOZ_RELEASE_ASSERT(ok);
95 
96     if (size > 0) {
97       webrtc::CriticalSectionScoped lock(mCritSect.get());
98       VerifyAndAllocate(size);
99 
100       int64_t presentationTimeUs;
101       ok = NS_SUCCEEDED(info->PresentationTimeUs(&presentationTimeUs));
102       MOZ_RELEASE_ASSERT(ok);
103 
104       mEncodedImage._timeStamp = presentationTimeUs / PR_USEC_PER_MSEC;
105       mEncodedImage.capture_time_ms_ = mEncodedImage._timeStamp;
106 
107       int32_t flags;
108       ok = NS_SUCCEEDED(info->Flags(&flags));
109       MOZ_ASSERT(ok);
110 
111       if (flags == MediaCodec::BUFFER_FLAG_SYNC_FRAME) {
112         mEncodedImage._frameType = webrtc::kVideoFrameKey;
113       } else {
114         mEncodedImage._frameType = webrtc::kVideoFrameDelta;
115       }
116       mEncodedImage._completeFrame = true;
117       mEncodedImage._length = size;
118 
119       jni::ByteBuffer::LocalRef dest =
120           jni::ByteBuffer::New(mEncodedImage._buffer, size);
121       aSample->WriteToByteBuffer(dest);
122 
123       webrtc::CodecSpecificInfo info;
124       info.codecType = webrtc::kVideoCodecVP8;
125       info.codecSpecific.VP8.pictureId = mPictureId;
126       mPictureId = (mPictureId + 1) & 0x7FFF;
127       info.codecSpecific.VP8.tl0PicIdx = -1;
128       info.codecSpecific.VP8.keyIdx = -1;
129       info.codecSpecific.VP8.temporalIdx = 1;
130       info.codecSpecific.VP8.simulcastIdx = 0;
131 
132       webrtc::RTPFragmentationHeader header;
133       memset(&header, 0, sizeof(header));
134       header.VerifyAndAllocateFragmentationHeader(1);
135       header.fragmentationLength[0] = mEncodedImage._length;
136 
137       MOZ_RELEASE_ASSERT(mCallback);
138       mCallback->OnEncodedImage(mEncodedImage, &info, &header);
139     }
140   }
141 
142   void HandleError(const MediaResult& aError) override {
143     CSFLogDebug(LOGTAG, "%s %p", __FUNCTION__, this);
144   }
145 
146   friend class WebrtcMediaCodecVP8VideoRemoteEncoder;
147 
148  private:
149   webrtc::EncodedImageCallback* mCallback;
150   Atomic<bool> mCanceled;
151   webrtc::EncodedImage mEncodedImage;
152   std::unique_ptr<webrtc::CriticalSectionWrapper> mCritSect;
153   uint32_t mPictureId;
154 };
155 
156 static MediaCodec::LocalRef CreateDecoder(const char* aMimeType) {
157   if (!aMimeType) {
158     return nullptr;
159   }
160 
161   MediaCodec::LocalRef codec;
162   MediaCodec::CreateDecoderByType(aMimeType, &codec);
163   return codec;
164 }
165 
166 static MediaCodec::LocalRef CreateEncoder(const char* aMimeType) {
167   if (!aMimeType) {
168     return nullptr;
169   }
170 
171   MediaCodec::LocalRef codec;
172   MediaCodec::CreateEncoderByType(aMimeType, &codec);
173   return codec;
174 }
175 
176 static void ShutdownThread(nsCOMPtr<nsIThread>& aThread) {
177   aThread->Shutdown();
178 }
179 
180 // Base runnable class to repeatedly pull MediaCodec output buffers on a
181 // separate thread. How to use:
182 // - implement DrainOutput() to get output. Remember to return false to tell
183 //   the drain not to pop the input queue.
184 // - call QueueInput() to schedule a run to drain output. The input, aFrame,
185 //   should contain corresponding info such as image size and timestamps for
186 //   the DrainOutput() implementation to construct the data needed by
187 //   encoded/decoded callbacks. (See the illustrative sketch below.)
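//
// Illustrative sketch only (an addition for readability, not part of the
// original logic): a hypothetical subclass `MyDrain` and its owner might use
// this class roughly as follows; `MyDrain` and `frame` are assumed names.
//
//   class MyDrain : public MediaCodecOutputDrain {
//    protected:
//     bool DrainOutput() override {
//       // Pop one entry from mInputFrames (while holding mMonitor) and hand
//       // the matching output to the registered callback; per the note above,
//       // return false when the queued input should not be popped yet.
//       return true;
//     }
//   };
//
//   RefPtr<MyDrain> drain = new MyDrain();
//   drain->Start();            // spins up the "OutputDrain" thread
//   drain->QueueInput(frame);  // once per frame fed to MediaCodec
//   drain->Stop();             // stops draining and shuts the thread down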
188 class MediaCodecOutputDrain : public Runnable {
189  public:
190   void Start() {
191     MonitorAutoLock lock(mMonitor);
192     if (mThread == nullptr) {
193       NS_NewNamedThread("OutputDrain", getter_AddRefs(mThread));
194     }
195     mEnding = false;
196     mThread->Dispatch(this, NS_DISPATCH_NORMAL);
197   }
198 
199   void Stop() {
200     MonitorAutoLock lock(mMonitor);
201     mEnding = true;
202     lock.NotifyAll();  // In case Run() is waiting.
203 
204     if (mThread != nullptr) {
205       MonitorAutoUnlock unlock(mMonitor);
206       NS_DispatchToMainThread(
207           WrapRunnableNM(&ShutdownThread, nsCOMPtr<nsIThread>(mThread)));
208       mThread = nullptr;
209     }
210   }
211 
212   void QueueInput(const EncodedFrame& aFrame) {
213     MonitorAutoLock lock(mMonitor);
214 
215     MOZ_ASSERT(mThread);
216 
217     mInputFrames.push(aFrame);
218     // Notify Run() about queued input so it can start working.
219     lock.NotifyAll();
220   }
221 
222   NS_IMETHOD Run() override {
223     MOZ_ASSERT(mThread);
224 
225     MonitorAutoLock lock(mMonitor);
226     while (true) {
227       if (mInputFrames.empty()) {
228         // Wait for new input.
229         lock.Wait();
230       }
231 
232       if (mEnding) {
233         // Stop draining.
234         break;
235       }
236 
237       MOZ_ASSERT(!mInputFrames.empty());
238       {
239         // Release monitor while draining because it's blocking.
240         MonitorAutoUnlock unlock(mMonitor);
241         DrainOutput();
242       }
243     }
244 
245     return NS_OK;
246   }
247 
248  protected:
249   MediaCodecOutputDrain()
250       : Runnable("MediaCodecOutputDrain"),
251         mMonitor("MediaCodecOutputDrain monitor"),
252         mEnding(false) {}
253 
254   // Drain output buffer for input frame queue mInputFrames.
255   // mInputFrames contains info such as size and time of the input frames.
256   // We have to use a queue to handle encoder frame skips - we can input 10
257   // frames and get one back.  NOTE: any access of mInputFrames MUST be
258   // preceded by locking mMonitor! (See the illustrative sketch below.)
259 
260   // Blocks waiting for decoded buffers, but for a limited period because
261   // we need to check for shutdown.
262   virtual bool DrainOutput() = 0;
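  //
  // Illustrative sketch only (assumed shape of a typical override, not code
  // from this file): do the blocking output dequeue without the lock, and
  // take mMonitor for every access to mInputFrames.
  //
  //   bool DrainOutput() override {
  //     // ... blocking dequeue of one output buffer, no lock held ...
  //     EncodedFrame frame;
  //     {
  //       MonitorAutoLock lock(mMonitor);
  //       frame = mInputFrames.front();
  //       mInputFrames.pop();
  //     }
  //     // ... deliver the encoded/decoded data described by `frame` ...
  //     return true;
  //   }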
263 
264  protected:
265   // This monitor protects all things below it, and is also used to
266   // wait/notify queued input.
267   Monitor mMonitor;
268   std::queue<EncodedFrame> mInputFrames;
269 
270  private:
271   // also protected by mMonitor
272   nsCOMPtr<nsIThread> mThread;
273   bool mEnding;
274 };
275 
276 class WebrtcAndroidMediaCodec {
277  public:
278   WebrtcAndroidMediaCodec()
279       : mEncoderCallback(nullptr),
280         mDecoderCallback(nullptr),
281         isStarted(false),
282         mEnding(false) {
283     CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
284   }
285 
286   nsresult Configure(uint32_t width, uint32_t height, const jobject aSurface,
287                      uint32_t flags, const char* mime, bool encoder) {
288     CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
289     nsresult res = NS_OK;
290 
291     if (!mCoder) {
292       mWidth = width;
293       mHeight = height;
294 
295       MediaFormat::LocalRef format;
296 
297       res = MediaFormat::CreateVideoFormat(nsCString(mime), mWidth, mHeight,
298                                            &format);
299 
300       if (NS_FAILED(res)) {
301         CSFLogDebug(
302             LOGTAG,
303             "WebrtcAndroidMediaCodec::%s, CreateVideoFormat failed err = %d",
304             __FUNCTION__, (int)res);
305         return NS_ERROR_FAILURE;
306       }
307 
308       if (encoder) {
309         mCoder = CreateEncoder(mime);
310 
311         if (!mCoder) {
312           CSFLogDebug(LOGTAG,
313                       "WebrtcAndroidMediaCodec::%s, CreateEncoderByType failed",
314                       __FUNCTION__);
316           return NS_ERROR_FAILURE;
317         }
318 
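        // Note: these literals presumably correspond to Android MediaFormat /
        // MediaCodecInfo values: ~300 kbps target bitrate, BITRATE_MODE_CBR
        // (2), COLOR_FormatYUV420SemiPlanar (21), 30 fps, and a key-frame
        // interval of 100 seconds.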
319         res = format->SetInteger(MediaFormat::KEY_BIT_RATE, 1000 * 300);
320         res = format->SetInteger(MediaFormat::KEY_BITRATE_MODE, 2);
321         res = format->SetInteger(MediaFormat::KEY_COLOR_FORMAT, 21);
322         res = format->SetInteger(MediaFormat::KEY_FRAME_RATE, 30);
323         res = format->SetInteger(MediaFormat::KEY_I_FRAME_INTERVAL, 100);
324 
325       } else {
326         mCoder = CreateDecoder(mime);
327         if (!mCoder) {
328           CSFLogDebug(LOGTAG,
329                       "WebrtcAndroidMediaCodec::%s, CreateDecoderByType failed",
330                       __FUNCTION__);
332           return NS_ERROR_FAILURE;
333         }
334       }
335       res = mCoder->Configure(format, nullptr, nullptr, flags);
336       if (NS_FAILED(res)) {
337         CSFLogDebug(LOGTAG, "WebrtcAndroidMediaCodec::%s, err = %d",
338                     __FUNCTION__, (int)res);
339       }
340     }
341 
342     return res;
343   }
344 
345   nsresult Start() {
346     CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
347 
348     if (!mCoder) {
349       return NS_ERROR_FAILURE;
350     }
351 
352     mEnding = false;
353 
354     nsresult res;
355     res = mCoder->Start();
356     if (NS_FAILED(res)) {
357       CSFLogDebug(
358           LOGTAG,
359           "WebrtcAndroidMediaCodec::%s, mCoder->start() return err = %d",
360           __FUNCTION__, (int)res);
361       return res;
362     }
363     isStarted = true;
364     return NS_OK;
365   }
366 
367   nsresult Stop() {
368     CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
369     mEnding = true;
370 
371     if (mOutputDrain != nullptr) {
372       mOutputDrain->Stop();
373       mOutputDrain = nullptr;
374     }
375 
376     mCoder->Stop();
377     mCoder->Release();
378     isStarted = false;
379     return NS_OK;
380   }
381 
382   void GenerateVideoFrame(size_t width, size_t height, uint32_t timeStamp,
383                           void* decoded, int color_format) {
384     CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
385 
386     // TODO: eliminate extra pixel copy/color conversion
387     size_t widthUV = (width + 1) / 2;
388     rtc::scoped_refptr<webrtc::I420Buffer> buffer;
389     buffer = webrtc::I420Buffer::Create(width, height, width, widthUV, widthUV);
390 
391     uint8_t* src_nv12 = static_cast<uint8_t*>(decoded);
392     int src_nv12_y_size = width * height;
393 
394     uint8_t* dstY = buffer->MutableDataY();
395     uint8_t* dstU = buffer->MutableDataU();
396     uint8_t* dstV = buffer->MutableDataV();
397 
398     libyuv::NV12ToI420(src_nv12, width, src_nv12 + src_nv12_y_size,
399                        (width + 1) & ~1, dstY, width, dstU, (width + 1) / 2,
400                        dstV, (width + 1) / 2, width, height);
401 
402     mVideoFrame.reset(
403         new webrtc::VideoFrame(buffer, timeStamp, 0, webrtc::kVideoRotation_0));
404   }
405 
406   int32_t FeedMediaCodecInput(const webrtc::EncodedImage& inputImage,
407                               int64_t renderTimeMs) {
408 #ifdef WEBRTC_MEDIACODEC_DEBUG
409     uint32_t time = PR_IntervalNow();
410     CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
411 #endif
412 
413     int inputIndex = DequeueInputBuffer(DECODER_TIMEOUT);
414     if (inputIndex == -1) {
415       CSFLogError(LOGTAG, "%s dequeue input buffer failed", __FUNCTION__);
416       return inputIndex;
417     }
418 
419 #ifdef WEBRTC_MEDIACODEC_DEBUG
420     CSFLogDebug(LOGTAG, "%s dequeue input buffer took %u ms", __FUNCTION__,
421                 PR_IntervalToMilliseconds(PR_IntervalNow() - time));
422     time = PR_IntervalNow();
423 #endif
424 
425     size_t size = inputImage._length;
426 
427     JNIEnv* const env = jni::GetEnvForThread();
428     jobject buffer = env->GetObjectArrayElement(mInputBuffers, inputIndex);
429     void* directBuffer = env->GetDirectBufferAddress(buffer);
430 
431     PodCopy((uint8_t*)directBuffer, inputImage._buffer, size);
432 
433     if (inputIndex >= 0) {
434       CSFLogError(LOGTAG, "%s queue input buffer inputIndex = %d", __FUNCTION__,
435                   inputIndex);
436       QueueInputBuffer(inputIndex, 0, size, renderTimeMs, 0);
437 
438       {
439         if (mOutputDrain == nullptr) {
440           mOutputDrain = new OutputDrain(this);
441           mOutputDrain->Start();
442         }
443         EncodedFrame frame;
444         frame.width_ = mWidth;
445         frame.height_ = mHeight;
446         frame.timeStamp_ = inputImage._timeStamp;
447         frame.decode_timestamp_ = renderTimeMs;
448         mOutputDrain->QueueInput(frame);
449       }
450       env->DeleteLocalRef(buffer);
451     }
452 
453     return inputIndex;
454   }
455 
456   nsresult DrainOutput(std::queue<EncodedFrame>& aInputFrames,
457                        Monitor& aMonitor) {
458     MOZ_ASSERT(mCoder != nullptr);
459     if (mCoder == nullptr) {
460       return NS_ERROR_FAILURE;
461     }
462 
463 #ifdef WEBRTC_MEDIACODEC_DEBUG
464     uint32_t time = PR_IntervalNow();
465 #endif
466     nsresult res;
467     BufferInfo::LocalRef bufferInfo;
468     res = BufferInfo::New(&bufferInfo);
469     if (NS_FAILED(res)) {
470       CSFLogDebug(
471           LOGTAG,
472           "WebrtcAndroidMediaCodec::%s, BufferInfo::New return err = %d",
473           __FUNCTION__, (int)res);
474       return res;
475     }
476     int32_t outputIndex = DequeueOutputBuffer(bufferInfo);
477 
478     if (outputIndex == MediaCodec::INFO_TRY_AGAIN_LATER) {
479       // Not an error: output not available yet. Try later.
480       CSFLogDebug(LOGTAG, "%s dequeue output buffer try again:%d", __FUNCTION__,
481                   outputIndex);
482     } else if (outputIndex == MediaCodec::INFO_OUTPUT_FORMAT_CHANGED) {
483       // handle format change
484       CSFLogDebug(LOGTAG, "%s dequeue output buffer format changed:%d",
485                   __FUNCTION__, outputIndex);
486     } else if (outputIndex == MediaCodec::INFO_OUTPUT_BUFFERS_CHANGED) {
487       CSFLogDebug(LOGTAG, "%s dequeue output buffer changed:%d", __FUNCTION__,
488                   outputIndex);
489       GetOutputBuffers();
490     } else if (outputIndex < 0) {
491       CSFLogDebug(LOGTAG, "%s dequeue output buffer unknown error:%d",
492                   __FUNCTION__, outputIndex);
493       MonitorAutoLock lock(aMonitor);
494       aInputFrames.pop();
495     } else {
496 #ifdef WEBRTC_MEDIACODEC_DEBUG
497       CSFLogDebug(LOGTAG,
498                   "%s dequeue output buffer return status is %d took %u ms",
499                   __FUNCTION__, outputIndex,
500                   PR_IntervalToMilliseconds(PR_IntervalNow() - time));
501 #endif
502       EncodedFrame frame;
503       {
504         MonitorAutoLock lock(aMonitor);
505         frame = aInputFrames.front();
506         aInputFrames.pop();
507       }
508 
509       if (mEnding) {
510         ReleaseOutputBuffer(outputIndex, false);
511         return NS_OK;
512       }
513 
514       JNIEnv* const env = jni::GetEnvForThread();
515       jobject buffer = env->GetObjectArrayElement(mOutputBuffers, outputIndex);
516       if (buffer) {
517         // The buffer will be null on Android L if we are decoding to a Surface
518         void* directBuffer = env->GetDirectBufferAddress(buffer);
519 
520         int color_format = 0;
521 
522         CSFLogDebug(
523             LOGTAG,
524             "%s generate video frame, width = %d, height = %d, timeStamp_ = %d",
525             __FUNCTION__, frame.width_, frame.height_, frame.timeStamp_);
526         GenerateVideoFrame(frame.width_, frame.height_, frame.timeStamp_,
527                            directBuffer, color_format);
528         mDecoderCallback->Decoded(*mVideoFrame);
529 
530         ReleaseOutputBuffer(outputIndex, false);
531         env->DeleteLocalRef(buffer);
532       }
533     }
534     return NS_OK;
535   }
536 
537   int32_t DequeueInputBuffer(int64_t time) {
538     nsresult res;
539     int32_t inputIndex;
540     res = mCoder->DequeueInputBuffer(time, &inputIndex);
541 
542     if (NS_FAILED(res)) {
543       CSFLogDebug(LOGTAG,
544                   "WebrtcAndroidMediaCodec::%s, mCoder->DequeueInputBuffer() "
545                   "return err = %d",
546                   __FUNCTION__, (int)res);
547       return -1;
548     }
549     return inputIndex;
550   }
551 
552   void QueueInputBuffer(int32_t inputIndex, int32_t offset, size_t size,
553                         int64_t renderTimes, int32_t flags) {
554     nsresult res = NS_OK;
555     res =
556         mCoder->QueueInputBuffer(inputIndex, offset, size, renderTimes, flags);
557 
558     if (NS_FAILED(res)) {
559       CSFLogDebug(LOGTAG,
560                   "WebrtcAndroidMediaCodec::%s, mCoder->QueueInputBuffer() "
561                   "return err = %d",
562                   __FUNCTION__, (int)res);
563     }
564   }
565 
566   int32_t DequeueOutputBuffer(BufferInfo::Param aInfo) {
567     nsresult res;
568 
569     int32_t outputStatus;
570     res = mCoder->DequeueOutputBuffer(aInfo, DECODER_TIMEOUT, &outputStatus);
571 
572     if (NS_FAILED(res)) {
573       CSFLogDebug(LOGTAG,
574                   "WebrtcAndroidMediaCodec::%s, mCoder->DequeueOutputBuffer() "
575                   "return err = %d",
576                   __FUNCTION__, (int)res);
577       return -1;
578     }
579 
580     return outputStatus;
581   }
582 
583   void ReleaseOutputBuffer(int32_t index, bool flag) {
584     mCoder->ReleaseOutputBuffer(index, flag);
585   }
586 
587   jobjectArray GetInputBuffers() {
588     JNIEnv* const env = jni::GetEnvForThread();
589 
590     if (mInputBuffers) {
591       env->DeleteGlobalRef(mInputBuffers);
592     }
593 
594     nsresult res;
595     jni::ObjectArray::LocalRef inputBuffers;
596     res = mCoder->GetInputBuffers(&inputBuffers);
597     mInputBuffers = (jobjectArray)env->NewGlobalRef(inputBuffers.Get());
598     if (NS_FAILED(res)) {
599       CSFLogDebug(
600           LOGTAG,
601           "WebrtcAndroidMediaCodec::%s, GetInputBuffers return err = %d",
602           __FUNCTION__, (int)res);
603       return nullptr;
604     }
605 
606     return mInputBuffers;
607   }
608 
609   jobjectArray GetOutputBuffers() {
610     JNIEnv* const env = jni::GetEnvForThread();
611 
612     if (mOutputBuffers) {
613       env->DeleteGlobalRef(mOutputBuffers);
614     }
615 
616     nsresult res;
617     jni::ObjectArray::LocalRef outputBuffers;
618     res = mCoder->GetOutputBuffers(&outputBuffers);
619     mOutputBuffers = (jobjectArray)env->NewGlobalRef(outputBuffers.Get());
620     if (NS_FAILED(res)) {
621       CSFLogDebug(
622           LOGTAG,
623           "WebrtcAndroidMediaCodec::%s, GetOutputBuffers return err = %d",
624           __FUNCTION__, (int)res);
625       return nullptr;
626     }
627 
628     return mOutputBuffers;
629   }
630 
631   void SetDecoderCallback(webrtc::DecodedImageCallback* aCallback) {
632     mDecoderCallback = aCallback;
633   }
634 
635   void SetEncoderCallback(webrtc::EncodedImageCallback* aCallback) {
636     mEncoderCallback = aCallback;
637   }
638 
639  protected:
640   virtual ~WebrtcAndroidMediaCodec() {}
641 
642  private:
643   class OutputDrain : public MediaCodecOutputDrain {
644    public:
645     OutputDrain(WebrtcAndroidMediaCodec* aMediaCodec)
646         : MediaCodecOutputDrain(), mMediaCodec(aMediaCodec) {}
647 
648    protected:
649     virtual bool DrainOutput() override {
650       return (mMediaCodec->DrainOutput(mInputFrames, mMonitor) == NS_OK);
651     }
652 
653    private:
654     WebrtcAndroidMediaCodec* mMediaCodec;
655   };
656 
657   friend class WebrtcMediaCodecVP8VideoEncoder;
658   friend class WebrtcMediaCodecVP8VideoDecoder;
659 
660   MediaCodec::GlobalRef mCoder;
661   webrtc::EncodedImageCallback* mEncoderCallback;
662   webrtc::DecodedImageCallback* mDecoderCallback;
663   std::unique_ptr<webrtc::VideoFrame> mVideoFrame;
664 
665   jobjectArray mInputBuffers;
666   jobjectArray mOutputBuffers;
667 
668   RefPtr<OutputDrain> mOutputDrain;
669   uint32_t mWidth;
670   uint32_t mHeight;
671   bool isStarted;
672   bool mEnding;
673 
674   NS_INLINE_DECL_THREADSAFE_REFCOUNTING(WebrtcAndroidMediaCodec)
675 };
676 
677 static bool I420toNV12(uint8_t* dstY, uint16_t* dstUV,
678                        const webrtc::VideoFrame& inputImage) {
679   rtc::scoped_refptr<webrtc::VideoFrameBuffer> inputBuffer =
680       inputImage.video_frame_buffer();
681 
682   uint8_t* buffer = dstY;
683   uint8_t* dst_y = buffer;
684   int dst_stride_y = inputBuffer->StrideY();
685   uint8_t* dst_uv = buffer + inputBuffer->StrideY() * inputImage.height();
686   int dst_stride_uv = inputBuffer->StrideU() * 2;
687 
688   // Why NV12?  Because COLOR_FORMAT_YUV420_SEMIPLANAR.  Most hardware is
689   // NV12-friendly.
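  // NV12 layout note: a full-resolution Y plane followed by one interleaved
  // UV plane at half vertical resolution, which is why dst_uv starts at
  // StrideY() * height and dst_stride_uv is twice the chroma stride.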
690   bool converted = !libyuv::I420ToNV12(
691       inputBuffer->DataY(), inputBuffer->StrideY(), inputBuffer->DataU(),
692       inputBuffer->StrideU(), inputBuffer->DataV(), inputBuffer->StrideV(),
693       dst_y, dst_stride_y, dst_uv, dst_stride_uv, inputImage.width(),
694       inputImage.height());
695   return converted;
696 }
697 
698 // Encoder.
699 WebrtcMediaCodecVP8VideoEncoder::WebrtcMediaCodecVP8VideoEncoder()
700     : mCallback(nullptr), mMediaCodecEncoder(nullptr) {
701   CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
702 
703   memset(&mEncodedImage, 0, sizeof(mEncodedImage));
704 }
705 
706 bool WebrtcMediaCodecVP8VideoEncoder::ResetInputBuffers() {
707   mInputBuffers = mMediaCodecEncoder->GetInputBuffers();
708 
709   if (!mInputBuffers) return false;
710 
711   return true;
712 }
713 
714 bool WebrtcMediaCodecVP8VideoEncoder::ResetOutputBuffers() {
715   mOutputBuffers = mMediaCodecEncoder->GetOutputBuffers();
716 
717   if (!mOutputBuffers) return false;
718 
719   return true;
720 }
721 
722 int32_t WebrtcMediaCodecVP8VideoEncoder::VerifyAndAllocate(
723     const uint32_t minimumSize) {
724   if (minimumSize > mEncodedImage._size) {
725     // create buffer of sufficient size
726     uint8_t* newBuffer = new uint8_t[minimumSize];
727     if (newBuffer == nullptr) {
728       return -1;
729     }
730     if (mEncodedImage._buffer) {
731       // copy old data
732       memcpy(newBuffer, mEncodedImage._buffer, mEncodedImage._size);
733       delete[] mEncodedImage._buffer;
734     }
735     mEncodedImage._buffer = newBuffer;
736     mEncodedImage._size = minimumSize;
737   }
738   return 0;
739 }
740 
741 int32_t WebrtcMediaCodecVP8VideoEncoder::InitEncode(
742     const webrtc::VideoCodec* codecSettings, int32_t numberOfCores,
743     size_t maxPayloadSize) {
744   mMaxPayloadSize = maxPayloadSize;
745   CSFLogDebug(LOGTAG, "%s, w = %d, h = %d", __FUNCTION__, codecSettings->width,
746               codecSettings->height);
747 
748   return WEBRTC_VIDEO_CODEC_OK;
749 }
750 
751 int32_t WebrtcMediaCodecVP8VideoEncoder::Encode(
752     const webrtc::VideoFrame& inputImage,
753     const webrtc::CodecSpecificInfo* codecSpecificInfo,
754     const std::vector<webrtc::FrameType>* frame_types) {
755   CSFLogDebug(LOGTAG, "%s, w = %d, h = %d", __FUNCTION__, inputImage.width(),
756               inputImage.height());
757 
758   if (!mMediaCodecEncoder) {
759     mMediaCodecEncoder = new WebrtcAndroidMediaCodec();
760   }
761 
762   if (!mMediaCodecEncoder->isStarted) {
763     if (inputImage.width() == 0 || inputImage.height() == 0) {
764       return WEBRTC_VIDEO_CODEC_ERROR;
765     } else {
766       mFrameWidth = inputImage.width();
767       mFrameHeight = inputImage.height();
768     }
769 
770     mMediaCodecEncoder->SetEncoderCallback(mCallback);
771     nsresult res = mMediaCodecEncoder->Configure(
772         mFrameWidth, mFrameHeight, nullptr, MediaCodec::CONFIGURE_FLAG_ENCODE,
773         MEDIACODEC_VIDEO_MIME_VP8, true /* encoder */);
774 
775     if (res != NS_OK) {
776       CSFLogDebug(LOGTAG, "%s, encoder configure return err = %d", __FUNCTION__,
777                   (int)res);
778       return WEBRTC_VIDEO_CODEC_ERROR;
779     }
780 
781     res = mMediaCodecEncoder->Start();
782 
783     if (NS_FAILED(res)) {
784       mMediaCodecEncoder->isStarted = false;
785       CSFLogDebug(LOGTAG, "%s start encoder. err = %d", __FUNCTION__, (int)res);
786       return WEBRTC_VIDEO_CODEC_ERROR;
787     }
788 
789     bool retBool = ResetInputBuffers();
790     if (!retBool) {
791       CSFLogDebug(LOGTAG, "%s ResetInputBuffers failed.", __FUNCTION__);
792       return WEBRTC_VIDEO_CODEC_ERROR;
793     }
794     retBool = ResetOutputBuffers();
795     if (!retBool) {
796       CSFLogDebug(LOGTAG, "%s ResetOutputBuffers failed.", __FUNCTION__);
797       return WEBRTC_VIDEO_CODEC_ERROR;
798     }
799 
800     mMediaCodecEncoder->isStarted = true;
801   }
802 
803 #ifdef WEBRTC_MEDIACODEC_DEBUG
804   uint32_t time = PR_IntervalNow();
805 #endif
806 
807   rtc::scoped_refptr<webrtc::VideoFrameBuffer> inputBuffer =
808       inputImage.video_frame_buffer();
809   size_t sizeY = inputImage.height() * inputBuffer->StrideY();
810   size_t sizeUV = ((inputImage.height() + 1) / 2) * inputBuffer->StrideU();
811   size_t size = sizeY + 2 * sizeUV;
812 
813   int inputIndex = mMediaCodecEncoder->DequeueInputBuffer(DECODER_TIMEOUT);
814   if (inputIndex == -1) {
815     CSFLogError(LOGTAG, "%s dequeue input buffer failed", __FUNCTION__);
816     return inputIndex;
817   }
818 
819 #ifdef WEBRTC_MEDIACODEC_DEBUG
820   CSFLogDebug(LOGTAG,
821               "%s WebrtcMediaCodecVP8VideoEncoder::Encode() dequeue OMX input "
822               "buffer took %u ms",
823               __FUNCTION__, PR_IntervalToMilliseconds(PR_IntervalNow() - time));
824 #endif
825 
826   if (inputIndex >= 0) {
827     JNIEnv* const env = jni::GetEnvForThread();
828     jobject buffer = env->GetObjectArrayElement(mInputBuffers, inputIndex);
829     void* directBuffer = env->GetDirectBufferAddress(buffer);
830 
831     uint8_t* dstY = static_cast<uint8_t*>(directBuffer);
832     uint16_t* dstUV = reinterpret_cast<uint16_t*>(dstY + sizeY);
833 
834     bool converted = I420toNV12(dstY, dstUV, inputImage);
835     if (!converted) {
836       CSFLogError(LOGTAG,
837                   "%s WebrtcMediaCodecVP8VideoEncoder::Encode() convert input "
838                   "buffer to NV12 error.",
839                   __FUNCTION__);
840       return WEBRTC_VIDEO_CODEC_ERROR;
841     }
842 
843     env->DeleteLocalRef(buffer);
844 
845 #ifdef WEBRTC_MEDIACODEC_DEBUG
846     time = PR_IntervalNow();
847     CSFLogError(LOGTAG, "%s queue input buffer inputIndex = %d", __FUNCTION__,
848                 inputIndex);
849 #endif
850 
851     mMediaCodecEncoder->QueueInputBuffer(
852         inputIndex, 0, size,
853         inputImage.render_time_ms() * PR_USEC_PER_MSEC /* ms to us */, 0);
854 #ifdef WEBRTC_MEDIACODEC_DEBUG
855     CSFLogDebug(LOGTAG,
856                 "%s WebrtcMediaCodecVP8VideoEncoder::Encode() queue input "
857                 "buffer took %u ms",
858                 __FUNCTION__,
859                 PR_IntervalToMilliseconds(PR_IntervalNow() - time));
860 #endif
861     mEncodedImage._encodedWidth = inputImage.width();
862     mEncodedImage._encodedHeight = inputImage.height();
863     mEncodedImage._timeStamp = inputImage.timestamp();
864     mEncodedImage.capture_time_ms_ = inputImage.timestamp();
865 
866     nsresult res;
867     BufferInfo::LocalRef bufferInfo;
868     res = BufferInfo::New(&bufferInfo);
869     if (NS_FAILED(res)) {
870       CSFLogDebug(LOGTAG,
871                   "WebrtcMediaCodecVP8VideoEncoder::%s, BufferInfo::New return "
872                   "err = %d",
873                   __FUNCTION__, (int)res);
874       return -1;
875     }
876 
877     int32_t outputIndex = mMediaCodecEncoder->DequeueOutputBuffer(bufferInfo);
878 
879     if (outputIndex == MediaCodec::INFO_TRY_AGAIN_LATER) {
880       // Not an error: output not available yet. Try later.
881       CSFLogDebug(LOGTAG, "%s dequeue output buffer try again:%d", __FUNCTION__,
882                   outputIndex);
883     } else if (outputIndex == MediaCodec::INFO_OUTPUT_FORMAT_CHANGED) {
884       // handle format change
885       CSFLogDebug(LOGTAG, "%s dequeue output buffer format changed:%d",
886                   __FUNCTION__, outputIndex);
887     } else if (outputIndex == MediaCodec::INFO_OUTPUT_BUFFERS_CHANGED) {
888       CSFLogDebug(LOGTAG, "%s dequeue output buffer changed:%d", __FUNCTION__,
889                   outputIndex);
890       mMediaCodecEncoder->GetOutputBuffers();
891     } else if (outputIndex < 0) {
892       CSFLogDebug(LOGTAG, "%s dequeue output buffer unknown error:%d",
893                   __FUNCTION__, outputIndex);
894     } else {
895 #ifdef WEBRTC_MEDIACODEC_DEBUG
896       CSFLogDebug(LOGTAG,
897                   "%s dequeue output buffer return status is %d took %u ms",
898                   __FUNCTION__, outputIndex,
899                   PR_IntervalToMilliseconds(PR_IntervalNow() - time));
900 #endif
901 
902       JNIEnv* const env = jni::GetEnvForThread();
903       jobject buffer = env->GetObjectArrayElement(mOutputBuffers, outputIndex);
904       if (buffer) {
905         int32_t offset;
906         bufferInfo->Offset(&offset);
907         int32_t flags;
908         bufferInfo->Flags(&flags);
909 
910         // The buffer will be null on Android L if we are decoding to a Surface
911         void* directBuffer =
912             reinterpret_cast<uint8_t*>(env->GetDirectBufferAddress(buffer)) +
913             offset;
914 
915         if (flags == MediaCodec::BUFFER_FLAG_SYNC_FRAME) {
916           mEncodedImage._frameType = webrtc::kVideoFrameKey;
917         } else {
918           mEncodedImage._frameType = webrtc::kVideoFrameDelta;
919         }
920         mEncodedImage._completeFrame = true;
921 
922         int32_t size;
923         bufferInfo->Size(&size);
924 #ifdef WEBRTC_MEDIACODEC_DEBUG
925         CSFLogDebug(LOGTAG,
926                     "%s dequeue output buffer ok, index:%d, buffer size = %d, "
927                     "buffer offset = %d, flags = %d",
928                     __FUNCTION__, outputIndex, size, offset, flags);
929 #endif
930 
931         if (VerifyAndAllocate(size) == -1) {
932           CSFLogDebug(LOGTAG, "%s VerifyAndAllocate buffers failed",
933                       __FUNCTION__);
934           return WEBRTC_VIDEO_CODEC_ERROR;
935         }
936 
937         mEncodedImage._length = size;
938 
939         // xxx It's too bad the mediacodec API forces us to memcpy this....
940         // we should find a way to 'hold' the buffer or transfer it
941         // from inputImage (ping-pong buffers or select them from a small pool)
942         memcpy(mEncodedImage._buffer, directBuffer, mEncodedImage._length);
943 
944         webrtc::CodecSpecificInfo info;
945         info.codecType = webrtc::kVideoCodecVP8;
946         info.codecSpecific.VP8.pictureId = -1;
947         info.codecSpecific.VP8.tl0PicIdx = -1;
948         info.codecSpecific.VP8.keyIdx = -1;
949         info.codecSpecific.VP8.temporalIdx = 1;
950 
951         // Generate a header describing a single fragment.
952         webrtc::RTPFragmentationHeader header;
953         memset(&header, 0, sizeof(header));
954         header.VerifyAndAllocateFragmentationHeader(1);
955         header.fragmentationLength[0] = mEncodedImage._length;
956 
957         mCallback->OnEncodedImage(mEncodedImage, &info, &header);
958 
959         mMediaCodecEncoder->ReleaseOutputBuffer(outputIndex, false);
960         env->DeleteLocalRef(buffer);
961       }
962     }
963   }
964 
965   return WEBRTC_VIDEO_CODEC_OK;
966 }
967 
968 int32_t WebrtcMediaCodecVP8VideoEncoder::RegisterEncodeCompleteCallback(
969     webrtc::EncodedImageCallback* callback) {
970   CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
971   mCallback = callback;
972 
973   return WEBRTC_VIDEO_CODEC_OK;
974 }
975 
976 int32_t WebrtcMediaCodecVP8VideoEncoder::Release() {
977   CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
978   delete mMediaCodecEncoder;
979   mMediaCodecEncoder = nullptr;
980 
981   delete[] mEncodedImage._buffer;
982   mEncodedImage._buffer = nullptr;
983   mEncodedImage._size = 0;
984 
985   return WEBRTC_VIDEO_CODEC_OK;
986 }
987 
988 WebrtcMediaCodecVP8VideoEncoder::~WebrtcMediaCodecVP8VideoEncoder() {
989   CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
990   Release();
991 }
992 
993 int32_t WebrtcMediaCodecVP8VideoEncoder::SetChannelParameters(
994     uint32_t packetLoss, int64_t rtt) {
995   CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
996   return WEBRTC_VIDEO_CODEC_OK;
997 }
998 
999 int32_t WebrtcMediaCodecVP8VideoEncoder::SetRates(uint32_t newBitRate,
1000                                                   uint32_t frameRate) {
1001   CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
1002   if (!mMediaCodecEncoder) {
1003     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
1004   }
1005 
1006   // XXX
1007   // 1. implement MediaCodec's setParameters method
1008   // 2. find a way to instantiate a Java Bundle instance as the parameter
1009   //    for MediaCodec's setParameters method. mMediaCodecEncoder->setParameters
1010 
1011   return WEBRTC_VIDEO_CODEC_OK;
1012 }
1013 
1014 WebrtcMediaCodecVP8VideoRemoteEncoder::
1015     ~WebrtcMediaCodecVP8VideoRemoteEncoder() {
1016   CSFLogDebug(LOGTAG, "%s %p", __FUNCTION__, this);
1017   Release();
1018 }
1019 
1020 int32_t WebrtcMediaCodecVP8VideoRemoteEncoder::InitEncode(
1021     const webrtc::VideoCodec* codecSettings, int32_t numberOfCores,
1022     size_t maxPayloadSize) {
1023   return WEBRTC_VIDEO_CODEC_OK;
1024 }
1025 
1026 int32_t WebrtcMediaCodecVP8VideoRemoteEncoder::SetRates(uint32_t newBitRate,
1027                                                         uint32_t frameRate) {
1028   CSFLogDebug(LOGTAG, "%s, newBitRate: %d, frameRate: %d", __FUNCTION__,
1029               newBitRate, frameRate);
1030   if (!mJavaEncoder) {
1031     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
1032   }
1033   mJavaEncoder->SetRates(newBitRate);
1034   return WEBRTC_VIDEO_CODEC_OK;
1035 }
1036 
1037 int32_t WebrtcMediaCodecVP8VideoRemoteEncoder::Encode(
1038     const webrtc::VideoFrame& inputImage,
1039     const webrtc::CodecSpecificInfo* codecSpecificInfo,
1040     const std::vector<webrtc::FrameType>* frame_types) {
1041   CSFLogDebug(LOGTAG, "%s, w = %d, h = %d", __FUNCTION__, inputImage.width(),
1042               inputImage.height());
1043   if (inputImage.width() == 0 || inputImage.height() == 0) {
1044     return WEBRTC_VIDEO_CODEC_ERROR;
1045   }
1046 
1047   if (!mJavaEncoder) {
1048     JavaCallbacksSupport::Init();
1049     mJavaCallbacks = CodecProxy::NativeCallbacks::New();
1050 
1051     JavaCallbacksSupport::AttachNative(
1052         mJavaCallbacks, mozilla::MakeUnique<CallbacksSupport>(mCallback));
1053 
1054     MediaFormat::LocalRef format;
1055 
1056     nsresult res = MediaFormat::CreateVideoFormat(
1057         nsCString(MEDIACODEC_VIDEO_MIME_VP8), inputImage.width(),
1058         inputImage.height(), &format);
1059 
1060     if (NS_FAILED(res)) {
1061       CSFLogDebug(LOGTAG, "%s, CreateVideoFormat failed err = %d", __FUNCTION__,
1062                   (int)res);
1063       return WEBRTC_VIDEO_CODEC_ERROR;
1064     }
1065 
1066     res = format->SetInteger(nsCString("bitrate"), 300 * 1000);
1067     res = format->SetInteger(nsCString("bitrate-mode"), 2);
1068     res = format->SetInteger(nsCString("color-format"), 21);
1069     res = format->SetInteger(nsCString("frame-rate"), 30);
1070     res = format->SetInteger(nsCString("i-frame-interval"), 100);
1071 
1072     mJavaEncoder = CodecProxy::Create(true, format, nullptr, mJavaCallbacks,
1073                                       EmptyString());
1074 
1075     if (mJavaEncoder == nullptr) {
1076       return WEBRTC_VIDEO_CODEC_ERROR;
1077     }
1078   }
1079 
1080   rtc::scoped_refptr<webrtc::VideoFrameBuffer> inputBuffer =
1081       inputImage.video_frame_buffer();
1082   size_t sizeY = inputImage.height() * inputBuffer->StrideY();
1083   size_t sizeUV = ((inputImage.height() + 1) / 2) * inputBuffer->StrideU();
1084   size_t size = sizeY + 2 * sizeUV;
1085 
1086   if (mConvertBuf == nullptr) {
1087     mConvertBuf = new uint8_t[size];
1088     mConvertBufsize = size;
1089   }
1090 
1091   uint8_t* dstY = mConvertBuf;
1092   uint16_t* dstUV = reinterpret_cast<uint16_t*>(dstY + sizeY);
1093 
1094   bool converted = I420toNV12(dstY, dstUV, inputImage);
1095   if (!converted) {
1096     CSFLogError(LOGTAG,
1097                 "%s WebrtcMediaCodecVP8VideoRemoteEncoder::Encode() convert input "
1098                 "buffer to NV12 error.",
1099                 __FUNCTION__);
1100     return WEBRTC_VIDEO_CODEC_ERROR;
1101   }
1102 
1103   jni::ByteBuffer::LocalRef bytes = jni::ByteBuffer::New(mConvertBuf, size);
1104 
1105   BufferInfo::LocalRef bufferInfo;
1106   nsresult rv = BufferInfo::New(&bufferInfo);
1107   if (NS_FAILED(rv)) {
1108     return WEBRTC_VIDEO_CODEC_ERROR;
1109   }
1110 
1111   if ((*frame_types)[0] == webrtc::kVideoFrameKey) {
1112     bufferInfo->Set(0, size, inputImage.render_time_ms() * PR_USEC_PER_MSEC,
1113                     MediaCodec::BUFFER_FLAG_SYNC_FRAME);
1114   } else {
1115     bufferInfo->Set(0, size, inputImage.render_time_ms() * PR_USEC_PER_MSEC, 0);
1116   }
1117 
1118   mJavaEncoder->Input(bytes, bufferInfo, nullptr);
1119 
1120   return WEBRTC_VIDEO_CODEC_OK;
1121 }
1122 
1123 int32_t WebrtcMediaCodecVP8VideoRemoteEncoder::RegisterEncodeCompleteCallback(
1124     webrtc::EncodedImageCallback* callback) {
1125   mCallback = callback;
1126   return WEBRTC_VIDEO_CODEC_OK;
1127 }
1128 
1129 int32_t WebrtcMediaCodecVP8VideoRemoteEncoder::Release() {
1130   CSFLogDebug(LOGTAG, "%s %p", __FUNCTION__, this);
1131 
1132   if (mJavaEncoder) {
1133     mJavaEncoder->Release();
1134     mJavaEncoder = nullptr;
1135   }
1136 
1137   if (mJavaCallbacks) {
1138     JavaCallbacksSupport::GetNative(mJavaCallbacks)->Cancel();
1139     JavaCallbacksSupport::DisposeNative(mJavaCallbacks);
1140     mJavaCallbacks = nullptr;
1141   }
1142 
1143   if (mConvertBuf) {
1144     delete[] mConvertBuf;
1145     mConvertBuf = nullptr;
1146   }
1147 
1148   return WEBRTC_VIDEO_CODEC_OK;
1149 }
1150 
1151 // Decoder.
1152 WebrtcMediaCodecVP8VideoDecoder::WebrtcMediaCodecVP8VideoDecoder()
1153     : mCallback(nullptr),
1154       mFrameWidth(0),
1155       mFrameHeight(0),
1156       mMediaCodecDecoder(nullptr) {
1157   CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
1158 }
1159 
1160 bool WebrtcMediaCodecVP8VideoDecoder::ResetInputBuffers() {
1161   mInputBuffers = mMediaCodecDecoder->GetInputBuffers();
1162 
1163   if (!mInputBuffers) return false;
1164 
1165   return true;
1166 }
1167 
1168 bool WebrtcMediaCodecVP8VideoDecoder::ResetOutputBuffers() {
1169   mOutputBuffers = mMediaCodecDecoder->GetOutputBuffers();
1170 
1171   if (!mOutputBuffers) return false;
1172 
1173   return true;
1174 }
1175 
1176 int32_t WebrtcMediaCodecVP8VideoDecoder::InitDecode(
1177     const webrtc::VideoCodec* codecSettings, int32_t numberOfCores) {
1178   if (!mMediaCodecDecoder) {
1179     mMediaCodecDecoder = new WebrtcAndroidMediaCodec();
1180   }
1181 
1182   return WEBRTC_VIDEO_CODEC_OK;
1183 }
1184 
1185 int32_t WebrtcMediaCodecVP8VideoDecoder::Decode(
1186     const webrtc::EncodedImage& inputImage, bool missingFrames,
1187     const webrtc::RTPFragmentationHeader* fragmentation,
1188     const webrtc::CodecSpecificInfo* codecSpecificInfo, int64_t renderTimeMs) {
1189   CSFLogDebug(LOGTAG, "%s, renderTimeMs = %" PRId64, __FUNCTION__,
1190               renderTimeMs);
1191 
1192   if (inputImage._length == 0 || !inputImage._buffer) {
1193     CSFLogDebug(LOGTAG, "%s, input Image invalid. length = %" PRIdPTR,
1194                 __FUNCTION__, inputImage._length);
1195     return WEBRTC_VIDEO_CODEC_ERROR;
1196   }
1197 
1198   if (inputImage._frameType == webrtc::kVideoFrameKey) {
1199     CSFLogDebug(LOGTAG, "%s, inputImage is Golden frame", __FUNCTION__);
1200     mFrameWidth = inputImage._encodedWidth;
1201     mFrameHeight = inputImage._encodedHeight;
1202   }
1203 
1204   if (!mMediaCodecDecoder->isStarted) {
1205     if (mFrameWidth == 0 || mFrameHeight == 0) {
1206       return WEBRTC_VIDEO_CODEC_ERROR;
1207     }
1208 
1209     mMediaCodecDecoder->SetDecoderCallback(mCallback);
1210     nsresult res = mMediaCodecDecoder->Configure(
1211         mFrameWidth, mFrameHeight, nullptr, 0, MEDIACODEC_VIDEO_MIME_VP8,
1212         false /* decoder */);
1213 
1214     if (res != NS_OK) {
1215       CSFLogDebug(LOGTAG, "%s, decoder configure return err = %d", __FUNCTION__,
1216                   (int)res);
1217       return WEBRTC_VIDEO_CODEC_ERROR;
1218     }
1219 
1220     res = mMediaCodecDecoder->Start();
1221 
1222     if (NS_FAILED(res)) {
1223       mMediaCodecDecoder->isStarted = false;
1224       CSFLogDebug(LOGTAG, "%s start decoder. err = %d", __FUNCTION__, (int)res);
1225       return WEBRTC_VIDEO_CODEC_ERROR;
1226     }
1227 
1228     bool retBool = ResetInputBuffers();
1229     if (!retBool) {
1230       CSFLogDebug(LOGTAG, "%s ResetInputBuffers failed.", __FUNCTION__);
1231       return WEBRTC_VIDEO_CODEC_ERROR;
1232     }
1233     retBool = ResetOutputBuffers();
1234     if (!retBool) {
1235       CSFLogDebug(LOGTAG, "%s ResetOutputBuffers failed.", __FUNCTION__);
1236       return WEBRTC_VIDEO_CODEC_ERROR;
1237     }
1238 
1239     mMediaCodecDecoder->isStarted = true;
1240   }
1241 #ifdef WEBRTC_MEDIACODEC_DEBUG
1242   uint32_t time = PR_IntervalNow();
1243   CSFLogDebug(LOGTAG, "%s start decoder took %u ms", __FUNCTION__,
1244               PR_IntervalToMilliseconds(PR_IntervalNow() - time));
1245 #endif
1246 
1247   bool feedFrame = true;
1248   int32_t ret = WEBRTC_VIDEO_CODEC_ERROR;
1249 
1250   while (feedFrame) {
1251     ret = mMediaCodecDecoder->FeedMediaCodecInput(inputImage, renderTimeMs);
1252     feedFrame = (ret == -1);
1253   }
1254 
1255   CSFLogDebug(LOGTAG, "%s end, ret = %d", __FUNCTION__, ret);
1256 
1257   return ret;
1258 }
1259 
1260 void WebrtcMediaCodecVP8VideoDecoder::DecodeFrame(EncodedFrame* frame) {
1261   CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
1262 }
1263 
1264 int32_t WebrtcMediaCodecVP8VideoDecoder::RegisterDecodeCompleteCallback(
1265     webrtc::DecodedImageCallback* callback) {
1266   CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
1267 
1268   mCallback = callback;
1269   return WEBRTC_VIDEO_CODEC_OK;
1270 }
1271 
1272 int32_t WebrtcMediaCodecVP8VideoDecoder::Release() {
1273   CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
1274 
1275   delete mMediaCodecDecoder;
1276   mMediaCodecDecoder = nullptr;
1277 
1278   return WEBRTC_VIDEO_CODEC_OK;
1279 }
1280 
1281 WebrtcMediaCodecVP8VideoDecoder::~WebrtcMediaCodecVP8VideoDecoder() {
1282   CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
1283 
1284   Release();
1285 }
1286 
1287 }  // namespace mozilla
1288