1 /* This Source Code Form is subject to the terms of the Mozilla Public
2 * License, v. 2.0. If a copy of the MPL was not distributed with this
3 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
4
5 #include "RemoteDataDecoder.h"
6
7 #include <jni.h>
8
9 #include "AndroidBridge.h"
10 #include "AndroidDecoderModule.h"
11 #include "EMEDecoderModule.h"
12 #include "GLImages.h"
13 #include "JavaCallbacksSupport.h"
14 #include "MediaData.h"
15 #include "MediaInfo.h"
16 #include "SimpleMap.h"
17 #include "VPXDecoder.h"
18 #include "VideoUtils.h"
19 #include "mozilla/java/CodecProxyWrappers.h"
20 #include "mozilla/java/GeckoSurfaceWrappers.h"
21 #include "mozilla/java/SampleBufferWrappers.h"
22 #include "mozilla/java/SampleWrappers.h"
23 #include "mozilla/java/SurfaceAllocatorWrappers.h"
24 #include "nsPromiseFlatString.h"
25 #include "nsThreadUtils.h"
26 #include "prlog.h"
27
28 #undef LOG
29 #define LOG(arg, ...) \
30 MOZ_LOG(sAndroidDecoderModuleLog, mozilla::LogLevel::Debug, \
31 ("RemoteDataDecoder(%p)::%s: " arg, this, __func__, ##__VA_ARGS__))
32
33 using namespace mozilla;
34 using namespace mozilla::gl;
35 using media::TimeUnit;
36
37 namespace mozilla {
38
39 // Hold a reference to the output buffer until we're ready to release it back to
40 // the Java codec (for rendering or not).
41 class RenderOrReleaseOutput {
42 public:
RenderOrReleaseOutput(java::CodecProxy::Param aCodec,java::Sample::Param aSample)43 RenderOrReleaseOutput(java::CodecProxy::Param aCodec,
44 java::Sample::Param aSample)
45 : mCodec(aCodec), mSample(aSample) {}
46
~RenderOrReleaseOutput()47 virtual ~RenderOrReleaseOutput() { ReleaseOutput(false); }
48
49 protected:
ReleaseOutput(bool aToRender)50 void ReleaseOutput(bool aToRender) {
51 if (mCodec && mSample) {
52 mCodec->ReleaseOutput(mSample, aToRender);
53 mCodec = nullptr;
54 mSample = nullptr;
55 }
56 }
57
58 private:
59 java::CodecProxy::GlobalRef mCodec;
60 java::Sample::GlobalRef mSample;
61 };
62
class RemoteVideoDecoder : public RemoteDataDecoder {
 public:
  // Render the output to the surface when the frame is sent
  // to compositor, or release it if not presented.
  class CompositeListener
      : private RenderOrReleaseOutput,
        public layers::SurfaceTextureImage::SetCurrentCallback {
   public:
    CompositeListener(java::CodecProxy::Param aCodec,
                      java::Sample::Param aSample)
        : RenderOrReleaseOutput(aCodec, aSample) {}

    // Invoked when the compositor makes the frame current: render it.
    void operator()(void) override { ReleaseOutput(true); }
  };

  // Per-input bookkeeping (keyed by presentation time in mInputInfos below)
  // so the matching output frame can be described with the right duration
  // and sizes.
  class InputInfo {
   public:
    InputInfo() {}

    InputInfo(const int64_t aDurationUs, const gfx::IntSize& aImageSize,
              const gfx::IntSize& aDisplaySize)
        : mDurationUs(aDurationUs),
          mImageSize(aImageSize),
          mDisplaySize(aDisplaySize) {}

    int64_t mDurationUs;        // Sample duration in microseconds.
    gfx::IntSize mImageSize;    // Coded image size from the sample's config.
    gfx::IntSize mDisplaySize;  // Size at which the frame should display.
  };

  // Forwards Java codec callbacks (which may arrive on binder threads) to
  // the owning RemoteVideoDecoder; the decoder methods re-dispatch to
  // mThread themselves.
  class CallbacksSupport final : public JavaCallbacksSupport {
   public:
    explicit CallbacksSupport(RemoteVideoDecoder* aDecoder)
        : mDecoder(aDecoder) {}

    void HandleInput(int64_t aTimestamp, bool aProcessed) override {
      mDecoder->UpdateInputStatus(aTimestamp, aProcessed);
    }

    void HandleOutput(java::Sample::Param aSample,
                      java::SampleBuffer::Param aBuffer) override {
      MOZ_ASSERT(!aBuffer, "Video sample should be bufferless");
      // aSample will be implicitly converted into a GlobalRef.
      mDecoder->ProcessOutput(std::move(aSample));
    }

    void HandleError(const MediaResult& aError) override {
      mDecoder->Error(aError);
    }

    friend class RemoteDataDecoder;

   private:
    // Non-owning; callbacks are cancelled and the native side disposed in
    // RemoteDataDecoder::Shutdown() before the decoder goes away.
    RemoteVideoDecoder* mDecoder;
  };

  RemoteVideoDecoder(const VideoInfo& aConfig,
                     java::sdk::MediaFormat::Param aFormat,
                     const nsString& aDrmStubId)
      : RemoteDataDecoder(MediaData::Type::VIDEO_DATA, aConfig.mMimeType,
                          aFormat, aDrmStubId),
        mConfig(aConfig) {}

  ~RemoteVideoDecoder() {
    if (mSurface) {
      // Return the output surface to the allocator pool.
      java::SurfaceAllocator::DisposeSurface(mSurface);
    }
  }

  RefPtr<InitPromise> Init() override {
    // All subsequent bookkeeping happens on the thread Init() ran on.
    mThread = GetCurrentSerialEventTarget();
    java::sdk::BufferInfo::LocalRef bufferInfo;
    if (NS_FAILED(java::sdk::BufferInfo::New(&bufferInfo)) || !bufferInfo) {
      return InitPromise::CreateAndReject(NS_ERROR_OUT_OF_MEMORY, __func__);
    }
    mInputBufferInfo = bufferInfo;

    // Acquire the surface the Java codec renders decoded frames into.
    mSurface =
        java::GeckoSurface::LocalRef(java::SurfaceAllocator::AcquireSurface(
            mConfig.mImage.width, mConfig.mImage.height, false));
    if (!mSurface) {
      return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                                          __func__);
    }

    mSurfaceHandle = mSurface->GetHandle();

    // Register native methods.
    JavaCallbacksSupport::Init();

    mJavaCallbacks = java::CodecProxy::NativeCallbacks::New();
    if (!mJavaCallbacks) {
      return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                                          __func__);
    }
    JavaCallbacksSupport::AttachNative(
        mJavaCallbacks, mozilla::MakeUnique<CallbacksSupport>(this));

    mJavaDecoder = java::CodecProxy::Create(
        false,  // false indicates to create a decoder and true denotes encoder
        mFormat, mSurface, mJavaCallbacks, mDrmStubId);
    if (mJavaDecoder == nullptr) {
      return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                                          __func__);
    }
    mIsCodecSupportAdaptivePlayback =
        mJavaDecoder->IsAdaptivePlaybackSupported();
    mIsHardwareAccelerated = mJavaDecoder->IsHardwareAccelerated();
    return InitPromise::CreateAndResolve(TrackInfo::kVideoTrack, __func__);
  }

  RefPtr<MediaDataDecoder::FlushPromise> Flush() override {
    AssertOnThread();
    // Drop per-sample bookkeeping and seek/monotonicity state, then let the
    // base class flush the codec and reject pending promises.
    mInputInfos.Clear();
    mSeekTarget.reset();
    mLatestOutputTime.reset();
    return RemoteDataDecoder::Flush();
  }

  RefPtr<MediaDataDecoder::DecodePromise> Decode(
      MediaRawData* aSample) override {
    AssertOnThread();

    // Prefer the sample's own track info (it may change mid-stream);
    // otherwise fall back to the config we were created with.
    const VideoInfo* config =
        aSample->mTrackInfo ? aSample->mTrackInfo->GetAsVideoInfo() : &mConfig;
    MOZ_ASSERT(config);

    // Record duration/sizes keyed by presentation time so ProcessOutput()
    // can reunite them with the decoded frame.
    InputInfo info(aSample->mDuration.ToMicroseconds(), config->mImage,
                   config->mDisplay);
    mInputInfos.Insert(aSample->mTime.ToMicroseconds(), info);
    return RemoteDataDecoder::Decode(aSample);
  }

  bool SupportDecoderRecycling() const override {
    return mIsCodecSupportAdaptivePlayback;
  }

  void SetSeekThreshold(const TimeUnit& aTime) override {
    // May be called from any thread; the assignment is applied on mThread.
    // An invalid time clears the threshold.
    auto setter = [self = RefPtr{this}, aTime] {
      if (aTime.IsValid()) {
        self->mSeekTarget = Some(aTime);
      } else {
        self->mSeekTarget.reset();
      }
    };
    if (mThread->IsOnCurrentThread()) {
      setter();
    } else {
      nsCOMPtr<nsIRunnable> runnable = NS_NewRunnableFunction(
          "RemoteVideoDecoder::SetSeekThreshold", std::move(setter));
      nsresult rv = mThread->Dispatch(runnable.forget());
      MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
      Unused << rv;
    }
  }

  bool IsUsefulData(const RefPtr<MediaData>& aSample) override {
    AssertOnThread();

    // Drop frames that would move backwards in time.
    if (mLatestOutputTime && aSample->mTime < mLatestOutputTime.value()) {
      return false;
    }

    // While seeking, drop frames that end at or before the seek target.
    const TimeUnit endTime = aSample->GetEndTime();
    if (mSeekTarget && endTime <= mSeekTarget.value()) {
      return false;
    }

    mSeekTarget.reset();
    mLatestOutputTime = Some(endTime);
    return true;
  }

  bool IsHardwareAccelerated(nsACString& aFailureReason) const override {
    return mIsHardwareAccelerated;
  }

  ConversionRequired NeedsConversion() const override {
    // Android MediaCodec consumes H.264 in AnnexB form.
    return ConversionRequired::kNeedAnnexB;
  }

 private:
  // Param and LocalRef are only valid for the duration of a JNI method call.
  // Use GlobalRef as the parameter type to keep the Java object referenced
  // until running.
  void ProcessOutput(java::Sample::GlobalRef&& aSample) {
    // Arrives on the Java callback thread; bounce to mThread first.
    if (!mThread->IsOnCurrentThread()) {
      nsresult rv =
          mThread->Dispatch(NewRunnableMethod<java::Sample::GlobalRef&&>(
              "RemoteVideoDecoder::ProcessOutput", this,
              &RemoteVideoDecoder::ProcessOutput, std::move(aSample)));
      MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
      Unused << rv;
      return;
    }

    AssertOnThread();
    if (GetState() == State::SHUTDOWN) {
      aSample->Dispose();
      return;
    }

    // Guard: unless handed to the compositor below, the destructor returns
    // the output buffer to the codec unrendered.
    UniquePtr<layers::SurfaceTextureImage::SetCurrentCallback> releaseSample(
        new CompositeListener(mJavaDecoder, aSample));

    java::sdk::BufferInfo::LocalRef info = aSample->Info();
    MOZ_ASSERT(info);

    int32_t flags;
    bool ok = NS_SUCCEEDED(info->Flags(&flags));

    int32_t offset;
    ok &= NS_SUCCEEDED(info->Offset(&offset));

    int32_t size;
    ok &= NS_SUCCEEDED(info->Size(&size));

    int64_t presentationTimeUs;
    ok &= NS_SUCCEEDED(info->PresentationTimeUs(&presentationTimeUs));

    if (!ok) {
      Error(MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                        RESULT_DETAIL("VideoCallBack::HandleOutput")));
      return;
    }

    InputInfo inputInfo;
    ok = mInputInfos.Find(presentationTimeUs, inputInfo);
    bool isEOS = !!(flags & java::sdk::MediaCodec::BUFFER_FLAG_END_OF_STREAM);
    if (!ok && !isEOS) {
      // Ignore output with no corresponding input.
      return;
    }

    if (ok && (size > 0 || presentationTimeUs >= 0)) {
      RefPtr<layers::Image> img = new layers::SurfaceTextureImage(
          mSurfaceHandle, inputInfo.mImageSize, false /* NOT continuous */,
          gl::OriginPos::BottomLeft, mConfig.HasAlpha());
      // When the compositor presents this image, CompositeListener renders
      // the sample to the surface.
      img->AsSurfaceTextureImage()->RegisterSetCurrentCallback(
          std::move(releaseSample));

      RefPtr<VideoData> v = VideoData::CreateFromImage(
          inputInfo.mDisplaySize, offset,
          TimeUnit::FromMicroseconds(presentationTimeUs),
          TimeUnit::FromMicroseconds(inputInfo.mDurationUs), img.forget(),
          !!(flags & java::sdk::MediaCodec::BUFFER_FLAG_SYNC_FRAME),
          TimeUnit::FromMicroseconds(presentationTimeUs));

      RemoteDataDecoder::UpdateOutputStatus(std::move(v));
    }

    if (isEOS) {
      DrainComplete();
    }
  }

  const VideoInfo mConfig;
  java::GeckoSurface::GlobalRef mSurface;
  AndroidSurfaceTextureHandle mSurfaceHandle;
  // Only accessed on reader's task queue.
  bool mIsCodecSupportAdaptivePlayback = false;
  // Can be accessed on any thread, but only written on during init.
  bool mIsHardwareAccelerated = false;
  // Accessed on mThread and reader's thread. SimpleMap however is
  // thread-safe, so it's okay to do so.
  SimpleMap<InputInfo> mInputInfos;
  // Only accessed on mThread.
  Maybe<TimeUnit> mSeekTarget;
  Maybe<TimeUnit> mLatestOutputTime;
};
333
class RemoteAudioDecoder : public RemoteDataDecoder {
 public:
  RemoteAudioDecoder(const AudioInfo& aConfig,
                     java::sdk::MediaFormat::Param aFormat,
                     const nsString& aDrmStubId)
      : RemoteDataDecoder(MediaData::Type::AUDIO_DATA, aConfig.mMimeType,
                          aFormat, aDrmStubId) {
    JNIEnv* const env = jni::GetEnvForThread();

    // If the format does not already carry codec-specific data ("csd-0"),
    // copy it from the config so the codec can be configured with it.
    bool formatHasCSD = false;
    NS_ENSURE_SUCCESS_VOID(aFormat->ContainsKey(u"csd-0"_ns, &formatHasCSD));

    if (!formatHasCSD && aConfig.mCodecSpecificConfig->Length() >= 2) {
      jni::ByteBuffer::LocalRef buffer(env);
      buffer = jni::ByteBuffer::New(aConfig.mCodecSpecificConfig->Elements(),
                                    aConfig.mCodecSpecificConfig->Length());
      NS_ENSURE_SUCCESS_VOID(aFormat->SetByteBuffer(u"csd-0"_ns, buffer));
    }
  }

  RefPtr<InitPromise> Init() override {
    // All subsequent bookkeeping happens on the thread Init() ran on.
    mThread = GetCurrentSerialEventTarget();
    java::sdk::BufferInfo::LocalRef bufferInfo;
    if (NS_FAILED(java::sdk::BufferInfo::New(&bufferInfo)) || !bufferInfo) {
      return InitPromise::CreateAndReject(NS_ERROR_OUT_OF_MEMORY, __func__);
    }
    mInputBufferInfo = bufferInfo;

    // Register native methods.
    JavaCallbacksSupport::Init();

    mJavaCallbacks = java::CodecProxy::NativeCallbacks::New();
    if (!mJavaCallbacks) {
      return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                                          __func__);
    }
    JavaCallbacksSupport::AttachNative(
        mJavaCallbacks, mozilla::MakeUnique<CallbacksSupport>(this));

    // No surface for audio (nullptr): output arrives via sample buffers.
    mJavaDecoder = java::CodecProxy::Create(false, mFormat, nullptr,
                                            mJavaCallbacks, mDrmStubId);
    if (mJavaDecoder == nullptr) {
      return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                                          __func__);
    }

    return InitPromise::CreateAndResolve(TrackInfo::kAudioTrack, __func__);
  }

  RefPtr<FlushPromise> Flush() override {
    AssertOnThread();
    // Resetting this also marks in-flight output as stale; see
    // ShouldDiscardSample().
    mFirstDemuxedSampleTime.reset();
    return RemoteDataDecoder::Flush();
  }

  RefPtr<DecodePromise> Decode(MediaRawData* aSample) override {
    AssertOnThread();
    // Remember the first sample time after creation/flush so outputs older
    // than it can be rejected later.
    if (!mFirstDemuxedSampleTime) {
      MOZ_ASSERT(aSample->mTime.IsValid());
      mFirstDemuxedSampleTime.emplace(aSample->mTime);
    }
    return RemoteDataDecoder::Decode(aSample);
  }

 private:
  // Forwards Java codec callbacks (which may arrive on binder threads) to
  // the owning RemoteAudioDecoder; the decoder methods re-dispatch to
  // mThread themselves.
  class CallbacksSupport final : public JavaCallbacksSupport {
   public:
    explicit CallbacksSupport(RemoteAudioDecoder* aDecoder)
        : mDecoder(aDecoder) {}

    void HandleInput(int64_t aTimestamp, bool aProcessed) override {
      mDecoder->UpdateInputStatus(aTimestamp, aProcessed);
    }

    void HandleOutput(java::Sample::Param aSample,
                      java::SampleBuffer::Param aBuffer) override {
      MOZ_ASSERT(aBuffer, "Audio sample should have buffer");
      // aSample will be implicitly converted into a GlobalRef.
      mDecoder->ProcessOutput(std::move(aSample), std::move(aBuffer));
    }

    void HandleOutputFormatChanged(
        java::sdk::MediaFormat::Param aFormat) override {
      int32_t outputChannels = 0;
      aFormat->GetInteger(u"channel-count"_ns, &outputChannels);
      AudioConfig::ChannelLayout layout(outputChannels);
      if (!layout.IsValid()) {
        mDecoder->Error(MediaResult(
            NS_ERROR_DOM_MEDIA_FATAL_ERR,
            RESULT_DETAIL("Invalid channel layout:%d", outputChannels)));
        return;
      }

      int32_t sampleRate = 0;
      aFormat->GetInteger(u"sample-rate"_ns, &sampleRate);
      LOG("Audio output format changed: channels:%d sample rate:%d",
          outputChannels, sampleRate);

      mDecoder->ProcessOutputFormatChange(outputChannels, sampleRate);
    }

    void HandleError(const MediaResult& aError) override {
      mDecoder->Error(aError);
    }

   private:
    // Non-owning; callbacks are cancelled and the native side disposed in
    // RemoteDataDecoder::Shutdown() before the decoder goes away.
    RemoteAudioDecoder* mDecoder;
  };

  // Precondition: mFirstDemuxedSampleTime is engaged (checked by callers via
  // ShouldDiscardSample()).
  bool IsSampleTimeSmallerThanFirstDemuxedSampleTime(int64_t aTime) const {
    return mFirstDemuxedSampleTime->ToMicroseconds() > aTime;
  }

  bool ShouldDiscardSample(int64_t aSession) const {
    AssertOnThread();
    // HandleOutput() runs on Android binder thread pool and could be preempted
    // by the RemoteDataDecoder task queue. That means ProcessOutput() could be
    // scheduled after Shutdown() or Flush(). We won't need the
    // sample which is returned after calling Shutdown() and Flush(). We can
    // check mFirstDemuxedSampleTime to know whether the Flush() has been
    // called, because it would be reset in Flush().
    return GetState() == State::SHUTDOWN || !mFirstDemuxedSampleTime ||
           mSession != aSession;
  }

  // Param and LocalRef are only valid for the duration of a JNI method call.
  // Use GlobalRef as the parameter type to keep the Java object referenced
  // until running.
  void ProcessOutput(java::Sample::GlobalRef&& aSample,
                     java::SampleBuffer::GlobalRef&& aBuffer) {
    // Arrives on the Java callback thread; bounce to mThread first.
    if (!mThread->IsOnCurrentThread()) {
      nsresult rv =
          mThread->Dispatch(NewRunnableMethod<java::Sample::GlobalRef&&,
                                              java::SampleBuffer::GlobalRef&&>(
              "RemoteAudioDecoder::ProcessOutput", this,
              &RemoteAudioDecoder::ProcessOutput, std::move(aSample),
              std::move(aBuffer)));
      MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
      Unused << rv;
      return;
    }

    AssertOnThread();

    if (ShouldDiscardSample(aSample->Session()) || !aBuffer->IsValid()) {
      aSample->Dispose();
      return;
    }

    // Returns the output buffer to the codec on every exit path below.
    RenderOrReleaseOutput autoRelease(mJavaDecoder, aSample);

    java::sdk::BufferInfo::LocalRef info = aSample->Info();
    MOZ_ASSERT(info);

    int32_t flags = 0;
    bool ok = NS_SUCCEEDED(info->Flags(&flags));
    bool isEOS = !!(flags & java::sdk::MediaCodec::BUFFER_FLAG_END_OF_STREAM);

    int32_t offset;
    ok &= NS_SUCCEEDED(info->Offset(&offset));

    int64_t presentationTimeUs;
    ok &= NS_SUCCEEDED(info->PresentationTimeUs(&presentationTimeUs));

    int32_t size;
    ok &= NS_SUCCEEDED(info->Size(&size));

    if (!ok ||
        (IsSampleTimeSmallerThanFirstDemuxedSampleTime(presentationTimeUs) &&
         !isEOS)) {
      Error(MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR, __func__));
      return;
    }

    if (size > 0) {
#ifdef MOZ_SAMPLE_TYPE_S16
      const int32_t numSamples = size / 2;  // 2 bytes per 16-bit PCM sample.
#else
#  error We only support 16-bit integer PCM
#endif

      AlignedAudioBuffer audio(numSamples);
      if (!audio) {
        Error(MediaResult(NS_ERROR_OUT_OF_MEMORY, __func__));
        return;
      }

      // Copy the decoded PCM out of the Java buffer into our own storage.
      jni::ByteBuffer::LocalRef dest = jni::ByteBuffer::New(audio.get(), size);
      aBuffer->WriteToByteBuffer(dest, offset, size);

      RefPtr<AudioData> data =
          new AudioData(0, TimeUnit::FromMicroseconds(presentationTimeUs),
                        std::move(audio), mOutputChannels, mOutputSampleRate);

      UpdateOutputStatus(std::move(data));
    }

    if (isEOS) {
      DrainComplete();
    }
  }

  void ProcessOutputFormatChange(int32_t aChannels, int32_t aSampleRate) {
    // Arrives on the Java callback thread; bounce to mThread first.
    if (!mThread->IsOnCurrentThread()) {
      nsresult rv = mThread->Dispatch(NewRunnableMethod<int32_t, int32_t>(
          "RemoteAudioDecoder::ProcessOutputFormatChange", this,
          &RemoteAudioDecoder::ProcessOutputFormatChange, aChannels,
          aSampleRate));
      MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
      Unused << rv;
      return;
    }

    AssertOnThread();

    mOutputChannels = aChannels;
    mOutputSampleRate = aSampleRate;
  }

  int32_t mOutputChannels;    // Updated by ProcessOutputFormatChange().
  int32_t mOutputSampleRate;  // Updated by ProcessOutputFormatChange().
  // Time of the first sample decoded after creation/flush; reset by Flush().
  Maybe<TimeUnit> mFirstDemuxedSampleTime;
};
557
CreateAudioDecoder(const CreateDecoderParams & aParams,const nsString & aDrmStubId,CDMProxy * aProxy)558 already_AddRefed<MediaDataDecoder> RemoteDataDecoder::CreateAudioDecoder(
559 const CreateDecoderParams& aParams, const nsString& aDrmStubId,
560 CDMProxy* aProxy) {
561 const AudioInfo& config = aParams.AudioConfig();
562 java::sdk::MediaFormat::LocalRef format;
563 NS_ENSURE_SUCCESS(
564 java::sdk::MediaFormat::CreateAudioFormat(config.mMimeType, config.mRate,
565 config.mChannels, &format),
566 nullptr);
567
568 RefPtr<MediaDataDecoder> decoder =
569 new RemoteAudioDecoder(config, format, aDrmStubId);
570 if (aProxy) {
571 decoder = new EMEMediaDataDecoderProxy(aParams, decoder.forget(), aProxy);
572 }
573 return decoder.forget();
574 }
575
CreateVideoDecoder(const CreateDecoderParams & aParams,const nsString & aDrmStubId,CDMProxy * aProxy)576 already_AddRefed<MediaDataDecoder> RemoteDataDecoder::CreateVideoDecoder(
577 const CreateDecoderParams& aParams, const nsString& aDrmStubId,
578 CDMProxy* aProxy) {
579 const VideoInfo& config = aParams.VideoConfig();
580 java::sdk::MediaFormat::LocalRef format;
581 NS_ENSURE_SUCCESS(java::sdk::MediaFormat::CreateVideoFormat(
582 TranslateMimeType(config.mMimeType),
583 config.mImage.width, config.mImage.height, &format),
584 nullptr);
585
586 RefPtr<MediaDataDecoder> decoder =
587 new RemoteVideoDecoder(config, format, aDrmStubId);
588 if (aProxy) {
589 decoder = new EMEMediaDataDecoderProxy(aParams, decoder.forget(), aProxy);
590 }
591 return decoder.forget();
592 }
593
// Base-class constructor: records the stream identity and the Java
// MediaFormat. The Java codec itself is created later in the subclass Init().
RemoteDataDecoder::RemoteDataDecoder(MediaData::Type aType,
                                     const nsACString& aMimeType,
                                     java::sdk::MediaFormat::Param aFormat,
                                     const nsString& aDrmStubId)
    : mType(aType),
      mMimeType(aMimeType),
      mFormat(aFormat),
      mDrmStubId(aDrmStubId),
      mSession(0),
      mNumPendingInputs(0) {}
604
RefPtr<MediaDataDecoder::FlushPromise> RemoteDataDecoder::Flush() {
  AssertOnThread();
  MOZ_ASSERT(GetState() != State::SHUTDOWN);

  // Discard decoded-but-undelivered output, cancel in-flight work, then tell
  // the Java codec to flush its own queues.
  mDecodedData = DecodedData();
  UpdatePendingInputStatus(PendingOp::CLEAR);
  mDecodePromise.RejectIfExists(NS_ERROR_DOM_MEDIA_CANCELED, __func__);
  mDrainPromise.RejectIfExists(NS_ERROR_DOM_MEDIA_CANCELED, __func__);
  SetState(State::DRAINED);
  mJavaDecoder->Flush();
  return FlushPromise::CreateAndResolve(true, __func__);
}
617
RefPtr<MediaDataDecoder::DecodePromise> RemoteDataDecoder::Drain() {
  AssertOnThread();
  if (GetState() == State::SHUTDOWN) {
    return DecodePromise::CreateAndReject(NS_ERROR_DOM_MEDIA_CANCELED,
                                          __func__);
  }
  RefPtr<DecodePromise> p = mDrainPromise.Ensure(__func__);
  if (GetState() == State::DRAINED) {
    // There's no operation to perform other than returning any already
    // decoded data.
    ReturnDecodedData();
    return p;
  }

  if (GetState() == State::DRAINING) {
    // Draining operation already pending, let it complete its course.
    return p;
  }

  // Queue an empty input flagged EOS; the codec will emit remaining output
  // followed by an EOS output, which triggers DrainComplete().
  SetState(State::DRAINING);
  mInputBufferInfo->Set(0, 0, -1,
                        java::sdk::MediaCodec::BUFFER_FLAG_END_OF_STREAM);
  mSession = mJavaDecoder->Input(nullptr, mInputBufferInfo, nullptr);
  return p;
}
643
RefPtr<ShutdownPromise> RemoteDataDecoder::Shutdown() {
  LOG("");
  AssertOnThread();
  // Mark shutdown first so late callbacks bail out early.
  SetState(State::SHUTDOWN);
  if (mJavaDecoder) {
    mJavaDecoder->Release();
    mJavaDecoder = nullptr;
  }

  if (mJavaCallbacks) {
    // Stop forwarding Java callbacks, then tear down the native bridge.
    JavaCallbacksSupport::GetNative(mJavaCallbacks)->Cancel();
    JavaCallbacksSupport::DisposeNative(mJavaCallbacks);
    mJavaCallbacks = nullptr;
  }

  mFormat = nullptr;

  return ShutdownPromise::CreateAndResolve(true, __func__);
}
663
GetCryptoInfoFromSample(const MediaRawData * aSample)664 static java::sdk::CryptoInfo::LocalRef GetCryptoInfoFromSample(
665 const MediaRawData* aSample) {
666 auto& cryptoObj = aSample->mCrypto;
667
668 if (!cryptoObj.IsEncrypted()) {
669 return nullptr;
670 }
671
672 java::sdk::CryptoInfo::LocalRef cryptoInfo;
673 nsresult rv = java::sdk::CryptoInfo::New(&cryptoInfo);
674 NS_ENSURE_SUCCESS(rv, nullptr);
675
676 uint32_t numSubSamples = std::min<uint32_t>(
677 cryptoObj.mPlainSizes.Length(), cryptoObj.mEncryptedSizes.Length());
678
679 uint32_t totalSubSamplesSize = 0;
680 for (auto& size : cryptoObj.mPlainSizes) {
681 totalSubSamplesSize += size;
682 }
683 for (auto& size : cryptoObj.mEncryptedSizes) {
684 totalSubSamplesSize += size;
685 }
686
687 // Deep copy the plain sizes so we can modify them.
688 nsTArray<uint32_t> plainSizes = cryptoObj.mPlainSizes.Clone();
689 uint32_t codecSpecificDataSize = aSample->Size() - totalSubSamplesSize;
690 // Size of codec specific data("CSD") for Android java::sdk::MediaCodec usage
691 // should be included in the 1st plain size if it exists.
692 if (!plainSizes.IsEmpty()) {
693 // This shouldn't overflow as the the plain size should be UINT16_MAX at
694 // most, and the CSD should never be that large. Checked int acts like a
695 // diagnostic assert here to help catch if we ever have insane inputs.
696 CheckedUint32 newLeadingPlainSize{plainSizes[0]};
697 newLeadingPlainSize += codecSpecificDataSize;
698 plainSizes[0] = newLeadingPlainSize.value();
699 }
700
701 static const int kExpectedIVLength = 16;
702 auto tempIV(cryptoObj.mIV);
703 auto tempIVLength = tempIV.Length();
704 MOZ_ASSERT(tempIVLength <= kExpectedIVLength);
705 for (size_t i = tempIVLength; i < kExpectedIVLength; i++) {
706 // Padding with 0
707 tempIV.AppendElement(0);
708 }
709
710 cryptoInfo->Set(numSubSamples, mozilla::jni::IntArray::From(plainSizes),
711 mozilla::jni::IntArray::From(cryptoObj.mEncryptedSizes),
712 mozilla::jni::ByteArray::From(cryptoObj.mKeyId),
713 mozilla::jni::ByteArray::From(tempIV),
714 java::sdk::MediaCodec::CRYPTO_MODE_AES_CTR);
715
716 return cryptoInfo;
717 }
718
RefPtr<MediaDataDecoder::DecodePromise> RemoteDataDecoder::Decode(
    MediaRawData* aSample) {
  AssertOnThread();
  MOZ_ASSERT(GetState() != State::SHUTDOWN);
  MOZ_ASSERT(aSample != nullptr);
  // Wrap the sample's bytes for JNI.
  jni::ByteBuffer::LocalRef bytes = jni::ByteBuffer::New(
      const_cast<uint8_t*>(aSample->Data()), aSample->Size());

  SetState(State::DRAINABLE);
  mInputBufferInfo->Set(0, aSample->Size(), aSample->mTime.ToMicroseconds(), 0);
  int64_t session = mJavaDecoder->Input(bytes, mInputBufferInfo,
                                        GetCryptoInfoFromSample(aSample));
  if (session == java::CodecProxy::INVALID_SESSION) {
    return DecodePromise::CreateAndReject(
        MediaResult(NS_ERROR_OUT_OF_MEMORY, __func__), __func__);
  }
  mSession = session;
  // Resolved later via UpdateInputStatus()/UpdateOutputStatus().
  return mDecodePromise.Ensure(__func__);
}
738
UpdatePendingInputStatus(PendingOp aOp)739 void RemoteDataDecoder::UpdatePendingInputStatus(PendingOp aOp) {
740 AssertOnThread();
741 switch (aOp) {
742 case PendingOp::INCREASE:
743 mNumPendingInputs++;
744 break;
745 case PendingOp::DECREASE:
746 mNumPendingInputs--;
747 break;
748 case PendingOp::CLEAR:
749 mNumPendingInputs = 0;
750 break;
751 }
752 }
753
void RemoteDataDecoder::UpdateInputStatus(int64_t aTimestamp, bool aProcessed) {
  // May arrive on the Java callback thread; bounce to mThread first.
  if (!mThread->IsOnCurrentThread()) {
    nsresult rv = mThread->Dispatch(NewRunnableMethod<int64_t, bool>(
        "RemoteDataDecoder::UpdateInputStatus", this,
        &RemoteDataDecoder::UpdateInputStatus, aTimestamp, aProcessed));
    MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
    Unused << rv;
    return;
  }
  AssertOnThread();
  if (GetState() == State::SHUTDOWN) {
    // Ignore late callbacks after shutdown.
    return;
  }

  if (!aProcessed) {
    // Input was queued but not yet consumed by the codec.
    UpdatePendingInputStatus(PendingOp::INCREASE);
  } else if (HasPendingInputs()) {
    UpdatePendingInputStatus(PendingOp::DECREASE);
  }

  if (!HasPendingInputs() ||  // Input has been processed, request the next one.
      !mDecodedData.IsEmpty()) {  // Previous output arrived before Decode().
    ReturnDecodedData();
  }
}
779
void RemoteDataDecoder::UpdateOutputStatus(RefPtr<MediaData>&& aSample) {
  AssertOnThread();
  if (GetState() == State::SHUTDOWN) {
    return;
  }
  // Subclasses may veto samples (e.g. video frames before a seek target).
  if (IsUsefulData(aSample)) {
    mDecodedData.AppendElement(std::move(aSample));
  }
  ReturnDecodedData();
}
790
void RemoteDataDecoder::ReturnDecodedData() {
  AssertOnThread();
  MOZ_ASSERT(GetState() != State::SHUTDOWN);

  // We only want to clear mDecodedData when we have resolved the promises.
  if (!mDecodePromise.IsEmpty()) {
    mDecodePromise.Resolve(std::move(mDecodedData), __func__);
    mDecodedData = DecodedData();
  } else if (!mDrainPromise.IsEmpty() &&
             (!mDecodedData.IsEmpty() || GetState() == State::DRAINED)) {
    // A drain promise resolves with whatever is queued, or with an empty
    // batch once fully drained.
    mDrainPromise.Resolve(std::move(mDecodedData), __func__);
    mDecodedData = DecodedData();
  }
}
805
void RemoteDataDecoder::DrainComplete() {
  // May arrive on the Java callback thread; bounce to mThread first.
  if (!mThread->IsOnCurrentThread()) {
    nsresult rv = mThread->Dispatch(
        NewRunnableMethod("RemoteDataDecoder::DrainComplete", this,
                          &RemoteDataDecoder::DrainComplete));
    MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
    Unused << rv;
    return;
  }
  AssertOnThread();
  if (GetState() == State::SHUTDOWN) {
    return;
  }
  SetState(State::DRAINED);
  // Deliver any remaining output and resolve the drain promise.
  ReturnDecodedData();
}
822
void RemoteDataDecoder::Error(const MediaResult& aError) {
  // May arrive on the Java callback thread; bounce to mThread first.
  if (!mThread->IsOnCurrentThread()) {
    nsresult rv = mThread->Dispatch(NewRunnableMethod<MediaResult>(
        "RemoteDataDecoder::Error", this, &RemoteDataDecoder::Error, aError));
    MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
    Unused << rv;
    return;
  }
  AssertOnThread();
  if (GetState() == State::SHUTDOWN) {
    return;
  }
  // Fail whichever operations are outstanding.
  mDecodePromise.RejectIfExists(aError, __func__);
  mDrainPromise.RejectIfExists(aError, __func__);
}
838
839 } // namespace mozilla
840 #undef LOG
841