/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "mozilla/CheckedInt.h"
#include "mozilla/gfx/Point.h"
#include "mozilla/SyncRunnable.h"

#include "AudioSegment.h"
#include "DecodedStream.h"
#include "MediaData.h"
#include "MediaQueue.h"
#include "MediaStreamGraph.h"
#include "MediaStreamListener.h"
#include "OutputStreamManager.h"
#include "SharedBuffer.h"
#include "VideoSegment.h"
#include "VideoUtils.h"

namespace mozilla {

#undef DUMP_LOG
#define DUMP_LOG(x, ...) \
  NS_DebugBreak(NS_DEBUG_WARNING, \
                nsPrintfCString(x, ##__VA_ARGS__).get(), nullptr, nullptr, -1)

/*
 * A container class to make it easier to pass the playback info all the
 * way to DecodedStreamGraphListener from DecodedStream.
 */
struct PlaybackInfoInit {
  int64_t mStartTime;
  MediaInfo mInfo;
};

class DecodedStreamGraphListener : public MediaStreamListener {
public:
  DecodedStreamGraphListener(MediaStream* aStream,
                             MozPromiseHolder<GenericPromise>&& aPromise)
    : mMutex("DecodedStreamGraphListener::mMutex")
    , mStream(aStream)
  {
    mFinishPromise = Move(aPromise);
  }

  void NotifyOutput(MediaStreamGraph* aGraph, GraphTime aCurrentTime) override
  {
    MutexAutoLock lock(mMutex);
    if (mStream) {
      int64_t t = mStream->StreamTimeToMicroseconds(
        mStream->GraphTimeToStreamTime(aCurrentTime));
      mOnOutput.Notify(t);
    }
  }

  void NotifyEvent(MediaStreamGraph* aGraph,
                   MediaStreamGraphEvent aEvent) override
  {
    if (aEvent == MediaStreamGraphEvent::EVENT_FINISHED) {
      nsCOMPtr<nsIRunnable> event =
        NewRunnableMethod(this, &DecodedStreamGraphListener::DoNotifyFinished);
      aGraph->DispatchToMainThreadAfterStreamStateUpdate(event.forget());
    }
  }

  void DoNotifyFinished()
  {
    MOZ_ASSERT(NS_IsMainThread());
    mFinishPromise.ResolveIfExists(true, __func__);
  }

  void Forget()
  {
    RefPtr<DecodedStreamGraphListener> self = this;
    AbstractThread::MainThread()->Dispatch(NS_NewRunnableFunction([self] () {
      MOZ_ASSERT(NS_IsMainThread());
      self->mFinishPromise.ResolveIfExists(true, __func__);
    }));
    MutexAutoLock lock(mMutex);
    mStream = nullptr;
  }

  MediaEventSource<int64_t>& OnOutput()
  {
    return mOnOutput;
  }

private:
  MediaEventProducer<int64_t> mOnOutput;

  Mutex mMutex;
  // Members below are protected by mMutex.
  RefPtr<MediaStream> mStream;
  // Main thread only.
  MozPromiseHolder<GenericPromise> mFinishPromise;
};

static void
UpdateStreamSuspended(MediaStream* aStream, bool aBlocking)
{
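  // MediaStream::Suspend()/Resume() must run on the main thread; when called
  // from another thread we dispatch a runnable instead of touching the
  // stream directly.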
  if (NS_IsMainThread()) {
    if (aBlocking) {
      aStream->Suspend();
    } else {
      aStream->Resume();
    }
  } else {
    nsCOMPtr<nsIRunnable> r;
    if (aBlocking) {
      r = NewRunnableMethod(aStream, &MediaStream::Suspend);
    } else {
      r = NewRunnableMethod(aStream, &MediaStream::Resume);
    }
    AbstractThread::MainThread()->Dispatch(r.forget());
  }
}

/*
 * All MediaStream-related data is protected by the decoder's monitor.
 * We have at most one DecodedStreamData per MediaDecoder. Its stream
 * is used as the input for each ProcessedMediaStream created by calls to
 * captureStream() and captureStreamUntilEnded(). Seeking creates a new
 * source stream, as does replaying after the input has ended. In the latter
 * case, the new source is not connected to streams created by
 * captureStreamUntilEnded.
 */
class DecodedStreamData {
public:
  DecodedStreamData(OutputStreamManager* aOutputStreamManager,
                    PlaybackInfoInit&& aInit,
                    MozPromiseHolder<GenericPromise>&& aPromise);
  ~DecodedStreamData();
  void SetPlaying(bool aPlaying);
  MediaEventSource<int64_t>& OnOutput();
  void Forget();
  void DumpDebugInfo();

  /* The following group of fields are protected by the decoder's monitor
   * and can be read or written on any thread.
   */
  // Count of audio frames written to the stream.
  int64_t mAudioFramesWritten;
  // mNextVideoTime is the end timestamp for the last packet sent to the stream.
  // Therefore video packets starting at or after this time need to be copied
  // to the output stream.
  int64_t mNextVideoTime; // microseconds
  int64_t mNextAudioTime; // microseconds
  // The last video image sent to the stream. Useful if we need to replicate
  // the image.
  RefPtr<layers::Image> mLastVideoImage;
  gfx::IntSize mLastVideoImageDisplaySize;
  bool mHaveSentFinish;
  bool mHaveSentFinishAudio;
  bool mHaveSentFinishVideo;

  // The decoder is responsible for calling Destroy() on this stream.
  const RefPtr<SourceMediaStream> mStream;
  const RefPtr<DecodedStreamGraphListener> mListener;
  bool mPlaying;
  // True if we need to send a compensation video frame to keep the
  // StreamTime advancing when the last video chunk has zero duration.
  bool mEOSVideoCompensation;

  const RefPtr<OutputStreamManager> mOutputStreamManager;
};

DecodedStreamData::DecodedStreamData(OutputStreamManager* aOutputStreamManager,
                                     PlaybackInfoInit&& aInit,
                                     MozPromiseHolder<GenericPromise>&& aPromise)
  : mAudioFramesWritten(0)
  , mNextVideoTime(aInit.mStartTime)
  , mNextAudioTime(aInit.mStartTime)
  , mHaveSentFinish(false)
  , mHaveSentFinishAudio(false)
  , mHaveSentFinishVideo(false)
  , mStream(aOutputStreamManager->Graph()->CreateSourceStream())
  // DecodedStreamGraphListener will resolve this promise.
  , mListener(new DecodedStreamGraphListener(mStream, Move(aPromise)))
  // mPlaying is initially true because MDSM won't start playback until playing
  // becomes true. This is consistent with the settings of AudioSink.
  , mPlaying(true)
  , mEOSVideoCompensation(false)
  , mOutputStreamManager(aOutputStreamManager)
{
  mStream->AddListener(mListener);
  mOutputStreamManager->Connect(mStream);

  // Initialize tracks.
  if (aInit.mInfo.HasAudio()) {
    mStream->AddAudioTrack(aInit.mInfo.mAudio.mTrackId,
                           aInit.mInfo.mAudio.mRate,
                           0, new AudioSegment());
  }
  if (aInit.mInfo.HasVideo()) {
    mStream->AddTrack(aInit.mInfo.mVideo.mTrackId, 0, new VideoSegment());
  }
}

DecodedStreamData::~DecodedStreamData()
{
  mOutputStreamManager->Disconnect();
  mStream->Destroy();
}

MediaEventSource<int64_t>&
DecodedStreamData::OnOutput()
{
  return mListener->OnOutput();
}

void
DecodedStreamData::SetPlaying(bool aPlaying)
{
  if (mPlaying != aPlaying) {
    mPlaying = aPlaying;
    UpdateStreamSuspended(mStream, !mPlaying);
  }
}

void
DecodedStreamData::Forget()
{
  mListener->Forget();
}

void
DecodedStreamData::DumpDebugInfo()
{
  DUMP_LOG(
    "DecodedStreamData=%p mPlaying=%d mAudioFramesWritten=%lld "
    "mNextAudioTime=%lld mNextVideoTime=%lld mHaveSentFinish=%d "
    "mHaveSentFinishAudio=%d mHaveSentFinishVideo=%d",
    this, mPlaying, mAudioFramesWritten, mNextAudioTime, mNextVideoTime,
    mHaveSentFinish, mHaveSentFinishAudio, mHaveSentFinishVideo);
}

DecodedStream::DecodedStream(AbstractThread* aOwnerThread,
                             MediaQueue<MediaData>& aAudioQueue,
                             MediaQueue<MediaData>& aVideoQueue,
                             OutputStreamManager* aOutputStreamManager,
                             const bool& aSameOrigin,
                             const PrincipalHandle& aPrincipalHandle)
  : mOwnerThread(aOwnerThread)
  , mOutputStreamManager(aOutputStreamManager)
  , mPlaying(false)
  , mSameOrigin(aSameOrigin)
  , mPrincipalHandle(aPrincipalHandle)
  , mAudioQueue(aAudioQueue)
  , mVideoQueue(aVideoQueue)
{
}

DecodedStream::~DecodedStream()
{
  MOZ_ASSERT(mStartTime.isNothing(), "playback should've ended.");
}

const media::MediaSink::PlaybackParams&
DecodedStream::GetPlaybackParams() const
{
  AssertOwnerThread();
  return mParams;
}

void
DecodedStream::SetPlaybackParams(const PlaybackParams& aParams)
{
  AssertOwnerThread();
  mParams = aParams;
}

RefPtr<GenericPromise>
DecodedStream::OnEnded(TrackType aType)
{
  AssertOwnerThread();
  MOZ_ASSERT(mStartTime.isSome());

  if (aType == TrackInfo::kAudioTrack && mInfo.HasAudio()) {
    // TODO: we should return a promise which is resolved when the audio track
    // is finished. For now this promise is resolved when the whole stream is
    // finished.
    return mFinishPromise;
  } else if (aType == TrackInfo::kVideoTrack && mInfo.HasVideo()) {
    return mFinishPromise;
  }
  return nullptr;
}

void
DecodedStream::Start(int64_t aStartTime, const MediaInfo& aInfo)
{
  AssertOwnerThread();
  MOZ_ASSERT(mStartTime.isNothing(), "playback already started.");

  mStartTime.emplace(aStartTime);
  mLastOutputTime = 0;
  mInfo = aInfo;
  mPlaying = true;
  ConnectListener();

  class R : public Runnable {
    typedef MozPromiseHolder<GenericPromise> Promise;
  public:
    R(PlaybackInfoInit&& aInit, Promise&& aPromise, OutputStreamManager* aManager)
      : mInit(Move(aInit)), mOutputStreamManager(aManager)
    {
      mPromise = Move(aPromise);
    }
    NS_IMETHOD Run() override
    {
      MOZ_ASSERT(NS_IsMainThread());
      // No need to create a source stream when there are no output streams.
      // This happens when RemoveOutput() is called immediately after
      // StartPlayback().
      if (!mOutputStreamManager->Graph()) {
        // Resolve the promise to indicate the end of playback.
        mPromise.Resolve(true, __func__);
        return NS_OK;
      }
      mData = MakeUnique<DecodedStreamData>(
        mOutputStreamManager, Move(mInit), Move(mPromise));
      return NS_OK;
    }
    UniquePtr<DecodedStreamData> ReleaseData()
    {
      return Move(mData);
    }
  private:
    PlaybackInfoInit mInit;
    Promise mPromise;
    RefPtr<OutputStreamManager> mOutputStreamManager;
    UniquePtr<DecodedStreamData> mData;
  };

  MozPromiseHolder<GenericPromise> promise;
  mFinishPromise = promise.Ensure(__func__);
  PlaybackInfoInit init {
    aStartTime, aInfo
  };
  nsCOMPtr<nsIRunnable> r =
    new R(Move(init), Move(promise), mOutputStreamManager);
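  // Dispatch synchronously and block until R::Run() has completed on the
  // main thread, so that mData is populated (or known to be absent) before
  // Start() returns.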
  nsCOMPtr<nsIThread> mainThread = do_GetMainThread();
  SyncRunnable::DispatchToThread(mainThread, r);
  mData = static_cast<R*>(r.get())->ReleaseData();

  if (mData) {
    mOutputListener = mData->OnOutput().Connect(
      mOwnerThread, this, &DecodedStream::NotifyOutput);
    mData->SetPlaying(mPlaying);
    SendData();
  }
}

void
DecodedStream::Stop()
{
  AssertOwnerThread();
  MOZ_ASSERT(mStartTime.isSome(), "playback not started.");

  mStartTime.reset();
  DisconnectListener();
  mFinishPromise = nullptr;

  // Clear mData immediately when this playback session ends so we won't
  // send data to the wrong stream in SendData() in the next playback session.
  DestroyData(Move(mData));
}

bool
DecodedStream::IsStarted() const
{
  AssertOwnerThread();
  return mStartTime.isSome();
}

bool
DecodedStream::IsPlaying() const
{
  AssertOwnerThread();
  return IsStarted() && mPlaying;
}

void
DecodedStream::DestroyData(UniquePtr<DecodedStreamData> aData)
{
  AssertOwnerThread();

  if (!aData) {
    return;
  }

  mOutputListener.Disconnect();

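  // ~DecodedStreamData() destroys its SourceMediaStream, which must happen
  // on the main thread, so hand the raw pointer to a main-thread runnable
  // rather than letting |aData| die on the owner thread.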
  DecodedStreamData* data = aData.release();
  data->Forget();
  nsCOMPtr<nsIRunnable> r = NS_NewRunnableFunction([=] () {
    delete data;
  });
  AbstractThread::MainThread()->Dispatch(r.forget());
}

void
DecodedStream::SetPlaying(bool aPlaying)
{
  AssertOwnerThread();

  // Resume/pause matters only when playback has started.
  if (mStartTime.isNothing()) {
    return;
  }

  mPlaying = aPlaying;
  if (mData) {
    mData->SetPlaying(aPlaying);
  }
}

void
DecodedStream::SetVolume(double aVolume)
{
  AssertOwnerThread();
  mParams.mVolume = aVolume;
}

void
DecodedStream::SetPlaybackRate(double aPlaybackRate)
{
  AssertOwnerThread();
  mParams.mPlaybackRate = aPlaybackRate;
}

void
DecodedStream::SetPreservesPitch(bool aPreservesPitch)
{
  AssertOwnerThread();
  mParams.mPreservesPitch = aPreservesPitch;
}

static void
SendStreamAudio(DecodedStreamData* aStream, int64_t aStartTime,
                MediaData* aData, AudioSegment* aOutput, uint32_t aRate,
                const PrincipalHandle& aPrincipalHandle)
{
  // The number of audio frames used to fuzz rounding errors.
  static const int64_t AUDIO_FUZZ_FRAMES = 1;

  MOZ_ASSERT(aData);
  AudioData* audio = aData->As<AudioData>();
  // This logic has to mimic AudioSink closely to make sure we write
  // the exact same silences.
  CheckedInt64 audioWrittenOffset = aStream->mAudioFramesWritten +
                                    UsecsToFrames(aStartTime, aRate);
  CheckedInt64 frameOffset = UsecsToFrames(audio->mTime, aRate);

  if (!audioWrittenOffset.isValid() ||
      !frameOffset.isValid() ||
      // Ignore packets that we've already processed.
      audio->GetEndTime() <= aStream->mNextAudioTime) {
    return;
  }

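  // Illustrative example of the catch-up logic below: at aRate=48000, with
  // aStartTime=0 and 480 frames already written, audioWrittenOffset is 480.
  // If the next packet starts at 20ms (frameOffset=960), we are 480 frames
  // behind, well past AUDIO_FUZZ_FRAMES, so 480 frames of silence are
  // appended before the packet's data.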
  if (audioWrittenOffset.value() + AUDIO_FUZZ_FRAMES < frameOffset.value()) {
    int64_t silentFrames = frameOffset.value() - audioWrittenOffset.value();
    // Write silence to catch up.
    AudioSegment silence;
    silence.InsertNullDataAtStart(silentFrames);
    aStream->mAudioFramesWritten += silentFrames;
    audioWrittenOffset += silentFrames;
    aOutput->AppendFrom(&silence);
  }

  // Always write the whole sample without truncation to be consistent with
  // DecodedAudioDataSink::PlayFromAudioQueue().
  audio->EnsureAudioBuffer();
  RefPtr<SharedBuffer> buffer = audio->mAudioBuffer;
  AudioDataValue* bufferData = static_cast<AudioDataValue*>(buffer->Data());
  AutoTArray<const AudioDataValue*, 2> channels;
  for (uint32_t i = 0; i < audio->mChannels; ++i) {
    channels.AppendElement(bufferData + i * audio->mFrames);
  }
  aOutput->AppendFrames(buffer.forget(), channels, audio->mFrames,
                        aPrincipalHandle);
  aStream->mAudioFramesWritten += audio->mFrames;

  aStream->mNextAudioTime = audio->GetEndTime();
}

void
DecodedStream::SendAudio(double aVolume, bool aIsSameOrigin,
                         const PrincipalHandle& aPrincipalHandle)
{
  AssertOwnerThread();

  if (!mInfo.HasAudio()) {
    return;
  }

  AudioSegment output;
  uint32_t rate = mInfo.mAudio.mRate;
  AutoTArray<RefPtr<MediaData>, 10> audio;
  TrackID audioTrackId = mInfo.mAudio.mTrackId;
  SourceMediaStream* sourceStream = mData->mStream;

  // It's OK to hold references to the AudioData because AudioData
  // is ref-counted.
  mAudioQueue.GetElementsAfter(mData->mNextAudioTime, &audio);
  for (uint32_t i = 0; i < audio.Length(); ++i) {
    SendStreamAudio(mData.get(), mStartTime.ref(), audio[i], &output, rate,
                    aPrincipalHandle);
  }

  output.ApplyVolume(aVolume);

  if (!aIsSameOrigin) {
    output.ReplaceWithDisabled();
  }

  // |mNextAudioTime| is updated as we process each audio sample in
  // SendStreamAudio(). This is consistent with how |mNextVideoTime|
  // is updated for video samples.
  if (output.GetDuration() > 0) {
    sourceStream->AppendToTrack(audioTrackId, &output);
  }

  if (mAudioQueue.IsFinished() && !mData->mHaveSentFinishAudio) {
    sourceStream->EndTrack(audioTrackId);
    mData->mHaveSentFinishAudio = true;
  }
}

static void
WriteVideoToMediaStream(MediaStream* aStream,
                        layers::Image* aImage,
                        int64_t aEndMicroseconds,
                        int64_t aStartMicroseconds,
                        const mozilla::gfx::IntSize& aIntrinsicSize,
                        const TimeStamp& aTimeStamp,
                        VideoSegment* aOutput,
                        const PrincipalHandle& aPrincipalHandle)
{
  RefPtr<layers::Image> image = aImage;
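  // Round both endpoints down and take the difference, rather than rounding
  // the difference, so that the durations of consecutive frames tile the
  // stream time exactly with no gaps or overlaps.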
  StreamTime duration =
      aStream->MicrosecondsToStreamTimeRoundDown(aEndMicroseconds) -
      aStream->MicrosecondsToStreamTimeRoundDown(aStartMicroseconds);
  aOutput->AppendFrame(image.forget(), duration, aIntrinsicSize,
                       aPrincipalHandle, false, aTimeStamp);
}

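// Illustrative example: if aInput has a total duration of 100 stream ticks
// and its last frame starts at tick 100, that frame occupies no stream time,
// i.e. its duration is zero.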
static bool
ZeroDurationAtLastChunk(VideoSegment& aInput)
{
  // Get the last video frame's start time in VideoSegment aInput.
  // If the start time equals the duration of aInput, the last video
  // frame's duration is zero.
  StreamTime lastVideoStartTime;
  aInput.GetLastFrame(&lastVideoStartTime);
  return lastVideoStartTime == aInput.GetDuration();
}

void
DecodedStream::SendVideo(bool aIsSameOrigin, const PrincipalHandle& aPrincipalHandle)
{
  AssertOwnerThread();

  if (!mInfo.HasVideo()) {
    return;
  }

  VideoSegment output;
  TrackID videoTrackId = mInfo.mVideo.mTrackId;
  AutoTArray<RefPtr<MediaData>, 10> video;
  SourceMediaStream* sourceStream = mData->mStream;

  // It's OK to hold references to the VideoData because VideoData
  // is ref-counted.
  mVideoQueue.GetElementsAfter(mData->mNextVideoTime, &video);

  // tracksStartTimeStamp might be null when the SourceMediaStream has not
  // yet been added to the MediaStreamGraph.
  TimeStamp tracksStartTimeStamp =
    sourceStream->GetStreamTracksStrartTimeStamp();
  if (tracksStartTimeStamp.IsNull()) {
    tracksStartTimeStamp = TimeStamp::Now();
  }

  for (uint32_t i = 0; i < video.Length(); ++i) {
    VideoData* v = video[i]->As<VideoData>();

    if (mData->mNextVideoTime < v->mTime) {
      // Write the last video frame to catch up. mLastVideoImage can be null
      // here, which is fine: it just means there's no video.

      // TODO: |mLastVideoImage| should come from the last image rendered
      // by the state machine. This will avoid the black frame when capture
      // happens in the middle of playback (especially in the middle of a
      // video frame). E.g. if we have a video frame that is 30 sec long
      // and capture happens at 15 sec, we'll have to append a black frame
      // that is 15 sec long.
      WriteVideoToMediaStream(sourceStream, mData->mLastVideoImage, v->mTime,
          mData->mNextVideoTime, mData->mLastVideoImageDisplaySize,
          tracksStartTimeStamp + TimeDuration::FromMicroseconds(v->mTime),
          &output, aPrincipalHandle);
      mData->mNextVideoTime = v->mTime;
    }

    if (mData->mNextVideoTime < v->GetEndTime()) {
      WriteVideoToMediaStream(sourceStream, v->mImage, v->GetEndTime(),
          mData->mNextVideoTime, v->mDisplay,
          tracksStartTimeStamp + TimeDuration::FromMicroseconds(v->GetEndTime()),
          &output, aPrincipalHandle);
      mData->mNextVideoTime = v->GetEndTime();
      mData->mLastVideoImage = v->mImage;
      mData->mLastVideoImageDisplaySize = v->mDisplay;
    }
  }

  // Check that the output is not empty.
  if (output.GetLastFrame()) {
    mData->mEOSVideoCompensation = ZeroDurationAtLastChunk(output);
  }

  if (!aIsSameOrigin) {
    output.ReplaceWithDisabled();
  }

  if (output.GetDuration() > 0) {
    sourceStream->AppendToTrack(videoTrackId, &output);
  }

  if (mVideoQueue.IsFinished() && !mData->mHaveSentFinishVideo) {
    if (mData->mEOSVideoCompensation) {
      VideoSegment endSegment;
      // The last chunk has zero duration. Append the last video frame for
      // one extra stream tick so that it actually takes up stream time.
      int64_t deviation_usec = sourceStream->StreamTimeToMicroseconds(1);
      WriteVideoToMediaStream(sourceStream, mData->mLastVideoImage,
          mData->mNextVideoTime + deviation_usec, mData->mNextVideoTime,
          mData->mLastVideoImageDisplaySize,
          tracksStartTimeStamp +
            TimeDuration::FromMicroseconds(mData->mNextVideoTime + deviation_usec),
          &endSegment, aPrincipalHandle);
      mData->mNextVideoTime += deviation_usec;
      MOZ_ASSERT(endSegment.GetDuration() > 0);
      if (!aIsSameOrigin) {
        endSegment.ReplaceWithDisabled();
      }
      sourceStream->AppendToTrack(videoTrackId, &endSegment);
    }
    sourceStream->EndTrack(videoTrackId);
    mData->mHaveSentFinishVideo = true;
  }
}

void
DecodedStream::AdvanceTracks()
{
  AssertOwnerThread();

  StreamTime endPosition = 0;

  if (mInfo.HasAudio()) {
    StreamTime audioEnd = mData->mStream->TicksToTimeRoundDown(
        mInfo.mAudio.mRate, mData->mAudioFramesWritten);
    endPosition = std::max(endPosition, audioEnd);
  }

  if (mInfo.HasVideo()) {
    StreamTime videoEnd = mData->mStream->MicrosecondsToStreamTimeRoundDown(
        mData->mNextVideoTime - mStartTime.ref());
    endPosition = std::max(endPosition, videoEnd);
  }

  if (!mData->mHaveSentFinish) {
    mData->mStream->AdvanceKnownTracksTime(endPosition);
  }
}

void
DecodedStream::SendData()
{
  AssertOwnerThread();
  MOZ_ASSERT(mStartTime.isSome(), "Must be called after StartPlayback()");

  // Not yet created on the main thread. MDSM will try again later.
  if (!mData) {
    return;
  }

  // Nothing to do when the stream is finished.
  if (mData->mHaveSentFinish) {
    return;
  }

  SendAudio(mParams.mVolume, mSameOrigin, mPrincipalHandle);
  SendVideo(mSameOrigin, mPrincipalHandle);
  AdvanceTracks();

  bool finished = (!mInfo.HasAudio() || mAudioQueue.IsFinished()) &&
                  (!mInfo.HasVideo() || mVideoQueue.IsFinished());

  if (finished && !mData->mHaveSentFinish) {
    mData->mHaveSentFinish = true;
    mData->mStream->Finish();
  }
}

int64_t
DecodedStream::GetEndTime(TrackType aType) const
{
  AssertOwnerThread();
  if (aType == TrackInfo::kAudioTrack && mInfo.HasAudio() && mData) {
    CheckedInt64 t = mStartTime.ref() +
      FramesToUsecs(mData->mAudioFramesWritten, mInfo.mAudio.mRate);
    if (t.isValid()) {
      return t.value();
    }
  } else if (aType == TrackInfo::kVideoTrack && mData) {
    return mData->mNextVideoTime;
  }
  return -1;
}

int64_t
DecodedStream::GetPosition(TimeStamp* aTimeStamp) const
{
  AssertOwnerThread();
  // This is only called after MDSM starts playback. So mStartTime is
  // guaranteed to be something.
  MOZ_ASSERT(mStartTime.isSome());
  if (aTimeStamp) {
    *aTimeStamp = TimeStamp::Now();
  }
  return mStartTime.ref() + mLastOutputTime;
}

void
DecodedStream::NotifyOutput(int64_t aTime)
{
  AssertOwnerThread();
  mLastOutputTime = aTime;
  int64_t currentTime = GetPosition();

  // Remove audio samples that have been played by MSG from the queue.
  RefPtr<MediaData> a = mAudioQueue.PeekFront();
  while (a && a->mTime < currentTime) {
    RefPtr<MediaData> releaseMe = mAudioQueue.PopFront();
    a = mAudioQueue.PeekFront();
  }
}

void
DecodedStream::ConnectListener()
{
  AssertOwnerThread();

  mAudioPushListener = mAudioQueue.PushEvent().Connect(
    mOwnerThread, this, &DecodedStream::SendData);
  mAudioFinishListener = mAudioQueue.FinishEvent().Connect(
    mOwnerThread, this, &DecodedStream::SendData);
  mVideoPushListener = mVideoQueue.PushEvent().Connect(
    mOwnerThread, this, &DecodedStream::SendData);
  mVideoFinishListener = mVideoQueue.FinishEvent().Connect(
    mOwnerThread, this, &DecodedStream::SendData);
}

void
DecodedStream::DisconnectListener()
{
  AssertOwnerThread();

  mAudioPushListener.Disconnect();
  mVideoPushListener.Disconnect();
  mAudioFinishListener.Disconnect();
  mVideoFinishListener.Disconnect();
}

void
DecodedStream::DumpDebugInfo()
{
  AssertOwnerThread();
  DUMP_LOG(
    "DecodedStream=%p mStartTime=%lld mLastOutputTime=%lld mPlaying=%d mData=%p",
    this, mStartTime.valueOr(-1), mLastOutputTime, mPlaying, mData.get());
  if (mData) {
    mData->DumpDebugInfo();
  }
}

} // namespace mozilla