/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/*

Each media element for a media file has one thread called the "audio thread".

The audio thread writes the decoded audio data to the audio
hardware. This is done in a separate thread to ensure that the
audio hardware gets a constant stream of data without
interruption due to decoding or display. At some point
AudioStream will be refactored to have a callback interface
where it asks for data and this thread will no longer be
needed.

The element/state machine also has a TaskQueue which runs in a
SharedThreadPool that is shared with all other elements/decoders. The state
machine dispatches tasks to this to call into the MediaDecoderReader to
request decoded audio or video data. The Reader will call back with decoded
samples when it has them available, and the state machine places the decoded
samples into its queues for the consuming threads to pull from.
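
As a rough sketch (simplified; the real code also tracks decoded end times and
re-evaluates the current state when data arrives), a request/callback pair
issued from the task queue looks roughly like this:

  RefPtr<MediaDecoderStateMachine> self = this;
  mReader->RequestAudioData()
      ->Then(OwnerThread(), __func__,
             [self](RefPtr<AudioData> aAudio) { self->PushAudio(aAudio); },
             [self](const MediaResult& aError) { self->DecodeError(aError); })
      ->Track(self->mAudioDataRequest);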

The MediaDecoderReader can choose to decode asynchronously, or synchronously
and return requested samples synchronously inside its Request*Data()
functions via callback. Asynchronous decoding is preferred, and should be
used for any new readers.

Synchronisation of state between the threads is done via a monitor owned
by MediaDecoder.

The lifetime of the audio thread is controlled by the state machine when
it runs on the shared state machine thread. When playback needs to occur
the audio thread is created and an event dispatched to run it. The audio
thread exits when audio playback is completed or no longer required.

A/V synchronisation is handled by the state machine. It examines the audio
playback time and compares this to the next frame in the queue of video
frames. If it is time to play the video frame it is then displayed, otherwise
it schedules the state machine to run again at the time of the next frame.
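
A rough sketch of that check (simplified; RenderVideoFrame() here is just an
illustrative name, not a real method of this class):

  media::TimeUnit clockTime = GetClock();
  RefPtr<VideoData> frame = VideoQueue().PeekFront();
  if (frame && frame->mTime <= clockTime) {
    RenderVideoFrame(VideoQueue().PopFront());
  } else if (frame) {
    ScheduleStateMachineIn(frame->mTime - clockTime);
  }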

Frame skipping is done in the following ways:

  1) The state machine will skip all frames in the video queue whose
     display time is less than the current audio time. This ensures
     the correct frame for the current time is always displayed.

  2) The decode tasks will stop decoding interframes and read to the
     next keyframe if they determine that decoding the remaining
     interframes will cause playback issues. They detect this by checking
     whether (see the sketch below):
       a) the amount of audio data in the audio queue drops
          below a threshold whereby audio may start to skip, or
       b) the video queue drops below a threshold where it
          will be decoding video data that won't be displayed due
          to the decode thread dropping the frame immediately.
     TODO: In future we should only do this when the Reader is decoding
           synchronously.
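
A rough sketch of those checks, in terms of helpers declared in this class
(simplified; the real conditions and thresholds are more involved):

  if (HasLowDecodedAudio() || HasLowDecodedVideo()) {
    // Ask the reader to skip ahead to the next keyframe rather than decode
    // interframes that would arrive too late to be displayed.
  }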

When hardware accelerated graphics is not available, YCbCr conversion
is done on the decode task queue when video frames are decoded.

The decode task queue pushes decoded audio and video frames into two
separate queues - one for audio and one for video. These are kept
separate to make it easy to constantly feed audio data to the audio
hardware while allowing frame skipping of video data. These queues are
threadsafe, and none of the decode, audio, or state machine threads
should be able to monopolize them and cause starvation of the other threads.

Both queues are bounded by a maximum size. When this size is reached
the decode tasks will no longer request video or audio depending on the
queue that has reached the threshold. If both queues are full, no more
decode tasks will be dispatched to the decode task queue, so other
decoders will have an opportunity to run.
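
In terms of the helpers declared below, the gating is roughly (a simplified
sketch, not the exact logic):

  if (!HaveEnoughDecodedAudio()) {
    RequestAudioData();
  }
  if (!HaveEnoughDecodedVideo()) {
    RequestVideoData(GetMediaTime());
  }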

During playback the audio thread will be idle (via a Wait() on the
monitor) if the audio queue is empty. Otherwise it constantly pops
audio data off the queue and plays it with a blocking write to the audio
hardware (via AudioStream).
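
Schematically (a simplified sketch; "playing" stands in for the sink's running
state, and the real code also handles end of stream, errors and shutdown):

  while (playing) {
    if (AudioQueue().GetSize() == 0) {
      // Wait() on the decoder monitor until more audio is pushed.
      continue;
    }
    RefPtr<AudioData> audio = AudioQueue().PopFront();
    // Blocking write of the decoded frames to the AudioStream.
  }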

*/
#if !defined(MediaDecoderStateMachine_h__)
#define MediaDecoderStateMachine_h__

#include "mozilla/Attributes.h"
#include "mozilla/ReentrantMonitor.h"
#include "mozilla/StateMirroring.h"

#include "nsAutoPtr.h"
#include "nsThreadUtils.h"
#include "MediaDecoder.h"
#include "MediaDecoderOwner.h"
#include "MediaEventSource.h"
#include "MediaFormatReader.h"
#include "MediaMetadataManager.h"
#include "MediaQueue.h"
#include "MediaStatistics.h"
#include "MediaTimer.h"
#include "ImageContainer.h"
#include "SeekJob.h"

namespace mozilla {

namespace media {
class MediaSink;
}

class AbstractThread;
class AudioSegment;
class DecodedStream;
class OutputStreamManager;
class ReaderProxy;
class TaskQueue;

extern LazyLogModule gMediaDecoderLog;

struct MediaPlaybackEvent {
  enum EventType {
    PlaybackStarted,
    PlaybackStopped,
    PlaybackProgressed,
    PlaybackEnded,
    SeekStarted,
    Loop,
    Invalidate,
    EnterVideoSuspend,
    ExitVideoSuspend,
    StartVideoSuspendTimer,
    CancelVideoSuspendTimer,
    VideoOnlySeekBegin,
    VideoOnlySeekCompleted,
  } mType;

  using DataType = Variant<Nothing, int64_t>;
  DataType mData;

  MOZ_IMPLICIT MediaPlaybackEvent(EventType aType)
      : mType(aType), mData(Nothing{}) {}

  template <typename T>
  MediaPlaybackEvent(EventType aType, T&& aArg)
      : mType(aType), mData(Forward<T>(aArg)) {}
};

enum class VideoDecodeMode : uint8_t { Normal, Suspend };

DDLoggedTypeDeclName(MediaDecoderStateMachine);

/*
  The state machine class. This manages the decoding and seeking in the
  MediaDecoderReader on the decode task queue, and A/V sync on the shared
  state machine thread, and controls the audio "push" thread.

  All internal state is synchronised via the decoder monitor. State changes
  are propagated by scheduling the state machine to run another cycle on the
  shared state machine thread.

  See MediaDecoder.h for more details.
*/
class MediaDecoderStateMachine
    : public DecoderDoctorLifeLogger<MediaDecoderStateMachine> {
  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MediaDecoderStateMachine)

  using TrackSet = MediaFormatReader::TrackSet;

 public:
  typedef MediaDecoderOwner::NextFrameStatus NextFrameStatus;
  typedef mozilla::layers::ImageContainer::FrameID FrameID;
  MediaDecoderStateMachine(MediaDecoder* aDecoder, MediaFormatReader* aReader);

  nsresult Init(MediaDecoder* aDecoder);

  // Enumeration for the valid decoding states
  enum State {
    DECODER_STATE_DECODING_METADATA,
    DECODER_STATE_DORMANT,
    DECODER_STATE_DECODING_FIRSTFRAME,
    DECODER_STATE_DECODING,
    DECODER_STATE_SEEKING,
    DECODER_STATE_BUFFERING,
    DECODER_STATE_COMPLETED,
    DECODER_STATE_SHUTDOWN
  };

  // Returns the state machine task queue.
  TaskQueue* OwnerThread() const { return mTaskQueue; }

  RefPtr<MediaDecoder::DebugInfoPromise> RequestDebugInfo();

  void AddOutputStream(ProcessedMediaStream* aStream,
                       TrackID aNextAvailableTrackID, bool aFinishWhenEnded);
  // Remove an output stream added with AddOutputStream.
  void RemoveOutputStream(MediaStream* aStream);
  TrackID NextAvailableTrackIDFor(MediaStream* aOutputStream) const;

  // Seeks the decoder to aTarget asynchronously.
  RefPtr<MediaDecoder::SeekPromise> InvokeSeek(const SeekTarget& aTarget);

  void DispatchSetPlaybackRate(double aPlaybackRate) {
    OwnerThread()->DispatchStateChange(NewRunnableMethod<double>(
        "MediaDecoderStateMachine::SetPlaybackRate", this,
        &MediaDecoderStateMachine::SetPlaybackRate, aPlaybackRate));
  }

  RefPtr<ShutdownPromise> BeginShutdown();

  // Set the media fragment end time.
  void DispatchSetFragmentEndTime(const media::TimeUnit& aEndTime) {
    RefPtr<MediaDecoderStateMachine> self = this;
    nsCOMPtr<nsIRunnable> r = NS_NewRunnableFunction(
        "MediaDecoderStateMachine::DispatchSetFragmentEndTime",
        [self, aEndTime]() {
          // A negative number means we don't have a fragment end time at all.
          self->mFragmentEndTime = aEndTime >= media::TimeUnit::Zero()
                                       ? aEndTime
                                       : media::TimeUnit::Invalid();
        });
    nsresult rv = OwnerThread()->Dispatch(r.forget());
    MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
    Unused << rv;
  }

  void DispatchCanPlayThrough(bool aCanPlayThrough) {
    RefPtr<MediaDecoderStateMachine> self = this;
    nsCOMPtr<nsIRunnable> r = NS_NewRunnableFunction(
        "MediaDecoderStateMachine::DispatchCanPlayThrough",
        [self, aCanPlayThrough]() { self->mCanPlayThrough = aCanPlayThrough; });
    OwnerThread()->DispatchStateChange(r.forget());
  }

  void DispatchIsLiveStream(bool aIsLiveStream) {
    RefPtr<MediaDecoderStateMachine> self = this;
    nsCOMPtr<nsIRunnable> r = NS_NewRunnableFunction(
        "MediaDecoderStateMachine::DispatchIsLiveStream",
        [self, aIsLiveStream]() { self->mIsLiveStream = aIsLiveStream; });
    OwnerThread()->DispatchStateChange(r.forget());
  }

  TimedMetadataEventSource& TimedMetadataEvent() {
    return mMetadataManager.TimedMetadataEvent();
  }

  MediaEventSource<void>& OnMediaNotSeekable() const;

  MediaEventSourceExc<UniquePtr<MediaInfo>, UniquePtr<MetadataTags>,
                      MediaDecoderEventVisibility>&
  MetadataLoadedEvent() {
    return mMetadataLoadedEvent;
  }

  MediaEventSourceExc<nsAutoPtr<MediaInfo>, MediaDecoderEventVisibility>&
  FirstFrameLoadedEvent() {
    return mFirstFrameLoadedEvent;
  }

  MediaEventSource<MediaPlaybackEvent>& OnPlaybackEvent() {
    return mOnPlaybackEvent;
  }
  MediaEventSource<MediaResult>& OnPlaybackErrorEvent() {
    return mOnPlaybackErrorEvent;
  }

  MediaEventSource<DecoderDoctorEvent>& OnDecoderDoctorEvent() {
    return mOnDecoderDoctorEvent;
  }

  MediaEventSource<NextFrameStatus>& OnNextFrameStatus() {
    return mOnNextFrameStatus;
  }

  size_t SizeOfVideoQueue() const;

  size_t SizeOfAudioQueue() const;

  // Sets the video decode mode. Used by the suspend-video-decoder feature.
  void SetVideoDecodeMode(VideoDecodeMode aMode);

 private:
  class StateObject;
  class DecodeMetadataState;
  class DormantState;
  class DecodingFirstFrameState;
  class DecodingState;
  class SeekingState;
  class AccurateSeekingState;
  class NextFrameSeekingState;
  class NextFrameSeekingFromDormantState;
  class VideoOnlySeekingState;
  class BufferingState;
  class CompletedState;
  class ShutdownState;

  static const char* ToStateStr(State aState);
  const char* ToStateStr();

  nsCString GetDebugInfo();

  // Functions used by assertions to ensure we're calling things
  // on the appropriate threads.
  bool OnTaskQueue() const;

  // Initialization that needs to happen on the task queue. This is the first
  // task that gets run on the task queue, and is dispatched from the MDSM
  // constructor immediately after the task queue is created.
  void InitializationTask(MediaDecoder* aDecoder);

  void SetAudioCaptured(bool aCaptured);

  RefPtr<MediaDecoder::SeekPromise> Seek(const SeekTarget& aTarget);

  RefPtr<ShutdownPromise> Shutdown();

  RefPtr<ShutdownPromise> FinishShutdown();

  // Update the playback position. This can result in a timeupdate event
  // and an invalidate of the frame being dispatched asynchronously if
  // there is no such event currently queued.
  // Only called on the decoder thread. Must be called with
  // the decode monitor held.
  void UpdatePlaybackPosition(const media::TimeUnit& aTime);

  bool HasAudio() const { return mInfo.ref().HasAudio(); }
  bool HasVideo() const { return mInfo.ref().HasVideo(); }
  const MediaInfo& Info() const { return mInfo.ref(); }

  // Schedules the shared state machine thread to run the state machine.
  void ScheduleStateMachine();

  // Invokes ScheduleStateMachine to run in |aTime|,
  // unless it's already scheduled to run earlier, in which case the
  // request is discarded.
  void ScheduleStateMachineIn(const media::TimeUnit& aTime);

  bool HaveEnoughDecodedAudio();
  bool HaveEnoughDecodedVideo();

  // Returns true if we're currently playing. The decoder monitor must
  // be held.
  bool IsPlaying() const;

  // Sets mMediaSeekable to false.
  void SetMediaNotSeekable();

  // Resets all states related to decoding and aborts all pending requests
  // to the decoders.
  void ResetDecode(TrackSet aTracks = TrackSet(TrackInfo::kAudioTrack,
                                               TrackInfo::kVideoTrack));

  void SetVideoDecodeModeInternal(VideoDecodeMode aMode);

 protected:
  virtual ~MediaDecoderStateMachine();

  void BufferedRangeUpdated();

  void ReaderSuspendedChanged();

  // Inserts a sample into the Audio/Video queue.
  // aSample must not be null.
  void PushAudio(AudioData* aSample);
  void PushVideo(VideoData* aSample);

  void OnAudioPopped(const RefPtr<AudioData>& aSample);
  void OnVideoPopped(const RefPtr<VideoData>& aSample);

  void AudioAudibleChanged(bool aAudible);

  void VolumeChanged();
  void SetPlaybackRate(double aPlaybackRate);
  void PreservesPitchChanged();
  void LoopingChanged();

  MediaQueue<AudioData>& AudioQueue() { return mAudioQueue; }
  MediaQueue<VideoData>& VideoQueue() { return mVideoQueue; }

  // True if we are low in decoded audio/video data.
  // May not be invoked when mReader->UseBufferingHeuristics() is false.
  bool HasLowDecodedData();

  bool HasLowDecodedAudio();

  bool HasLowDecodedVideo();

  bool OutOfDecodedAudio();

  bool OutOfDecodedVideo() {
    MOZ_ASSERT(OnTaskQueue());
    return IsVideoDecoding() && VideoQueue().GetSize() <= 1;
  }

  // Returns true if we're running low on buffered data.
  bool HasLowBufferedData();

  // Returns true if we have less than aThreshold of buffered data available.
  bool HasLowBufferedData(const media::TimeUnit& aThreshold);

  // Return the current time, either the audio clock if available (if the media
  // has audio, and the playback is possible), or a clock for the video.
  // Called on the state machine thread.
  // If aTimeStamp is non-null, set *aTimeStamp to the TimeStamp corresponding
  // to the returned stream time.
  media::TimeUnit GetClock(TimeStamp* aTimeStamp = nullptr) const;

  // Update only the state machine's current playback position (and duration,
  // if unknown).  Does not update the playback position on the decoder or
  // media element -- use UpdatePlaybackPosition for that.  Called on the state
  // machine thread, caller must hold the decoder lock.
  void UpdatePlaybackPositionInternal(const media::TimeUnit& aTime);

  // Update playback position and trigger next update by default time period.
  // Called on the state machine thread.
  void UpdatePlaybackPositionPeriodically();

  media::MediaSink* CreateAudioSink();

  // Always create a media sink, which contains an AudioSink or StreamSink
  // inside.
  already_AddRefed<media::MediaSink> CreateMediaSink(bool aAudioCaptured);

  // Stops the media sink and shuts it down.
  // The decoder monitor must be held with exactly one lock count.
  // Called on the state machine thread.
  void StopMediaSink();

  // Create and start the media sink.
  // The decoder monitor must be held with exactly one lock count.
  // Called on the state machine thread.
  void StartMediaSink();

  // Notification method invoked when mPlayState changes.
  void PlayStateChanged();

  // Notification method invoked when mIsVisible changes.
  void VisibilityChanged();

  // Sets internal state which causes playback of media to pause.
  // The decoder monitor must be held.
  void StopPlayback();

  // If the conditions are right, sets internal state which causes playback
  // of media to begin or resume.
  // Must be called with the decode monitor held.
  void MaybeStartPlayback();

  // Moves the decoder into the shutdown state, and dispatches an error
  // event to the media element. This begins shutting down the decoder.
  // The decoder monitor must be held. This is only called on the
  // decode thread.
  void DecodeError(const MediaResult& aError);

  void EnqueueFirstFrameLoadedEvent();

  // Start a task to decode audio.
  void RequestAudioData();

  // Start a task to decode video.
  void RequestVideoData(const media::TimeUnit& aCurrentTime);

  void WaitForData(MediaData::Type aType);

  bool IsRequestingAudioData() const { return mAudioDataRequest.Exists(); }
  bool IsRequestingVideoData() const { return mVideoDataRequest.Exists(); }
  bool IsWaitingAudioData() const { return mAudioWaitRequest.Exists(); }
  bool IsWaitingVideoData() const { return mVideoWaitRequest.Exists(); }

  // Returns the "media time". This is the absolute time which the media
  // playback has reached. i.e. this returns values in the range
  // [mStartTime, mEndTime], and mStartTime will not be 0 if the media does
  // not start at 0. Note this is different than the "current playback
  // position", which is in the range [0,duration].
  media::TimeUnit GetMediaTime() const {
    MOZ_ASSERT(OnTaskQueue());
    return mCurrentPosition;
  }

  // Returns an upper bound on the number of microseconds of audio that is
  // decoded and playable. This is the sum of the number of usecs of audio which
  // is decoded and in the reader's audio queue, and the usecs of unplayed audio
  // which has been pushed to the audio hardware for playback. Note that after
  // calling this, the audio hardware may play some of the audio pushed to
  // hardware, so this can only be used as an upper bound. The decoder monitor
  // must be held when calling this. Called on the decode thread.
  media::TimeUnit GetDecodedAudioDuration();

  void FinishDecodeFirstFrame();

  // Performs one "cycle" of the state machine.
  void RunStateMachine();

  bool IsStateMachineScheduled() const;

  // These return true if the respective stream's decode has not yet reached
  // the end of stream.
  bool IsAudioDecoding();
  bool IsVideoDecoding();

 private:
  // Resolved by the MediaSink to signal that all outstanding audio/video work
  // is complete, and to identify which part (a/v) of the sink is shutting
  // down.
  void OnMediaSinkAudioComplete();
  void OnMediaSinkVideoComplete();

  // Rejected by the MediaSink to signal errors for audio/video.
  void OnMediaSinkAudioError(nsresult aResult);
  void OnMediaSinkVideoError();

  void* const mDecoderID;
  const RefPtr<AbstractThread> mAbstractMainThread;
  const RefPtr<FrameStatistics> mFrameStats;
  const RefPtr<VideoFrameContainer> mVideoFrameContainer;

  // Task queue for running the state machine.
  RefPtr<TaskQueue> mTaskQueue;

  // State-watching manager.
  WatchManager<MediaDecoderStateMachine> mWatchManager;

  // True if we've dispatched a task to run the state machine but the task has
  // yet to run.
  bool mDispatchedStateMachine;

  // Used to dispatch another round schedule with specific target time.
  DelayedScheduler mDelayedScheduler;

  // Queue of audio frames. This queue is threadsafe, and is accessed from
  // the audio, decoder, state machine, and main threads.
  MediaQueue<AudioData> mAudioQueue;
  // Queue of video frames. This queue is threadsafe, and is accessed from
  // the decoder, state machine, and main threads.
  MediaQueue<VideoData> mVideoQueue;

  UniquePtr<StateObject> mStateObj;

  media::TimeUnit Duration() const {
    MOZ_ASSERT(OnTaskQueue());
    return mDuration.Ref().ref();
  }

  // FrameID which increments every time a frame is pushed to our queue.
  FrameID mCurrentFrameID;

  // Media Fragment end time.
  media::TimeUnit mFragmentEndTime = media::TimeUnit::Invalid();

  // The media sink resource.  Used on the state machine thread.
  RefPtr<media::MediaSink> mMediaSink;

  const RefPtr<ReaderProxy> mReader;

  // The end time of the last audio frame that's been pushed onto the media sink
  // in microseconds. This will approximately be the end time
  // of the audio stream, unless another frame is pushed to the hardware.
  media::TimeUnit AudioEndTime() const;

  // The end time of the last rendered video frame that's been sent to
  // compositor.
  media::TimeUnit VideoEndTime() const;

  // The end time of the last decoded audio frame. This signifies the end of
  // decoded audio data. Used to check if we are low in decoded data.
  media::TimeUnit mDecodedAudioEndTime;

  // The end time of the last decoded video frame. Used to check if we are low
  // on decoded video data.
  media::TimeUnit mDecodedVideoEndTime;

  // Playback rate. 1.0 : normal speed, 0.5 : half speed.
  double mPlaybackRate;

  // If we've got more than this number of decoded video frames waiting in
  // the video queue, we will not decode any more video frames until some have
  // been consumed by the play state machine thread.
  // Must hold monitor.
  uint32_t GetAmpleVideoFrames() const;

  // Our "ample" audio threshold. Once we've decoded this much audio, we
  // pause decoding.
  media::TimeUnit mAmpleAudioThreshold;

  // Only one of a given pair of ({Audio,Video}DataPromise, WaitForDataPromise)
  // should exist at any given moment.
  using AudioDataPromise = MediaFormatReader::AudioDataPromise;
  using VideoDataPromise = MediaFormatReader::VideoDataPromise;
  using WaitForDataPromise = MediaFormatReader::WaitForDataPromise;
  MozPromiseRequestHolder<AudioDataPromise> mAudioDataRequest;
  MozPromiseRequestHolder<VideoDataPromise> mVideoDataRequest;
  MozPromiseRequestHolder<WaitForDataPromise> mAudioWaitRequest;
  MozPromiseRequestHolder<WaitForDataPromise> mVideoWaitRequest;

  const char* AudioRequestStatus() const;
  const char* VideoRequestStatus() const;

  void OnSuspendTimerResolved();
  void CancelSuspendTimer();

  bool mCanPlayThrough = false;

  bool mIsLiveStream = false;

  // True if we shouldn't play our audio (but still write it to any capturing
  // streams). When this is true, the audio thread will never start again after
  // it has stopped.
  bool mAudioCaptured;

  // True if all audio frames are already rendered.
  bool mAudioCompleted = false;

  // True if all video frames are already rendered.
  bool mVideoCompleted = false;

  // True if we should not decode/preroll unnecessary samples unless playback
  // starts. "Prerolling" in this context refers to decoding and buffering
  // samples in advance of when they're needed for playback.
  // This flag is set for preload=metadata media, and means we won't
  // decode more than the first video frame and first block of audio samples
  // for that media when we start up, or after a seek. When Play() is called,
  // we reset this flag, as we assume the user is playing the media, so
  // prerolling is appropriate then. This flag is used to reduce the memory
  // and CPU overhead of prerolling samples for media elements that may
  // never play.
  bool mMinimizePreroll;

  // Stores presentation info required for playback.
  Maybe<MediaInfo> mInfo;

  mozilla::MediaMetadataManager mMetadataManager;

  // True if we've decoded first frames (thus having the start time) and
  // notified the FirstFrameLoaded event. Note we can't initiate seek until the
  // start time is known which happens when the first frames are decoded or we
  // are playing an MSE stream (the start time is always assumed 0).
  bool mSentFirstFrameLoadedEvent;

  // True if video decoding is suspended.
  bool mVideoDecodeSuspended;

  // True if the media is seekable (i.e. supports random access).
  bool mMediaSeekable = true;

  // True if the media is seekable only in buffered ranges.
  bool mMediaSeekableOnlyInBufferedRanges = false;

  // Track enabling video decode suspension via timer
  DelayedScheduler mVideoDecodeSuspendTimer;

  // Data about MediaStreams that are being fed by the decoder.
  const RefPtr<OutputStreamManager> mOutputStreamManager;

  // Track the current video decode mode.
  VideoDecodeMode mVideoDecodeMode;

  // Track the complete & error for audio/video separately
  MozPromiseRequestHolder<GenericPromise> mMediaSinkAudioPromise;
  MozPromiseRequestHolder<GenericPromise> mMediaSinkVideoPromise;

  MediaEventListener mAudioQueueListener;
  MediaEventListener mVideoQueueListener;
  MediaEventListener mAudibleListener;
  MediaEventListener mOnMediaNotSeekable;

  MediaEventProducerExc<UniquePtr<MediaInfo>, UniquePtr<MetadataTags>,
                        MediaDecoderEventVisibility>
      mMetadataLoadedEvent;
  MediaEventProducerExc<nsAutoPtr<MediaInfo>, MediaDecoderEventVisibility>
      mFirstFrameLoadedEvent;

  MediaEventProducer<MediaPlaybackEvent> mOnPlaybackEvent;
  MediaEventProducer<MediaResult> mOnPlaybackErrorEvent;

  MediaEventProducer<DecoderDoctorEvent> mOnDecoderDoctorEvent;

  MediaEventProducer<NextFrameStatus> mOnNextFrameStatus;

  const bool mIsMSE;

  bool mSeamlessLoopingAllowed;

  // Current playback position in the stream in bytes.
  int64_t mPlaybackOffset = 0;

 private:
  // The buffered range. Mirrored from the decoder thread.
  Mirror<media::TimeIntervals> mBuffered;

  // The current play state, mirrored from the main thread.
  Mirror<MediaDecoder::PlayState> mPlayState;

  // Volume of playback. 0.0 = muted. 1.0 = full volume.
  Mirror<double> mVolume;

  // Pitch preservation for the playback rate.
  Mirror<bool> mPreservesPitch;

  // Whether to seek back to the start of the media resource
  // upon reaching the end.
  Mirror<bool> mLooping;

  // True if the media is same-origin with the element. Data can only be
  // passed to MediaStreams when this is true.
  Mirror<bool> mSameOriginMedia;

  // An identifier for the principal of the media. Used to track when
  // main-thread induced principal changes get reflected on MSG thread.
  Mirror<PrincipalHandle> mMediaPrincipalHandle;

  // Duration of the media. This is guaranteed to be non-null after we finish
  // decoding the first frame.
  Canonical<media::NullableTimeUnit> mDuration;

  // The time of the current frame, corresponding to the "current
  // playback position" in HTML5. This is referenced from 0, which is the
  // initial playback position.
  Canonical<media::TimeUnit> mCurrentPosition;

  // Used to distinguish whether the audio is producing sound.
  Canonical<bool> mIsAudioDataAudible;

 public:
  AbstractCanonical<media::TimeIntervals>* CanonicalBuffered() const;

  AbstractCanonical<media::NullableTimeUnit>* CanonicalDuration() {
    return &mDuration;
  }
  AbstractCanonical<media::TimeUnit>* CanonicalCurrentPosition() {
    return &mCurrentPosition;
  }
  AbstractCanonical<bool>* CanonicalIsAudioDataAudible() {
    return &mIsAudioDataAudible;
  }

#ifdef XP_WIN
  // Whether we've called timeBeginPeriod(1) to request high resolution
  // timers. We request high resolution timers when playback starts, and
  // turn them off when playback is paused. Enabling high resolution
  // timers can cause higher CPU usage and battery drain on Windows 7.
  bool mHiResTimersRequested = false;
  // Whether we should enable high resolution timers. This is initialized at
  // MDSM construction, and mirrors the value of media.hi-res-timers.enabled.
  const bool mShouldUseHiResTimers;
#endif
};

}  // namespace mozilla

#endif