/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef AudioContext_h_
#define AudioContext_h_

#include "X11UndefineNone.h"
#include "AudioParamDescriptorMap.h"
#include "mozilla/dom/OfflineAudioContextBinding.h"
#include "mozilla/dom/AudioContextBinding.h"
#include "MediaBufferDecoder.h"
#include "mozilla/Attributes.h"
#include "mozilla/DOMEventTargetHelper.h"
#include "mozilla/MemoryReporting.h"
#include "mozilla/dom/TypedArray.h"
#include "mozilla/RelativeTimeline.h"
#include "mozilla/TypedEnumBits.h"
#include "mozilla/UniquePtr.h"
#include "nsCOMPtr.h"
#include "nsCycleCollectionParticipant.h"
#include "nsHashKeys.h"
#include "nsTHashtable.h"
#include "js/TypeDecls.h"
#include "nsIMemoryReporter.h"

namespace WebCore {
class PeriodicWave;
}  // namespace WebCore

class nsPIDOMWindowInner;

namespace mozilla {

class DOMMediaStream;
class ErrorResult;
class MediaTrack;
class MediaTrackGraph;
class AudioNodeTrack;

namespace dom {

enum class AudioContextState : uint8_t;
class AnalyserNode;
class AudioBuffer;
class AudioBufferSourceNode;
class AudioDestinationNode;
class AudioListener;
class AudioNode;
class BiquadFilterNode;
class ChannelMergerNode;
class ChannelSplitterNode;
class ConstantSourceNode;
class ConvolverNode;
class DelayNode;
class DynamicsCompressorNode;
class GainNode;
class GlobalObject;
class HTMLMediaElement;
class IIRFilterNode;
class MediaElementAudioSourceNode;
class MediaStreamAudioDestinationNode;
class MediaStreamAudioSourceNode;
class MediaStreamTrack;
class MediaStreamTrackAudioSourceNode;
class OscillatorNode;
class PannerNode;
class ScriptProcessorNode;
class StereoPannerNode;
class WaveShaperNode;
class Worklet;
class PeriodicWave;
struct PeriodicWaveConstraints;
class Promise;
enum class OscillatorType : uint8_t;

// This is addrefed by the OscillatorNodeEngine on the main thread
// and then used from the MTG thread.
// It can be released either from the graph thread or the main thread.
class BasicWaveFormCache {
 public:
  explicit BasicWaveFormCache(uint32_t aSampleRate);
  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(BasicWaveFormCache)
  WebCore::PeriodicWave* GetBasicWaveForm(OscillatorType aType);

 private:
  ~BasicWaveFormCache();
  RefPtr<WebCore::PeriodicWave> mSawtooth;
  RefPtr<WebCore::PeriodicWave> mSquare;
  RefPtr<WebCore::PeriodicWave> mTriangle;
  uint32_t mSampleRate;
};
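
// Illustrative use of the cache from an oscillator engine -- a sketch only,
// assuming the owning AudioContext is reachable and its GetBasicWaveFormCache()
// accessor (declared further down) is used; the real call sites live in the
// .cpp files:
//
//   RefPtr<BasicWaveFormCache> cache = aContext->GetBasicWaveFormCache();
//   WebCore::PeriodicWave* wave =
//       cache->GetBasicWaveForm(OscillatorType::Sawtooth);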

/* This runnable allows the MTG to notify the main thread when audio is actually
 * flowing */
class StateChangeTask final : public Runnable {
 public:
  /* This constructor should be used when this event is sent from the main
   * thread. */
  StateChangeTask(AudioContext* aAudioContext, void* aPromise,
                  AudioContextState aNewState);

  /* This constructor should be used when this event is sent from the audio
   * thread. */
  StateChangeTask(AudioNodeTrack* aTrack, void* aPromise,
                  AudioContextState aNewState);

  NS_IMETHOD Run() override;

 private:
  RefPtr<AudioContext> mAudioContext;
  void* mPromise;
  RefPtr<AudioNodeTrack> mAudioNodeTrack;
  AudioContextState mNewState;
};
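
// A hedged sketch of how this runnable might be dispatched from the audio
// thread once audio is flowing. The exact dispatch site and state value are
// assumptions; the real flow lives in AudioContext.cpp and the MTG code.
//
//   RefPtr<StateChangeTask> task =
//       new StateChangeTask(aTrack, aPromise, AudioContextState::Running);
//   NS_DispatchToMainThread(task.forget());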

enum class AudioContextOperation { Suspend, Resume, Close };
// When suspending or resuming an AudioContext, some operations have to notify
// the main thread, so that the Promise is resolved, the state is modified, and
// the "statechange" event is sent. Other operations don't go back to the main
// thread, for example when the AudioContext is paused by something that is
// not caused by the page itself: opening a debugger, breaking on a breakpoint,
// reloading a document.
enum class AudioContextOperationFlags { None, SendStateChange };
MOZ_MAKE_ENUM_CLASS_BITWISE_OPERATORS(AudioContextOperationFlags);
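
// Minimal sketch of how these flags are typically consumed internally
// (illustrative only; the exact checks live in AudioContext.cpp). The enum has
// bitwise operators, so a flag can be tested directly:
//
//   void AudioContext::SuspendInternal(void* aPromise,
//                                      AudioContextOperationFlags aFlags) {
//     // ... ask the graph to suspend the context's tracks ...
//     if (aFlags & AudioContextOperationFlags::SendStateChange) {
//       // Resolve the promise and fire "statechange" once the graph replies.
//     }
//   }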

struct AudioContextOptions;

class AudioContext final : public DOMEventTargetHelper,
                           public nsIMemoryReporter,
                           public RelativeTimeline {
  AudioContext(nsPIDOMWindowInner* aParentWindow, bool aIsOffline,
               uint32_t aNumberOfChannels = 0, uint32_t aLength = 0,
               float aSampleRate = 0.0f);
  ~AudioContext();

  nsresult Init();

 public:
  typedef uint64_t AudioContextId;

  NS_DECL_ISUPPORTS_INHERITED
  NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(AudioContext, DOMEventTargetHelper)
  MOZ_DEFINE_MALLOC_SIZE_OF(MallocSizeOf)

  nsPIDOMWindowInner* GetParentObject() const { return GetOwner(); }

  nsISerialEventTarget* GetMainThread() const;

  virtual void DisconnectFromOwner() override;

  void Shutdown();  // idempotent

  JSObject* WrapObject(JSContext* aCx,
                       JS::Handle<JSObject*> aGivenProto) override;

  using DOMEventTargetHelper::DispatchTrustedEvent;

  // Constructor for regular AudioContext
  static already_AddRefed<AudioContext> Constructor(
      const GlobalObject& aGlobal, const AudioContextOptions& aOptions,
      ErrorResult& aRv);

  // Constructor for offline AudioContext with options object
  static already_AddRefed<AudioContext> Constructor(
      const GlobalObject& aGlobal, const OfflineAudioContextOptions& aOptions,
      ErrorResult& aRv);

  // Constructor for offline AudioContext
  static already_AddRefed<AudioContext> Constructor(const GlobalObject& aGlobal,
                                                    uint32_t aNumberOfChannels,
                                                    uint32_t aLength,
                                                    float aSampleRate,
                                                    ErrorResult& aRv);

  // AudioContext methods

  AudioDestinationNode* Destination() const { return mDestination; }

  float SampleRate() const { return mSampleRate; }

  bool ShouldSuspendNewTrack() const { return mSuspendCalled || mCloseCalled; }

  double CurrentTime();

  AudioListener* Listener();

  AudioContextState State() const { return mAudioContextState; }

  double BaseLatency() const {
    // Gecko does not do any buffering between rendering the audio and sending
    // it to the audio subsystem.
    return 0.0;
  }

  double OutputLatency();

  void GetOutputTimestamp(AudioTimestamp& aTimeStamp);

  Worklet* GetAudioWorklet(ErrorResult& aRv);

  bool IsRunning() const;

  // Called when an AudioScheduledSourceNode is started or a source node starts
  // producing output; this method might resume the AudioContext if it was not
  // allowed to start.
  void StartBlockedAudioContextIfAllowed();

  // These three methods return a promise to content that is resolved when a
  // (possibly long) operation is completed on the MTG (and possibly other)
  // thread(s). To avoid having to match the calls with their asynchronous
  // results when the operation is completed, we keep a reference to the
  // promises on the main thread, and then send the promise pointers down to
  // the MTG thread as a void* (to make it very clear that the pointer is
  // merely to be treated as an ID). When back on the main thread, we can
  // resolve or reject the promise by casting it back to a `Promise*`, while
  // asserting we're back on the main thread and removing the reference we
  // added. A sketch of this round trip follows the declarations below.
  already_AddRefed<Promise> Suspend(ErrorResult& aRv);
  already_AddRefed<Promise> Resume(ErrorResult& aRv);
  already_AddRefed<Promise> Close(ErrorResult& aRv);
  IMPL_EVENT_HANDLER(statechange)
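
  // A rough, hedged sketch of the promise-as-ID round trip described above.
  // The flag value and control flow here are assumptions; the real logic
  // lives in AudioContext.cpp.
  //
  //   RefPtr<Promise> promise = /* create a DOM Promise for content */;
  //   mPromiseGripArray.AppendElement(promise);          // keep it alive
  //   SuspendInternal(promise.get(),                     // travels as void*
  //                   AudioContextOperationFlags::SendStateChange);
  //   ...
  //   // Later, on the main thread, OnStateChanged(aPromise, aNewState)
  //   // casts the void* back to Promise*, resolves or rejects it, and drops
  //   // it from mPromiseGripArray.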

  // These two functions are similar to Suspend() and Resume(), but they are
  // designed to be called from chrome code (e.g. from the inner window) rather
  // than from content, so they don't need to return a promise to the caller.
  void SuspendFromChrome();
  void ResumeFromChrome();
  // Called on completion of offline rendering:
  void OfflineClose();

  already_AddRefed<AudioBufferSourceNode> CreateBufferSource();

  already_AddRefed<ConstantSourceNode> CreateConstantSource();

  already_AddRefed<AudioBuffer> CreateBuffer(uint32_t aNumberOfChannels,
                                             uint32_t aLength,
                                             float aSampleRate,
                                             ErrorResult& aRv);

  already_AddRefed<MediaStreamAudioDestinationNode>
  CreateMediaStreamDestination(ErrorResult& aRv);

  already_AddRefed<ScriptProcessorNode> CreateScriptProcessor(
      uint32_t aBufferSize, uint32_t aNumberOfInputChannels,
      uint32_t aNumberOfOutputChannels, ErrorResult& aRv);

  already_AddRefed<StereoPannerNode> CreateStereoPanner(ErrorResult& aRv);

  already_AddRefed<AnalyserNode> CreateAnalyser(ErrorResult& aRv);

  already_AddRefed<GainNode> CreateGain(ErrorResult& aRv);

  already_AddRefed<WaveShaperNode> CreateWaveShaper(ErrorResult& aRv);

  already_AddRefed<MediaElementAudioSourceNode> CreateMediaElementSource(
      HTMLMediaElement& aMediaElement, ErrorResult& aRv);
  already_AddRefed<MediaStreamAudioSourceNode> CreateMediaStreamSource(
      DOMMediaStream& aMediaStream, ErrorResult& aRv);
  already_AddRefed<MediaStreamTrackAudioSourceNode>
  CreateMediaStreamTrackSource(MediaStreamTrack& aMediaStreamTrack,
                               ErrorResult& aRv);

  already_AddRefed<DelayNode> CreateDelay(double aMaxDelayTime,
                                          ErrorResult& aRv);

  already_AddRefed<PannerNode> CreatePanner(ErrorResult& aRv);

  already_AddRefed<ConvolverNode> CreateConvolver(ErrorResult& aRv);

  already_AddRefed<ChannelSplitterNode> CreateChannelSplitter(
      uint32_t aNumberOfOutputs, ErrorResult& aRv);

  already_AddRefed<ChannelMergerNode> CreateChannelMerger(
      uint32_t aNumberOfInputs, ErrorResult& aRv);

  already_AddRefed<DynamicsCompressorNode> CreateDynamicsCompressor(
      ErrorResult& aRv);

  already_AddRefed<BiquadFilterNode> CreateBiquadFilter(ErrorResult& aRv);

  already_AddRefed<IIRFilterNode> CreateIIRFilter(
      const Sequence<double>& aFeedforward, const Sequence<double>& aFeedback,
      mozilla::ErrorResult& aRv);

  already_AddRefed<OscillatorNode> CreateOscillator(ErrorResult& aRv);

  already_AddRefed<PeriodicWave> CreatePeriodicWave(
      const Float32Array& aRealData, const Float32Array& aImagData,
      const PeriodicWaveConstraints& aConstraints, ErrorResult& aRv);

  already_AddRefed<Promise> DecodeAudioData(
      const ArrayBuffer& aBuffer,
      const Optional<OwningNonNull<DecodeSuccessCallback>>& aSuccessCallback,
      const Optional<OwningNonNull<DecodeErrorCallback>>& aFailureCallback,
      ErrorResult& aRv);

  // OfflineAudioContext methods
  already_AddRefed<Promise> StartRendering(ErrorResult& aRv);
  IMPL_EVENT_HANDLER(complete)
  unsigned long Length();

  bool IsOffline() const { return mIsOffline; }

  MediaTrackGraph* Graph() const;
  AudioNodeTrack* DestinationTrack() const;

  // Nodes register here if they will produce sound even if they have silent
  // or no input connections.  The AudioContext will keep registered nodes
  // alive until the context is collected.  This takes care of "playing"
  // references and "tail-time" references.
  void RegisterActiveNode(AudioNode* aNode);
  // Nodes unregister when they have finished producing sound for the
  // foreseeable future.
  // Do NOT call UnregisterActiveNode from an AudioNode destructor.
  // If the destructor is called, then the Node has already been unregistered.
  // The destructor may be called during hashtable enumeration, during which
  // unregistering would not be safe.
  void UnregisterActiveNode(AudioNode* aNode);
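
  // Illustrative (hedged) lifecycle from a source node's point of view,
  // assuming the node holds a pointer to its AudioContext and calls these on
  // the main thread; the real call sites are in the individual node classes:
  //
  //   aContext->RegisterActiveNode(this);    // start() was called: keep the
  //                                          // node alive while it may play
  //   ...
  //   aContext->UnregisterActiveNode(this);  // playback and tail-time done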

  uint32_t MaxChannelCount() const;

  uint32_t ActiveNodeCount() const;

  void Mute() const;
  void Unmute() const;

  void RegisterNode(AudioNode* aNode);
  void UnregisterNode(AudioNode* aNode);

  void OnStateChanged(void* aPromise, AudioContextState aNewState);

  BasicWaveFormCache* GetBasicWaveFormCache();

  void ShutdownWorklet();
  // Steals from |aParamMap|
  void SetParamMapForWorkletName(const nsAString& aName,
                                 AudioParamDescriptorMap* aParamMap);
  const AudioParamDescriptorMap* GetParamMapForWorkletName(
      const nsAString& aName) {
    return mWorkletParamDescriptors.GetValue(aName);
  }
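
  // A minimal, hedged sketch of how the two worklet-name calls above pair up.
  // The worklet name and the origin of the descriptor map are hypothetical:
  //
  //   AudioParamDescriptorMap map = /* built while registering a processor */;
  //   aContext->SetParamMapForWorkletName(u"my-processor"_ns, &map);  // steals
  //   const AudioParamDescriptorMap* descriptors =
  //       aContext->GetParamMapForWorkletName(u"my-processor"_ns);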

  void Dispatch(already_AddRefed<nsIRunnable>&& aRunnable);

 private:
  void DisconnectFromWindow();
  void RemoveFromDecodeQueue(WebAudioDecodeJob* aDecodeJob);
  void ShutdownDecoder();

  size_t SizeOfIncludingThis(mozilla::MallocSizeOf aMallocSizeOf) const;
  NS_DECL_NSIMEMORYREPORTER

  friend struct ::mozilla::WebAudioDecodeJob;

  nsTArray<mozilla::MediaTrack*> GetAllTracks() const;

  void ResumeInternal(AudioContextOperationFlags aFlags);
  void SuspendInternal(void* aPromise, AudioContextOperationFlags aFlags);
  void CloseInternal(void* aPromise, AudioContextOperationFlags aFlags);

  // Reports an error message to the console, and dispatches a testing event if
  // needed, when the AudioContext is blocked by the autoplay policy.
  void ReportBlocked();

  void ReportToConsole(uint32_t aErrorFlags, const char* aMsg) const;

  // This function should be called every time we decide whether to allow the
  // AudioContext to start; it's used to update Telemetry-related variables.
  void UpdateAutoplayAssumptionStatus();

  // These functions are used for updating Telemetry.
  // - MaybeUpdateAutoplayTelemetry: updates the category 'AllowedAfterBlocked'
  // - MaybeUpdateAutoplayTelemetryWhenShutdown: updates the categories
  //   'NeverBlocked' and 'NeverAllowed', so it needs to be called when the
  //   AudioContext shuts down
  void MaybeUpdateAutoplayTelemetry();
  void MaybeUpdateAutoplayTelemetryWhenShutdown();

 private:
  // Each AudioContext has an id that is passed down to the MediaTracks that
  // back the AudioNodes, so we can easily compute the set of all the
  // MediaTracks for a given context on the MediaTrackGraph side.
  const AudioContextId mId;
  // Note that it's important for mSampleRate to be initialized before
  // mDestination, as mDestination's constructor needs to access it!
  const float mSampleRate;
  AudioContextState mAudioContextState;
  RefPtr<AudioDestinationNode> mDestination;
  RefPtr<AudioListener> mListener;
  RefPtr<Worklet> mWorklet;
  nsTArray<UniquePtr<WebAudioDecodeJob>> mDecodeJobs;
  // This array is used to keep the suspend/close promises alive until
  // they are resolved, so we can safely pass them across threads.
  nsTArray<RefPtr<Promise>> mPromiseGripArray;
  // This array is used to keep only the resume promises alive until they are
  // resolved, so we can safely pass them across threads. If the audio context
  // is not allowed to play, a promise stays pending in this array and is only
  // resolved once the audio context has been allowed to start and the user
  // calls resume() again.
  nsTArray<RefPtr<Promise>> mPendingResumePromises;
  // See RegisterActiveNode.  These will keep the AudioContext alive while it
  // is rendering and the window remains alive.
  nsTHashtable<nsRefPtrHashKey<AudioNode>> mActiveNodes;
  // Raw (non-owning) references to all AudioNodes for this AudioContext.
  nsTHashtable<nsPtrHashKey<AudioNode>> mAllNodes;
  nsDataHashtable<nsStringHashKey, AudioParamDescriptorMap>
      mWorkletParamDescriptors;
  // Cache to avoid recomputing basic waveforms all the time.
  RefPtr<BasicWaveFormCache> mBasicWaveFormCache;
  // Number of channels passed in the OfflineAudioContext ctor.
  uint32_t mNumberOfChannels;
  bool mIsOffline;
  bool mIsStarted;
  bool mIsShutDown;
  // Close has been called; reject subsequent suspend and resume calls.
  bool mCloseCalled;
  // Suspend has been called with no following resume.
  bool mSuspendCalled;
  bool mIsDisconnecting;
  // This flag stores the previous value of the `allowed-to-start` status.
  bool mWasAllowedToStart;

  // True if this AudioContext has been suspended by the page.
  bool mSuspendedByContent;

  // These variables are used for telemetry. They don't reflect the actual
  // status of the AudioContext; they are based on the "assumption" that
  // blocking web audio is enabled, because we want to record Telemetry whether
  // or not the user enables blocking autoplay.
  // - 'mWasEverAllowedToStart' is true if the AudioContext would ever have
  //   been allowed to start, had blocking web audio been enabled.
  // - 'mWasEverBlockedToStart' is true if the AudioContext would ever have
  //   been blocked from starting, had blocking web audio been enabled.
  // - 'mWouldBeAllowedToStart' stores the previous value of the
  //   `allowed-to-start` status, assuming blocking web audio were enabled.
  bool mWasEverAllowedToStart;
  bool mWasEverBlockedToStart;
  bool mWouldBeAllowedToStart;
};

static const dom::AudioContext::AudioContextId NO_AUDIO_CONTEXT = 0;

}  // namespace dom
}  // namespace mozilla

inline nsISupports* ToSupports(mozilla::dom::AudioContext* p) {
  return NS_CYCLE_COLLECTION_CLASSNAME(mozilla::dom::AudioContext)::Upcast(p);
}

#endif