/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "AudioDestinationNode.h"
#include "AudioContext.h"
#include "AlignmentUtils.h"
#include "mozilla/dom/AudioDestinationNodeBinding.h"
#include "mozilla/dom/OfflineAudioCompletionEvent.h"
#include "mozilla/dom/ScriptSettings.h"
#include "mozilla/dom/BaseAudioContextBinding.h"
#include "mozilla/Services.h"
#include "AudioChannelAgent.h"
#include "AudioChannelService.h"
#include "AudioNodeEngine.h"
#include "AudioNodeStream.h"
#include "MediaStreamGraph.h"
#include "nsContentUtils.h"
#include "nsIInterfaceRequestorUtils.h"
#include "nsIDocShell.h"
#include "nsIPermissionManager.h"
#include "nsIScriptObjectPrincipal.h"
#include "nsServiceManagerUtils.h"
#include "mozilla/dom/Promise.h"

namespace mozilla {
namespace dom {

static uint8_t gWebAudioOutputKey;

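// Engine for the destination node of an OfflineAudioContext. It runs on the
// rendering thread and copies every processed block into a shared float
// buffer until mLength frames have been recorded, at which point it marks the
// stream finished so the main thread can wrap the result in an AudioBuffer
// and fire the "complete" event.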
class OfflineDestinationNodeEngine final : public AudioNodeEngine {
 public:
  OfflineDestinationNodeEngine(AudioDestinationNode* aNode,
                               uint32_t aNumberOfChannels, uint32_t aLength,
                               float aSampleRate)
      : AudioNodeEngine(aNode),
        mWriteIndex(0),
        mNumberOfChannels(aNumberOfChannels),
        mLength(aLength),
        mSampleRate(aSampleRate),
        mBufferAllocated(false) {}

  void ProcessBlock(AudioNodeStream* aStream, GraphTime aFrom,
                    const AudioBlock& aInput, AudioBlock* aOutput,
                    bool* aFinished) override {
    // Pass the input through for form's sake; this output will not be used
    // anywhere.
    *aOutput = aInput;

    // The output buffer is allocated lazily, on the rendering thread, when
    // non-null input is received.
    if (!mBufferAllocated && !aInput.IsNull()) {
      // These allocations might fail if content provides a huge number of
      // channels or size, but it's OK since we'll deal with the failure
      // gracefully.
      mBuffer = ThreadSharedFloatArrayBufferList::Create(mNumberOfChannels,
                                                         mLength, fallible);
      if (mBuffer && mWriteIndex) {
        // Zero the leading samples of each channel to cover any null chunks
        // that were skipped before the buffer was allocated.
        for (uint32_t i = 0; i < mNumberOfChannels; ++i) {
          float* channelData = mBuffer->GetDataForWrite(i);
          PodZero(channelData, mWriteIndex);
        }
      }

      mBufferAllocated = true;
    }

    // Skip copying if there is no buffer.
    uint32_t outputChannelCount = mBuffer ? mNumberOfChannels : 0;

    // Record our input buffer
    MOZ_ASSERT(mWriteIndex < mLength, "How did this happen?");
    const uint32_t duration =
        std::min(WEBAUDIO_BLOCK_SIZE, mLength - mWriteIndex);
    const uint32_t inputChannelCount = aInput.ChannelCount();
    for (uint32_t i = 0; i < outputChannelCount; ++i) {
      float* outputData = mBuffer->GetDataForWrite(i) + mWriteIndex;
      if (aInput.IsNull() || i >= inputChannelCount) {
        PodZero(outputData, duration);
      } else {
        const float* inputBuffer =
            static_cast<const float*>(aInput.mChannelData[i]);
        if (duration == WEBAUDIO_BLOCK_SIZE && IS_ALIGNED16(inputBuffer)) {
          // Use the optimized version of the copy with scale operation
          AudioBlockCopyChannelWithScale(inputBuffer, aInput.mVolume,
                                         outputData);
        } else {
          if (aInput.mVolume == 1.0f) {
            PodCopy(outputData, inputBuffer, duration);
          } else {
            for (uint32_t j = 0; j < duration; ++j) {
              outputData[j] = aInput.mVolume * inputBuffer[j];
            }
          }
        }
      }
    }
    mWriteIndex += duration;

    if (mWriteIndex >= mLength) {
      NS_ASSERTION(mWriteIndex == mLength, "Overshot length");
      // Go to finished state. When the graph's current time eventually reaches
      // the end of the stream, then the main thread will be notified and we'll
      // shut down the AudioContext.
      *aFinished = true;
    }
  }

  bool IsActive() const override {
    // Keep processing to track stream time, which is used for all timelines
    // associated with the same AudioContext.
    return true;
  }

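  // Main-thread runnable that dispatches the OfflineAudioCompletionEvent
  // carrying the rendered AudioBuffer once offline rendering has finished.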
  class OnCompleteTask final : public Runnable {
   public:
    OnCompleteTask(AudioContext* aAudioContext, AudioBuffer* aRenderedBuffer)
        : Runnable("dom::OfflineDestinationNodeEngine::OnCompleteTask"),
          mAudioContext(aAudioContext),
          mRenderedBuffer(aRenderedBuffer) {}

    NS_IMETHOD Run() override {
      OfflineAudioCompletionEventInit param;
      param.mRenderedBuffer = mRenderedBuffer;

      RefPtr<OfflineAudioCompletionEvent> event =
          OfflineAudioCompletionEvent::Constructor(
              mAudioContext, NS_LITERAL_STRING("complete"), param);
      mAudioContext->DispatchTrustedEvent(event);

      return NS_OK;
    }

   private:
    RefPtr<AudioContext> mAudioContext;
    RefPtr<AudioBuffer> mRenderedBuffer;
  };

  void FireOfflineCompletionEvent(AudioDestinationNode* aNode) {
    AudioContext* context = aNode->Context();
    context->Shutdown();
    // Shutdown drops self reference, but the context is still referenced by
    // aNode, which is strongly referenced by the runnable that called
    // AudioDestinationNode::FireOfflineCompletionEvent.

    // Create the rendered buffer from the recorded input.
    ErrorResult rv;
    RefPtr<AudioBuffer> renderedBuffer =
        AudioBuffer::Create(context->GetOwner(), mNumberOfChannels, mLength,
                            mSampleRate, mBuffer.forget(), rv);
    if (rv.Failed()) {
      rv.SuppressException();
      return;
    }

    aNode->ResolvePromise(renderedBuffer);

    context->Dispatch(do_AddRef(new OnCompleteTask(context, renderedBuffer)));

    context->OnStateChanged(nullptr, AudioContextState::Closed);
  }

  size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const override {
    size_t amount = AudioNodeEngine::SizeOfExcludingThis(aMallocSizeOf);
    if (mBuffer) {
      amount += mBuffer->SizeOfIncludingThis(aMallocSizeOf);
    }
    return amount;
  }

  size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const override {
    return aMallocSizeOf(this) + SizeOfExcludingThis(aMallocSizeOf);
  }

 private:
  // The input to the destination node is recorded in mBuffer.
  // When this buffer fills up with mLength frames, the buffered input is sent
  // to the main thread in order to dispatch OfflineAudioCompletionEvent.
  RefPtr<ThreadSharedFloatArrayBufferList> mBuffer;
  // An index representing the next offset in mBuffer to be written to.
  uint32_t mWriteIndex;
  uint32_t mNumberOfChannels;
  // How many frames the OfflineAudioContext intends to produce.
  uint32_t mLength;
  float mSampleRate;
  bool mBufferAllocated;
};

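// Forwards a change in the destination's input silence state from the graph
// thread to AudioDestinationNode::InputMuted on the main thread.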
class InputMutedRunnable final : public Runnable {
 public:
  InputMutedRunnable(AudioNodeStream* aStream, bool aInputMuted)
      : Runnable("dom::InputMutedRunnable"),
        mStream(aStream),
        mInputMuted(aInputMuted) {}

  NS_IMETHOD Run() override {
    MOZ_ASSERT(NS_IsMainThread());
    RefPtr<AudioNode> node = mStream->Engine()->NodeMainThread();

    if (node) {
      RefPtr<AudioDestinationNode> destinationNode =
          static_cast<AudioDestinationNode*>(node.get());
      destinationNode->InputMuted(mInputMuted);
    }
    return NS_OK;
  }

 private:
  RefPtr<AudioNodeStream> mStream;
  bool mInputMuted;
};

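// Engine for the destination node of a realtime AudioContext. It applies the
// window volume to the output block and, whenever the input transitions
// between silent and audible, posts an InputMutedRunnable so the main thread
// can update the audio channel agent.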
class DestinationNodeEngine final : public AudioNodeEngine {
 public:
  explicit DestinationNodeEngine(AudioDestinationNode* aNode)
      : AudioNodeEngine(aNode),
        mVolume(1.0f),
        mLastInputMuted(true),
        mSuspended(false) {
    MOZ_ASSERT(aNode);
  }

  void ProcessBlock(AudioNodeStream* aStream, GraphTime aFrom,
                    const AudioBlock& aInput, AudioBlock* aOutput,
                    bool* aFinished) override {
    *aOutput = aInput;
    aOutput->mVolume *= mVolume;

    if (mSuspended) {
      return;
    }

    bool newInputMuted = aInput.IsNull() || aInput.IsMuted();
    if (newInputMuted != mLastInputMuted) {
      mLastInputMuted = newInputMuted;

      RefPtr<InputMutedRunnable> runnable =
          new InputMutedRunnable(aStream, newInputMuted);
      aStream->Graph()->DispatchToMainThreadAfterStreamStateUpdate(
          runnable.forget());
    }
  }

  bool IsActive() const override {
    // Keep processing to track stream time, which is used for all timelines
    // associated with the same AudioContext. If there are no other engines
    // for the AudioContext, then this could return false to suspend the
    // stream, but the stream is blocked anyway through
    // AudioDestinationNode::SetIsOnlyNodeForContext().
    return true;
  }

  void SetDoubleParameter(uint32_t aIndex, double aParam) override {
    if (aIndex == VOLUME) {
      mVolume = aParam;
    }
  }

  void SetInt32Parameter(uint32_t aIndex, int32_t aParam) override {
    if (aIndex == SUSPENDED) {
      mSuspended = !!aParam;
      if (mSuspended) {
        mLastInputMuted = true;
      }
    }
  }

  enum Parameters {
    VOLUME,
    SUSPENDED,
  };

  size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const override {
    return aMallocSizeOf(this) + SizeOfExcludingThis(aMallocSizeOf);
  }

 private:
  float mVolume;
  bool mLastInputMuted;
  bool mSuspended;
};

NS_IMPL_CYCLE_COLLECTION_INHERITED(AudioDestinationNode, AudioNode,
                                   mAudioChannelAgent, mOfflineRenderingPromise)

NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(AudioDestinationNode)
  NS_INTERFACE_MAP_ENTRY(nsIAudioChannelAgentCallback)
NS_INTERFACE_MAP_END_INHERITING(AudioNode)

NS_IMPL_ADDREF_INHERITED(AudioDestinationNode, AudioNode)
NS_IMPL_RELEASE_INHERITED(AudioDestinationNode, AudioNode)

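// The constructor picks a non-realtime MediaStreamGraph for offline contexts
// and the shared audio-thread-driven graph otherwise, then creates the node's
// stream with the matching engine and registers it as an external audio
// output.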
AudioDestinationNode::AudioDestinationNode(AudioContext* aContext,
                                           bool aIsOffline,
                                           uint32_t aNumberOfChannels,
                                           uint32_t aLength, float aSampleRate)
    : AudioNode(aContext, aNumberOfChannels, ChannelCountMode::Explicit,
                ChannelInterpretation::Speakers),
      mFramesToProduce(aLength),
      mIsOffline(aIsOffline),
      mAudioChannelSuspended(false),
      mCaptured(false),
      mAudible(AudioChannelService::AudibleState::eAudible) {
  nsPIDOMWindowInner* window = aContext->GetParentObject();
  MediaStreamGraph* graph =
      aIsOffline
          ? MediaStreamGraph::CreateNonRealtimeInstance(aSampleRate, window)
          : MediaStreamGraph::GetInstance(MediaStreamGraph::AUDIO_THREAD_DRIVER,
                                          window);
  AudioNodeEngine* engine =
      aIsOffline
          ? new OfflineDestinationNodeEngine(this, aNumberOfChannels, aLength,
                                             aSampleRate)
          : static_cast<AudioNodeEngine*>(new DestinationNodeEngine(this));

  AudioNodeStream::Flags flags =
      AudioNodeStream::NEED_MAIN_THREAD_CURRENT_TIME |
      AudioNodeStream::NEED_MAIN_THREAD_FINISHED |
      AudioNodeStream::EXTERNAL_OUTPUT;
  mStream = AudioNodeStream::Create(aContext, engine, flags, graph);
  mStream->AddMainThreadListener(this);
  mStream->AddAudioOutput(&gWebAudioOutputKey);

  if (!aIsOffline) {
    graph->NotifyWhenGraphStarted(mStream);
  }
}

AudioDestinationNode::~AudioDestinationNode() {}

size_t AudioDestinationNode::SizeOfExcludingThis(
    MallocSizeOf aMallocSizeOf) const {
  size_t amount = AudioNode::SizeOfExcludingThis(aMallocSizeOf);
  // Might be useful in the future:
  // - mAudioChannelAgent
  return amount;
}

size_t AudioDestinationNode::SizeOfIncludingThis(
    MallocSizeOf aMallocSizeOf) const {
  return aMallocSizeOf(this) + SizeOfExcludingThis(aMallocSizeOf);
}

void AudioDestinationNode::DestroyAudioChannelAgent() {
  if (mAudioChannelAgent && !Context()->IsOffline()) {
    mAudioChannelAgent->NotifyStoppedPlaying();
    mAudioChannelAgent = nullptr;
    // Reset the state so the node is regarded as audible again by default.
    mAudible = AudioChannelService::AudibleState::eAudible;
  }
}

void AudioDestinationNode::DestroyMediaStream() {
  DestroyAudioChannelAgent();

  if (!mStream) return;

  mStream->RemoveMainThreadListener(this);
  MediaStreamGraph* graph = mStream->Graph();
  if (graph->IsNonRealtime()) {
    MediaStreamGraph::DestroyNonRealtimeInstance(graph);
  }
  AudioNode::DestroyMediaStream();
}

void AudioDestinationNode::NotifyMainThreadStreamFinished() {
  MOZ_ASSERT(NS_IsMainThread());
  MOZ_ASSERT(mStream->IsFinished());

  if (mIsOffline) {
    AbstractMainThread()->Dispatch(NewRunnableMethod(
        "dom::AudioDestinationNode::FireOfflineCompletionEvent", this,
        &AudioDestinationNode::FireOfflineCompletionEvent));
  }
}

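// Runs on the main thread once the offline stream has finished; it hands off
// to the engine, which resolves the rendering promise and dispatches the
// "complete" event.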
void AudioDestinationNode::FireOfflineCompletionEvent() {
  OfflineDestinationNodeEngine* engine =
      static_cast<OfflineDestinationNodeEngine*>(Stream()->Engine());
  engine->FireOfflineCompletionEvent(this);
}

void AudioDestinationNode::ResolvePromise(AudioBuffer* aRenderedBuffer) {
  MOZ_ASSERT(NS_IsMainThread());
  MOZ_ASSERT(mIsOffline);
  mOfflineRenderingPromise->MaybeResolve(aRenderedBuffer);
}

uint32_t AudioDestinationNode::MaxChannelCount() const {
  return Context()->MaxChannelCount();
}

void AudioDestinationNode::SetChannelCount(uint32_t aChannelCount,
                                           ErrorResult& aRv) {
  if (aChannelCount > MaxChannelCount()) {
    aRv.Throw(NS_ERROR_DOM_INDEX_SIZE_ERR);
    return;
  }

  AudioNode::SetChannelCount(aChannelCount, aRv);
}

void AudioDestinationNode::Mute() {
  MOZ_ASSERT(Context() && !Context()->IsOffline());
  SendDoubleParameterToStream(DestinationNodeEngine::VOLUME, 0.0f);
}

void AudioDestinationNode::Unmute() {
  MOZ_ASSERT(Context() && !Context()->IsOffline());
  SendDoubleParameterToStream(DestinationNodeEngine::VOLUME, 1.0f);
}

void AudioDestinationNode::Suspend() {
  DestroyAudioChannelAgent();
  SendInt32ParameterToStream(DestinationNodeEngine::SUSPENDED, 1);
}

void AudioDestinationNode::Resume() {
  CreateAudioChannelAgent();
  SendInt32ParameterToStream(DestinationNodeEngine::SUSPENDED, 0);
}

void AudioDestinationNode::OfflineShutdown() {
  MOZ_ASSERT(Context() && Context()->IsOffline(),
             "Should only be called on a valid OfflineAudioContext");

  MediaStreamGraph::DestroyNonRealtimeInstance(mStream->Graph());
  mOfflineRenderingRef.Drop(this);
}

JSObject* AudioDestinationNode::WrapObject(JSContext* aCx,
                                           JS::Handle<JSObject*> aGivenProto) {
  return AudioDestinationNodeBinding::Wrap(aCx, this, aGivenProto);
}

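// Kicks off offline rendering: stores the promise to resolve when rendering
// completes, keeps this node alive via mOfflineRenderingRef, and asks the
// non-realtime graph to produce mFramesToProduce frames.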
void AudioDestinationNode::StartRendering(Promise* aPromise) {
  mOfflineRenderingPromise = aPromise;
  mOfflineRenderingRef.Take(this);
  mStream->Graph()->StartNonRealtimeProcessing(mFramesToProduce);
}

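// nsIAudioChannelAgentCallback: the window's effective volume or mute state
// changed. Scale the stream's audio output accordingly and report any
// audibility transition back to the audio channel agent.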
NS_IMETHODIMP
AudioDestinationNode::WindowVolumeChanged(float aVolume, bool aMuted) {
  if (!mStream) {
    return NS_OK;
  }

  MOZ_LOG(AudioChannelService::GetAudioChannelLog(), LogLevel::Debug,
          ("AudioDestinationNode, WindowVolumeChanged, "
           "this = %p, aVolume = %f, aMuted = %s\n",
           this, aVolume, aMuted ? "true" : "false"));

  float volume = aMuted ? 0.0 : aVolume;
  mStream->SetAudioOutputVolume(&gWebAudioOutputKey, volume);

  AudioChannelService::AudibleState audible =
      volume > 0.0 ? AudioChannelService::AudibleState::eAudible
                   : AudioChannelService::AudibleState::eNotAudible;
  if (mAudible != audible) {
    mAudible = audible;
    mAudioChannelAgent->NotifyStartedAudible(
        mAudible, AudioChannelService::AudibleChangedReasons::eVolumeChanged);
  }
  return NS_OK;
}

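// nsIAudioChannelAgentCallback: the window was suspended or resumed. Silence
// or re-enable the audio track and report the resulting audibility change.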
NS_IMETHODIMP
AudioDestinationNode::WindowSuspendChanged(nsSuspendedTypes aSuspend) {
  if (!mStream) {
    return NS_OK;
  }

  bool suspended = (aSuspend != nsISuspendedTypes::NONE_SUSPENDED);
  if (mAudioChannelSuspended == suspended) {
    return NS_OK;
  }

  MOZ_LOG(AudioChannelService::GetAudioChannelLog(), LogLevel::Debug,
          ("AudioDestinationNode, WindowSuspendChanged, "
           "this = %p, aSuspend = %s\n",
           this, SuspendTypeToStr(aSuspend)));

  mAudioChannelSuspended = suspended;

  DisabledTrackMode disabledMode =
      suspended ? DisabledTrackMode::SILENCE_BLACK : DisabledTrackMode::ENABLED;
  mStream->SetTrackEnabled(AudioNodeStream::AUDIO_TRACK, disabledMode);

  AudioChannelService::AudibleState audible =
      aSuspend == nsISuspendedTypes::NONE_SUSPENDED
          ? AudioChannelService::AudibleState::eAudible
          : AudioChannelService::AudibleState::eNotAudible;
  if (mAudible != audible) {
    mAudible = audible;
    mAudioChannelAgent->NotifyStartedAudible(
        audible,
        AudioChannelService::AudibleChangedReasons::ePauseStateChanged);
  }
  return NS_OK;
}

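// nsIAudioChannelAgentCallback: the window's audio capture state changed.
// Connect the stream to, or disconnect it from, the window's capture stream.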
NS_IMETHODIMP
AudioDestinationNode::WindowAudioCaptureChanged(bool aCapture) {
  MOZ_ASSERT(mAudioChannelAgent);

  if (!mStream || Context()->IsOffline()) {
    return NS_OK;
  }

  nsCOMPtr<nsPIDOMWindowInner> ownerWindow = GetOwner();
  if (!ownerWindow) {
    return NS_OK;
  }

  if (aCapture != mCaptured) {
    if (aCapture) {
      nsCOMPtr<nsPIDOMWindowInner> window = Context()->GetParentObject();
      uint64_t id = window->WindowID();
      mCaptureStreamPort =
          mStream->Graph()->ConnectToCaptureStream(id, mStream);
    } else {
      mCaptureStreamPort->Destroy();
    }
    mCaptured = aCapture;
  }

  return NS_OK;
}

nsresult AudioDestinationNode::CreateAudioChannelAgent() {
  if (mIsOffline) {
    return NS_OK;
  }

  nsresult rv = NS_OK;
  if (mAudioChannelAgent) {
    rv = mAudioChannelAgent->NotifyStoppedPlaying();
    if (NS_WARN_IF(NS_FAILED(rv))) {
      return rv;
    }
  }

  mAudioChannelAgent = new AudioChannelAgent();
  rv = mAudioChannelAgent->InitWithWeakCallback(GetOwner(), this);
  if (NS_WARN_IF(NS_FAILED(rv))) {
    return rv;
  }

  return NS_OK;
}

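// Called on the main thread by InputMutedRunnable when the destination's
// input flips between silent and audible. When input becomes audible, make
// sure an audio channel agent exists, tell it playback started, and apply the
// window's current volume and suspend configuration.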
void AudioDestinationNode::InputMuted(bool aMuted) {
  MOZ_ASSERT(Context() && !Context()->IsOffline());

  if (!mAudioChannelAgent) {
    if (aMuted) {
      return;
    }
    CreateAudioChannelAgent();
  }

  if (aMuted) {
    mAudioChannelAgent->NotifyStoppedPlaying();
    // Reset the state so the node is regarded as audible again by default.
    mAudible = AudioChannelService::AudibleState::eAudible;
    return;
  }

  AudioPlaybackConfig config;
  nsresult rv = mAudioChannelAgent->NotifyStartedPlaying(&config, mAudible);
  if (NS_WARN_IF(NS_FAILED(rv))) {
    return;
  }

  WindowVolumeChanged(config.mVolume, config.mMuted);
  WindowSuspendChanged(config.mSuspend);
}

} // namespace dom
} // namespace mozilla