/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "AudioDestinationNode.h"
#include "AlignmentUtils.h"
#include "AudioContext.h"
#include "mozilla/dom/AudioDestinationNodeBinding.h"
#include "mozilla/dom/ScriptSettings.h"
#include "mozilla/Services.h"
#include "AudioChannelAgent.h"
#include "AudioChannelService.h"
#include "AudioNodeEngine.h"
#include "AudioNodeStream.h"
#include "MediaStreamGraph.h"
#include "OfflineAudioCompletionEvent.h"
#include "nsContentUtils.h"
#include "nsIInterfaceRequestorUtils.h"
#include "nsIDocShell.h"
#include "nsIPermissionManager.h"
#include "nsIScriptObjectPrincipal.h"
#include "nsServiceManagerUtils.h"
#include "mozilla/dom/Promise.h"

namespace mozilla {
namespace dom {

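// Only the address of this variable matters; it serves as the key that
// identifies the Web Audio output in AddAudioOutput() and
// SetAudioOutputVolume() below.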
static uint8_t gWebAudioOutputKey;

class OfflineDestinationNodeEngine final : public AudioNodeEngine
{
public:
  OfflineDestinationNodeEngine(AudioDestinationNode* aNode,
                               uint32_t aNumberOfChannels,
                               uint32_t aLength,
                               float aSampleRate)
    : AudioNodeEngine(aNode)
    , mWriteIndex(0)
    , mNumberOfChannels(aNumberOfChannels)
    , mLength(aLength)
    , mSampleRate(aSampleRate)
    , mBufferAllocated(false)
  {
  }

  void ProcessBlock(AudioNodeStream* aStream,
                    GraphTime aFrom,
                    const AudioBlock& aInput,
                    AudioBlock* aOutput,
                    bool* aFinished) override
  {
    // Copy the input through for completeness only; this output will not
    // go anywhere.
    *aOutput = aInput;

    // The output buffer is allocated lazily, on the rendering thread, when
    // non-null input is received.
    if (!mBufferAllocated && !aInput.IsNull()) {
      // These allocations might fail if content provides a huge number of
      // channels or size, but it's OK since we'll deal with the failure
      // gracefully.
      mBuffer = ThreadSharedFloatArrayBufferList::
        Create(mNumberOfChannels, mLength, fallible);
      if (mBuffer && mWriteIndex) {
        // Zero the leading frames for any null chunks that were skipped.
        for (uint32_t i = 0; i < mNumberOfChannels; ++i) {
          float* channelData = mBuffer->GetDataForWrite(i);
          PodZero(channelData, mWriteIndex);
        }
      }

      mBufferAllocated = true;
    }

    // Skip copying if there is no buffer.
    uint32_t outputChannelCount = mBuffer ? mNumberOfChannels : 0;

    // Record our input buffer
    MOZ_ASSERT(mWriteIndex < mLength, "How did this happen?");
    const uint32_t duration = std::min(WEBAUDIO_BLOCK_SIZE, mLength - mWriteIndex);
    const uint32_t inputChannelCount = aInput.ChannelCount();
    for (uint32_t i = 0; i < outputChannelCount; ++i) {
      float* outputData = mBuffer->GetDataForWrite(i) + mWriteIndex;
      if (aInput.IsNull() || i >= inputChannelCount) {
        PodZero(outputData, duration);
      } else {
        const float* inputBuffer = static_cast<const float*>(aInput.mChannelData[i]);
        if (duration == WEBAUDIO_BLOCK_SIZE && IS_ALIGNED16(inputBuffer)) {
          // Use the optimized version of the copy with scale operation
          AudioBlockCopyChannelWithScale(inputBuffer, aInput.mVolume,
                                         outputData);
        } else {
          if (aInput.mVolume == 1.0f) {
            PodCopy(outputData, inputBuffer, duration);
          } else {
            for (uint32_t j = 0; j < duration; ++j) {
              outputData[j] = aInput.mVolume * inputBuffer[j];
            }
          }
        }
      }
    }
    mWriteIndex += duration;

    if (mWriteIndex >= mLength) {
      NS_ASSERTION(mWriteIndex == mLength, "Overshot length");
      // Go to finished state. When the graph's current time eventually reaches
      // the end of the stream, then the main thread will be notified and we'll
      // shut down the AudioContext.
      *aFinished = true;
    }
  }

  bool IsActive() const override
  {
    // Keep processing to track stream time, which is used for all timelines
    // associated with the same AudioContext.
    return true;
  }


  class OnCompleteTask final : public Runnable
  {
  public:
    OnCompleteTask(AudioContext* aAudioContext, AudioBuffer* aRenderedBuffer)
      : mAudioContext(aAudioContext)
      , mRenderedBuffer(aRenderedBuffer)
    {}

    NS_IMETHOD Run() override
    {
      RefPtr<OfflineAudioCompletionEvent> event =
          new OfflineAudioCompletionEvent(mAudioContext, nullptr, nullptr);
      event->InitEvent(mRenderedBuffer);
      mAudioContext->DispatchTrustedEvent(event);

      return NS_OK;
    }
  private:
    RefPtr<AudioContext> mAudioContext;
    RefPtr<AudioBuffer> mRenderedBuffer;
  };

  void FireOfflineCompletionEvent(AudioDestinationNode* aNode)
  {
    AudioContext* context = aNode->Context();
    context->Shutdown();
    // Shutdown drops self reference, but the context is still referenced by aNode,
    // which is strongly referenced by the runnable that called
    // AudioDestinationNode::FireOfflineCompletionEvent.

    // Create the rendered buffer from the recorded input.
    ErrorResult rv;
    RefPtr<AudioBuffer> renderedBuffer =
      AudioBuffer::Create(context, mNumberOfChannels, mLength, mSampleRate,
                          mBuffer.forget(), rv);
    if (rv.Failed()) {
      rv.SuppressException();
      return;
    }

    aNode->ResolvePromise(renderedBuffer);

    RefPtr<OnCompleteTask> onCompleteTask =
      new OnCompleteTask(context, renderedBuffer);
    NS_DispatchToMainThread(onCompleteTask);

    context->OnStateChanged(nullptr, AudioContextState::Closed);
  }

  size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const override
  {
    size_t amount = AudioNodeEngine::SizeOfExcludingThis(aMallocSizeOf);
    if (mBuffer) {
      amount += mBuffer->SizeOfIncludingThis(aMallocSizeOf);
    }
    return amount;
  }

  size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const override
  {
    return aMallocSizeOf(this) + SizeOfExcludingThis(aMallocSizeOf);
  }

private:
  // The input to the destination node is recorded in mBuffer.
  // When this buffer fills up with mLength frames, the buffered input is sent
  // to the main thread in order to dispatch OfflineAudioCompletionEvent.
  RefPtr<ThreadSharedFloatArrayBufferList> mBuffer;
  // An index representing the next offset in mBuffer to be written to.
  uint32_t mWriteIndex;
  uint32_t mNumberOfChannels;
  // How many frames the OfflineAudioContext intends to produce.
  uint32_t mLength;
  float mSampleRate;
  bool mBufferAllocated;
};

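// Runnable that forwards input-muted state changes from the graph thread to
// the AudioDestinationNode on the main thread.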
class InputMutedRunnable final : public Runnable
{
public:
  InputMutedRunnable(AudioNodeStream* aStream,
                     bool aInputMuted)
    : mStream(aStream)
    , mInputMuted(aInputMuted)
  {
  }

  NS_IMETHOD Run() override
  {
    MOZ_ASSERT(NS_IsMainThread());
    RefPtr<AudioNode> node = mStream->Engine()->NodeMainThread();

    if (node) {
      RefPtr<AudioDestinationNode> destinationNode =
        static_cast<AudioDestinationNode*>(node.get());
      destinationNode->InputMuted(mInputMuted);
    }
    return NS_OK;
  }

private:
  RefPtr<AudioNodeStream> mStream;
  bool mInputMuted;
};

class DestinationNodeEngine final : public AudioNodeEngine
{
public:
  explicit DestinationNodeEngine(AudioDestinationNode* aNode)
    : AudioNodeEngine(aNode)
    , mVolume(1.0f)
    , mLastInputMuted(true)
    , mSuspended(false)
  {
    MOZ_ASSERT(aNode);
  }

  void ProcessBlock(AudioNodeStream* aStream,
                    GraphTime aFrom,
                    const AudioBlock& aInput,
                    AudioBlock* aOutput,
                    bool* aFinished) override
  {
    *aOutput = aInput;
    aOutput->mVolume *= mVolume;

    if (mSuspended) {
      return;
    }

    bool newInputMuted = aInput.IsNull() || aInput.IsMuted();
    if (newInputMuted != mLastInputMuted) {
      mLastInputMuted = newInputMuted;

      RefPtr<InputMutedRunnable> runnable =
        new InputMutedRunnable(aStream, newInputMuted);
      aStream->Graph()->
        DispatchToMainThreadAfterStreamStateUpdate(runnable.forget());
    }
  }

  bool IsActive() const override
  {
    // Keep processing to track stream time, which is used for all timelines
    // associated with the same AudioContext.  If there are no other engines
    // for the AudioContext, then this could return false to suspend the
    // stream, but the stream is blocked anyway through
    // AudioDestinationNode::SetIsOnlyNodeForContext().
    return true;
  }

  void SetDoubleParameter(uint32_t aIndex, double aParam) override
  {
    if (aIndex == VOLUME) {
      mVolume = aParam;
    }
  }

  void SetInt32Parameter(uint32_t aIndex, int32_t aParam) override
  {
    if (aIndex == SUSPENDED) {
      mSuspended = !!aParam;
      if (mSuspended) {
        mLastInputMuted = true;
      }
    }
  }

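  // Parameter indices used with SendDoubleParameterToStream() and
  // SendInt32ParameterToStream() from the main thread.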
  enum Parameters {
    VOLUME,
    SUSPENDED,
  };

  size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const override
  {
    return aMallocSizeOf(this) + SizeOfExcludingThis(aMallocSizeOf);
  }

private:
  float mVolume;
  bool mLastInputMuted;
  bool mSuspended;
};

NS_IMPL_CYCLE_COLLECTION_INHERITED(AudioDestinationNode, AudioNode,
                                   mAudioChannelAgent,
                                   mOfflineRenderingPromise)

NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION_INHERITED(AudioDestinationNode)
  NS_INTERFACE_MAP_ENTRY(nsIAudioChannelAgentCallback)
NS_INTERFACE_MAP_END_INHERITING(AudioNode)

NS_IMPL_ADDREF_INHERITED(AudioDestinationNode, AudioNode)
NS_IMPL_RELEASE_INHERITED(AudioDestinationNode, AudioNode)

AudioDestinationNode::AudioDestinationNode(AudioContext* aContext,
                                           bool aIsOffline,
                                           AudioChannel aChannel,
                                           uint32_t aNumberOfChannels,
                                           uint32_t aLength, float aSampleRate)
  : AudioNode(aContext, aIsOffline ? aNumberOfChannels : 2,
              ChannelCountMode::Explicit, ChannelInterpretation::Speakers)
  , mFramesToProduce(aLength)
  , mAudioChannel(AudioChannel::Normal)
  , mIsOffline(aIsOffline)
  , mAudioChannelSuspended(false)
  , mCaptured(false)
{
  MediaStreamGraph* graph = aIsOffline ?
                            MediaStreamGraph::CreateNonRealtimeInstance(aSampleRate) :
                            MediaStreamGraph::GetInstance(MediaStreamGraph::AUDIO_THREAD_DRIVER, aChannel);
  AudioNodeEngine* engine = aIsOffline ?
                            new OfflineDestinationNodeEngine(this, aNumberOfChannels,
                                                             aLength, aSampleRate) :
                            static_cast<AudioNodeEngine*>(new DestinationNodeEngine(this));

  AudioNodeStream::Flags flags =
    AudioNodeStream::NEED_MAIN_THREAD_CURRENT_TIME |
    AudioNodeStream::NEED_MAIN_THREAD_FINISHED |
    AudioNodeStream::EXTERNAL_OUTPUT;
  mStream = AudioNodeStream::Create(aContext, engine, flags, graph);
  mStream->AddMainThreadListener(this);
  mStream->AddAudioOutput(&gWebAudioOutputKey);

  if (!aIsOffline) {
    graph->NotifyWhenGraphStarted(mStream);
  }

  if (aChannel != AudioChannel::Normal) {
    ErrorResult rv;
    SetMozAudioChannelType(aChannel, rv);
  }
}

AudioDestinationNode::~AudioDestinationNode()
{
}

size_t
AudioDestinationNode::SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const
{
  size_t amount = AudioNode::SizeOfExcludingThis(aMallocSizeOf);
  // Might be useful in the future:
  // - mAudioChannelAgent
  return amount;
}

size_t
AudioDestinationNode::SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const
{
  return aMallocSizeOf(this) + SizeOfExcludingThis(aMallocSizeOf);
}

void
AudioDestinationNode::DestroyAudioChannelAgent()
{
  if (mAudioChannelAgent && !Context()->IsOffline()) {
    mAudioChannelAgent->NotifyStoppedPlaying();
    mAudioChannelAgent = nullptr;
  }
}

void
AudioDestinationNode::DestroyMediaStream()
{
  DestroyAudioChannelAgent();

  if (!mStream)
    return;

  mStream->RemoveMainThreadListener(this);
  MediaStreamGraph* graph = mStream->Graph();
  if (graph->IsNonRealtime()) {
    MediaStreamGraph::DestroyNonRealtimeInstance(graph);
  }
  AudioNode::DestroyMediaStream();
}

void
AudioDestinationNode::NotifyMainThreadStreamFinished()
{
  MOZ_ASSERT(mStream->IsFinished());

  if (mIsOffline) {
    NS_DispatchToCurrentThread(NewRunnableMethod(this,
                                                 &AudioDestinationNode::FireOfflineCompletionEvent));
  }
}

void
AudioDestinationNode::FireOfflineCompletionEvent()
{
  OfflineDestinationNodeEngine* engine =
    static_cast<OfflineDestinationNodeEngine*>(Stream()->Engine());
  engine->FireOfflineCompletionEvent(this);
}

void
AudioDestinationNode::ResolvePromise(AudioBuffer* aRenderedBuffer)
{
  MOZ_ASSERT(NS_IsMainThread());
  MOZ_ASSERT(mIsOffline);
  mOfflineRenderingPromise->MaybeResolve(aRenderedBuffer);
}

uint32_t
AudioDestinationNode::MaxChannelCount() const
{
  return Context()->MaxChannelCount();
}

void
AudioDestinationNode::SetChannelCount(uint32_t aChannelCount, ErrorResult& aRv)
{
  if (aChannelCount > MaxChannelCount()) {
    aRv.Throw(NS_ERROR_DOM_INDEX_SIZE_ERR);
    return;
  }

  AudioNode::SetChannelCount(aChannelCount, aRv);
}

void
AudioDestinationNode::Mute()
{
  MOZ_ASSERT(Context() && !Context()->IsOffline());
  SendDoubleParameterToStream(DestinationNodeEngine::VOLUME, 0.0f);
}

void
AudioDestinationNode::Unmute()
{
  MOZ_ASSERT(Context() && !Context()->IsOffline());
  SendDoubleParameterToStream(DestinationNodeEngine::VOLUME, 1.0f);
}

void
AudioDestinationNode::Suspend()
{
  DestroyAudioChannelAgent();
  SendInt32ParameterToStream(DestinationNodeEngine::SUSPENDED, 1);
}

void
AudioDestinationNode::Resume()
{
  CreateAudioChannelAgent();
  SendInt32ParameterToStream(DestinationNodeEngine::SUSPENDED, 0);
}

void
AudioDestinationNode::OfflineShutdown()
{
  MOZ_ASSERT(Context() && Context()->IsOffline(),
             "Should only be called on a valid OfflineAudioContext");

  MediaStreamGraph::DestroyNonRealtimeInstance(mStream->Graph());
  mOfflineRenderingRef.Drop(this);
}

JSObject*
AudioDestinationNode::WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto)
{
  return AudioDestinationNodeBinding::Wrap(aCx, this, aGivenProto);
}

void
AudioDestinationNode::StartRendering(Promise* aPromise)
{
  mOfflineRenderingPromise = aPromise;
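  // Keep this node alive (self-reference) for the duration of offline
  // rendering; the reference is dropped again in OfflineShutdown().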
  mOfflineRenderingRef.Take(this);
  mStream->Graph()->StartNonRealtimeProcessing(mFramesToProduce);
}

NS_IMETHODIMP
AudioDestinationNode::WindowVolumeChanged(float aVolume, bool aMuted)
{
  if (!mStream) {
    return NS_OK;
  }

  float volume = aMuted ? 0.0 : aVolume;
  mStream->SetAudioOutputVolume(&gWebAudioOutputKey, volume);
  return NS_OK;
}

NS_IMETHODIMP
AudioDestinationNode::WindowSuspendChanged(nsSuspendedTypes aSuspend)
{
  if (!mStream) {
    return NS_OK;
  }

  bool suspended = (aSuspend != nsISuspendedTypes::NONE_SUSPENDED);
  if (mAudioChannelSuspended == suspended) {
    return NS_OK;
  }

  mAudioChannelSuspended = suspended;
  Context()->DispatchTrustedEvent(!suspended ?
    NS_LITERAL_STRING("mozinterruptend") :
    NS_LITERAL_STRING("mozinterruptbegin"));

  DisabledTrackMode disabledMode = suspended ? DisabledTrackMode::SILENCE_BLACK
                                             : DisabledTrackMode::ENABLED;
  mStream->SetTrackEnabled(AudioNodeStream::AUDIO_TRACK, disabledMode);
  return NS_OK;
}

NS_IMETHODIMP
AudioDestinationNode::WindowAudioCaptureChanged(bool aCapture)
{
  MOZ_ASSERT(mAudioChannelAgent);

  if (!mStream || Context()->IsOffline()) {
    return NS_OK;
  }

  nsCOMPtr<nsPIDOMWindowInner> ownerWindow = GetOwner();
  if (!ownerWindow) {
    return NS_OK;
  }

  if (aCapture != mCaptured) {
    if (aCapture) {
      nsCOMPtr<nsPIDOMWindowInner> window = Context()->GetParentObject();
      uint64_t id = window->WindowID();
      mCaptureStreamPort =
        mStream->Graph()->ConnectToCaptureStream(id, mStream);
    } else {
      mCaptureStreamPort->Destroy();
    }
    mCaptured = aCapture;
  }

  return NS_OK;
}

AudioChannel
AudioDestinationNode::MozAudioChannelType() const
{
  return mAudioChannel;
}

void
AudioDestinationNode::SetMozAudioChannelType(AudioChannel aValue, ErrorResult& aRv)
{
  if (Context()->IsOffline()) {
    aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
    return;
  }

  if (aValue != mAudioChannel &&
      CheckAudioChannelPermissions(aValue)) {
    mAudioChannel = aValue;

    if (mStream) {
      mStream->SetAudioChannelType(mAudioChannel);
    }

    if (mAudioChannelAgent) {
      CreateAudioChannelAgent();
    }
  }
}

bool
AudioDestinationNode::CheckAudioChannelPermissions(AudioChannel aValue)
{
  // Only the normal channel doesn't need permission.
  if (aValue == AudioChannel::Normal) {
    return true;
  }

  // The default audio channel doesn't need permission either.
  if (aValue == AudioChannelService::GetDefaultAudioChannel()) {
    return true;
  }

  nsCOMPtr<nsIPermissionManager> permissionManager =
    services::GetPermissionManager();
  if (!permissionManager) {
    return false;
  }

  nsCOMPtr<nsIScriptObjectPrincipal> sop = do_QueryInterface(GetOwner());
  NS_ASSERTION(sop, "Window didn't QI to nsIScriptObjectPrincipal!");
  nsCOMPtr<nsIPrincipal> principal = sop->GetPrincipal();

  uint32_t perm = nsIPermissionManager::UNKNOWN_ACTION;

  nsCString channel;
  channel.AssignASCII(AudioChannelValues::strings[uint32_t(aValue)].value,
                      AudioChannelValues::strings[uint32_t(aValue)].length);
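  // The permission name has the form "audio-channel-<channel name>".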
  permissionManager->TestExactPermissionFromPrincipal(principal,
    nsCString(NS_LITERAL_CSTRING("audio-channel-") + channel).get(),
    &perm);

  return perm == nsIPermissionManager::ALLOW_ACTION;
}

nsresult
AudioDestinationNode::CreateAudioChannelAgent()
{
  if (mIsOffline) {
    return NS_OK;
  }

  nsresult rv = NS_OK;
  if (mAudioChannelAgent) {
    rv = mAudioChannelAgent->NotifyStoppedPlaying();
    if (NS_WARN_IF(NS_FAILED(rv))) {
      return rv;
    }
  }

  mAudioChannelAgent = new AudioChannelAgent();
  rv = mAudioChannelAgent->InitWithWeakCallback(GetOwner(),
                                                static_cast<int32_t>(mAudioChannel),
                                                this);
  if (NS_WARN_IF(NS_FAILED(rv))) {
    return rv;
  }

  return NS_OK;
}

void
AudioDestinationNode::InputMuted(bool aMuted)
{
  MOZ_ASSERT(Context() && !Context()->IsOffline());

  if (!mAudioChannelAgent) {
    if (aMuted) {
      return;
    }
    CreateAudioChannelAgent();
  }

  if (aMuted) {
    mAudioChannelAgent->NotifyStoppedPlaying();
    return;
  }

  AudioPlaybackConfig config;
  nsresult rv = mAudioChannelAgent->NotifyStartedPlaying(&config,
                                                         AudioChannelService::AudibleState::eAudible);
  if (NS_WARN_IF(NS_FAILED(rv))) {
    return;
  }

  WindowVolumeChanged(config.mVolume, config.mMuted);
  WindowSuspendChanged(config.mSuspend);
}

} // namespace dom
} // namespace mozilla