1 /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
2 /* vim:set ts=2 sw=2 sts=2 et cindent: */
3 /* This Source Code Form is subject to the terms of the Mozilla Public
4  * License, v. 2.0. If a copy of the MPL was not distributed with this
5  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
6 
7 #include "ScriptProcessorNode.h"
8 #include "mozilla/dom/ScriptProcessorNodeBinding.h"
9 #include "AudioBuffer.h"
10 #include "AudioDestinationNode.h"
11 #include "AudioNodeEngine.h"
12 #include "AudioNodeTrack.h"
13 #include "AudioProcessingEvent.h"
14 #include "WebAudioUtils.h"
15 #include "mozilla/dom/ScriptSettings.h"
16 #include "mozilla/Mutex.h"
17 #include "mozilla/PodOperations.h"
18 #include <deque>
19 
20 namespace mozilla::dom {
21 
// The maximum latency, in seconds, that we can live with before dropping
// buffers.  Compared against the accumulated |mLatency| in
// SharedBuffers::FinishProducingOutputBuffer().
static const float MAX_LATENCY_S = 0.5;
25 
26 // This class manages a queue of output buffers shared between
27 // the main thread and the Media Track Graph thread.
28 class SharedBuffers final {
29  private:
30   class OutputQueue final {
31    public:
OutputQueue(const char * aName)32     explicit OutputQueue(const char* aName) : mMutex(aName) {}
33 
SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const34     size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const {
35       mMutex.AssertCurrentThreadOwns();
36 
37       size_t amount = 0;
38       for (size_t i = 0; i < mBufferList.size(); i++) {
39         amount += mBufferList[i].SizeOfExcludingThis(aMallocSizeOf, false);
40       }
41 
42       return amount;
43     }
44 
Lock() const45     Mutex& Lock() const { return const_cast<OutputQueue*>(this)->mMutex; }
46 
ReadyToConsume() const47     size_t ReadyToConsume() const {
48       // Accessed on both main thread and media graph thread.
49       mMutex.AssertCurrentThreadOwns();
50       return mBufferList.size();
51     }
52 
53     // Produce one buffer
Produce()54     AudioChunk& Produce() {
55       mMutex.AssertCurrentThreadOwns();
56       MOZ_ASSERT(NS_IsMainThread());
57       mBufferList.push_back(AudioChunk());
58       return mBufferList.back();
59     }
60 
61     // Consumes one buffer.
Consume()62     AudioChunk Consume() {
63       mMutex.AssertCurrentThreadOwns();
64       MOZ_ASSERT(!NS_IsMainThread());
65       MOZ_ASSERT(ReadyToConsume() > 0);
66       AudioChunk front = mBufferList.front();
67       mBufferList.pop_front();
68       return front;
69     }
70 
71     // Empties the buffer queue.
Clear()72     void Clear() {
73       mMutex.AssertCurrentThreadOwns();
74       mBufferList.clear();
75     }
76 
77    private:
78     typedef std::deque<AudioChunk> BufferList;
79 
80     // Synchronizes access to mBufferList.  Note that it's the responsibility
81     // of the callers to perform the required locking, and we assert that every
82     // time we access mBufferList.
83     Mutex mMutex;
84     // The list representing the queue.
85     BufferList mBufferList;
86   };
87 
88  public:
SharedBuffers(float aSampleRate)89   explicit SharedBuffers(float aSampleRate)
90       : mOutputQueue("SharedBuffers::outputQueue"),
91         mDelaySoFar(TRACK_TIME_MAX),
92         mSampleRate(aSampleRate),
93         mLatency(0.0),
94         mDroppingBuffers(false) {}
95 
SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const96   size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const {
97     size_t amount = aMallocSizeOf(this);
98 
99     {
100       MutexAutoLock lock(mOutputQueue.Lock());
101       amount += mOutputQueue.SizeOfExcludingThis(aMallocSizeOf);
102     }
103 
104     return amount;
105   }
106 
107   // main thread
108 
109   // NotifyNodeIsConnected() may be called even when the state has not
110   // changed.
NotifyNodeIsConnected(bool aIsConnected)111   void NotifyNodeIsConnected(bool aIsConnected) {
112     MOZ_ASSERT(NS_IsMainThread());
113     if (!aIsConnected) {
114       // Reset main thread state for FinishProducingOutputBuffer().
115       mLatency = 0.0f;
116       mLastEventTime = TimeStamp();
117       mDroppingBuffers = false;
118       // Don't flush the output buffer here because the graph thread may be
119       // using it now.  The graph thread will flush when it knows it is
120       // disconnected.
121     }
122     mNodeIsConnected = aIsConnected;
123   }
124 
FinishProducingOutputBuffer(const AudioChunk & aBuffer)125   void FinishProducingOutputBuffer(const AudioChunk& aBuffer) {
126     MOZ_ASSERT(NS_IsMainThread());
127 
128     if (!mNodeIsConnected) {
129       // The output buffer is not used, and mLastEventTime will not be
130       // initialized until the node is re-connected.
131       return;
132     }
133 
134     TimeStamp now = TimeStamp::Now();
135 
136     if (mLastEventTime.IsNull()) {
137       mLastEventTime = now;
138     } else {
139       // When main thread blocking has built up enough so
140       // |mLatency > MAX_LATENCY_S|, frame dropping starts. It continues until
141       // the output buffer is completely empty, at which point the accumulated
142       // latency is also reset to 0.
143       // It could happen that the output queue becomes empty before the input
144       // node has fully caught up. In this case there will be events where
145       // |(now - mLastEventTime)| is very short, making mLatency negative.
146       // As this happens and the size of |mLatency| becomes greater than
147       // MAX_LATENCY_S, frame dropping starts again to maintain an as short
148       // output queue as possible.
149       float latency = (now - mLastEventTime).ToSeconds();
150       float bufferDuration = aBuffer.mDuration / mSampleRate;
151       mLatency += latency - bufferDuration;
152       mLastEventTime = now;
153       if (fabs(mLatency) > MAX_LATENCY_S) {
154         mDroppingBuffers = true;
155       }
156     }
157 
158     MutexAutoLock lock(mOutputQueue.Lock());
159     if (mDroppingBuffers) {
160       if (mOutputQueue.ReadyToConsume()) {
161         return;
162       }
163       mDroppingBuffers = false;
164       mLatency = 0;
165     }
166 
167     for (uint32_t offset = 0; offset < aBuffer.mDuration;
168          offset += WEBAUDIO_BLOCK_SIZE) {
169       AudioChunk& chunk = mOutputQueue.Produce();
170       chunk = aBuffer;
171       chunk.SliceTo(offset, offset + WEBAUDIO_BLOCK_SIZE);
172     }
173   }
174 
175   // graph thread
176 
GetOutputBuffer()177   AudioChunk GetOutputBuffer() {
178     MOZ_ASSERT(!NS_IsMainThread());
179     AudioChunk buffer;
180 
181     {
182       MutexAutoLock lock(mOutputQueue.Lock());
183       if (mOutputQueue.ReadyToConsume() > 0) {
184         if (mDelaySoFar == TRACK_TIME_MAX) {
185           mDelaySoFar = 0;
186         }
187         buffer = mOutputQueue.Consume();
188       } else {
189         // If we're out of buffers to consume, just output silence
190         buffer.SetNull(WEBAUDIO_BLOCK_SIZE);
191         if (mDelaySoFar != TRACK_TIME_MAX) {
192           // Remember the delay that we just hit
193           mDelaySoFar += WEBAUDIO_BLOCK_SIZE;
194         }
195       }
196     }
197 
198     return buffer;
199   }
200 
DelaySoFar() const201   TrackTime DelaySoFar() const {
202     MOZ_ASSERT(!NS_IsMainThread());
203     return mDelaySoFar == TRACK_TIME_MAX ? 0 : mDelaySoFar;
204   }
205 
Flush()206   void Flush() {
207     MOZ_ASSERT(!NS_IsMainThread());
208     mDelaySoFar = TRACK_TIME_MAX;
209     {
210       MutexAutoLock lock(mOutputQueue.Lock());
211       mOutputQueue.Clear();
212     }
213   }
214 
215  private:
216   OutputQueue mOutputQueue;
217   // How much delay we've seen so far.  This measures the amount of delay
218   // caused by the main thread lagging behind in producing output buffers.
219   // TRACK_TIME_MAX means that we have not received our first buffer yet.
220   // Graph thread only.
221   TrackTime mDelaySoFar;
222   // The samplerate of the context.
223   const float mSampleRate;
224   // The remaining members are main thread only.
225   // This is the latency caused by the buffering. If this grows too high, we
226   // will drop buffers until it is acceptable.
227   float mLatency;
228   // This is the time at which we last produced a buffer, to detect if the main
229   // thread has been blocked.
230   TimeStamp mLastEventTime;
231   // True if we should be dropping buffers.
232   bool mDroppingBuffers;
233   // True iff the AudioNode has at least one input or output connected.
234   bool mNodeIsConnected;
235 };
236 
// Graph-thread engine for ScriptProcessorNode.
//
// Data flow: ProcessBlock() accumulates WEBAUDIO_BLOCK_SIZE-frame input
// chunks into mInputBuffer until mBufferSize frames are ready, then
// SendBuffersToMainThread() dispatches them to the main thread where the
// audioprocess event runs script.  Script output comes back through
// SharedBuffers::FinishProducingOutputBuffer() and is consumed here one
// block at a time via SharedBuffers::GetOutputBuffer().
class ScriptProcessorNodeEngine final : public AudioNodeEngine {
 public:
  ScriptProcessorNodeEngine(ScriptProcessorNode* aNode,
                            AudioDestinationNode* aDestination,
                            uint32_t aBufferSize,
                            uint32_t aNumberOfInputChannels)
      : AudioNodeEngine(aNode),
        mDestination(aDestination->Track()),
        mSharedBuffers(new SharedBuffers(mDestination->mSampleRate)),
        mBufferSize(aBufferSize),
        mInputChannelCount(aNumberOfInputChannels),
        mInputWriteIndex(0) {}

  SharedBuffers* GetSharedBuffers() const { return mSharedBuffers.get(); }

  // Parameter indices understood by SetInt32Parameter().
  enum {
    IS_CONNECTED,
  };

  // Receives connection-state updates sent from the main thread by
  // ScriptProcessorNode::UpdateConnectedStatus().
  void SetInt32Parameter(uint32_t aIndex, int32_t aParam) override {
    switch (aIndex) {
      case IS_CONNECTED:
        mIsConnected = aParam;
        break;
      default:
        NS_ERROR("Bad Int32Parameter");
    }  // End index switch.
  }

  void ProcessBlock(AudioNodeTrack* aTrack, GraphTime aFrom,
                    const AudioBlock& aInput, AudioBlock* aOutput,
                    bool* aFinished) override {
    // This node is not connected to anything. Per spec, we don't fire the
    // onaudioprocess event. We also want to clear out the input and output
    // buffer queue, and output a null buffer.
    if (!mIsConnected) {
      aOutput->SetNull(WEBAUDIO_BLOCK_SIZE);
      mSharedBuffers->Flush();
      mInputWriteIndex = 0;
      return;
    }

    // The input buffer is allocated lazily when non-null input is received.
    if (!aInput.IsNull() && !mInputBuffer) {
      mInputBuffer = ThreadSharedFloatArrayBufferList::Create(
          mInputChannelCount, mBufferSize, fallible);
      if (mInputBuffer && mInputWriteIndex) {
        // Zero leading for null chunks that were skipped.
        for (uint32_t i = 0; i < mInputChannelCount; ++i) {
          float* channelData = mInputBuffer->GetDataForWrite(i);
          PodZero(channelData, mInputWriteIndex);
        }
      }
    }

    // First, record our input buffer, if its allocation succeeded.
    // (On allocation failure inputChannelCount is 0 and input is dropped.)
    uint32_t inputChannelCount = mInputBuffer ? mInputBuffer->GetChannels() : 0;
    for (uint32_t i = 0; i < inputChannelCount; ++i) {
      float* writeData = mInputBuffer->GetDataForWrite(i) + mInputWriteIndex;
      if (aInput.IsNull()) {
        PodZero(writeData, aInput.GetDuration());
      } else {
        MOZ_ASSERT(aInput.GetDuration() == WEBAUDIO_BLOCK_SIZE, "sanity check");
        MOZ_ASSERT(aInput.ChannelCount() == inputChannelCount);
        AudioBlockCopyChannelWithScale(
            static_cast<const float*>(aInput.mChannelData[i]), aInput.mVolume,
            writeData);
      }
    }
    mInputWriteIndex += aInput.GetDuration();

    // Now, see if we have data to output
    // Note that we need to do this before sending the buffer to the main
    // thread so that our delay time is updated.
    *aOutput = mSharedBuffers->GetOutputBuffer();

    if (mInputWriteIndex >= mBufferSize) {
      SendBuffersToMainThread(aTrack, aFrom);
      mInputWriteIndex -= mBufferSize;
    }
  }

  bool IsActive() const override {
    // Could return false when !mIsConnected after all output chunks produced
    // by main thread events calling
    // SharedBuffers::FinishProducingOutputBuffer() have been processed.
    return true;
  }

  size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const override {
    // Not owned:
    // - mDestination (probably)
    size_t amount = AudioNodeEngine::SizeOfExcludingThis(aMallocSizeOf);
    amount += mSharedBuffers->SizeOfIncludingThis(aMallocSizeOf);
    if (mInputBuffer) {
      amount += mInputBuffer->SizeOfIncludingThis(aMallocSizeOf);
    }

    return amount;
  }

  size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const override {
    return aMallocSizeOf(this) + SizeOfExcludingThis(aMallocSizeOf);
  }

 private:
  // Hands the completed input buffer (ownership transferred via
  // mInputBuffer.forget()) to a main-thread runnable that dispatches the
  // audioprocess event and queues the script's output.  Graph thread only.
  void SendBuffersToMainThread(AudioNodeTrack* aTrack, GraphTime aFrom) {
    MOZ_ASSERT(!NS_IsMainThread());

    // we now have a full input buffer ready to be sent to the main thread.
    TrackTime playbackTick = mDestination->GraphTimeToTrackTime(aFrom);
    // Add the duration of the current sample
    playbackTick += WEBAUDIO_BLOCK_SIZE;
    // Add the delay caused by the main thread
    playbackTick += mSharedBuffers->DelaySoFar();
    // Compute the playback time in the coordinate system of the destination
    double playbackTime = mDestination->TrackTimeToSeconds(playbackTick);

    // Main-thread runnable: fires the audioprocess event (if listeners
    // exist) and appends the resulting output to the shared buffer queue.
    class Command final : public Runnable {
     public:
      Command(AudioNodeTrack* aTrack,
              already_AddRefed<ThreadSharedFloatArrayBufferList> aInputBuffer,
              double aPlaybackTime)
          : mozilla::Runnable("Command"),
            mTrack(aTrack),
            mInputBuffer(aInputBuffer),
            mPlaybackTime(aPlaybackTime) {}

      NS_IMETHOD Run() override {
        auto engine = static_cast<ScriptProcessorNodeEngine*>(mTrack->Engine());
        AudioChunk output;
        output.SetNull(engine->mBufferSize);
        {
          auto node =
              static_cast<ScriptProcessorNode*>(engine->NodeMainThread());
          if (!node) {
            return NS_OK;
          }

          if (node->HasListenersFor(nsGkAtoms::onaudioprocess)) {
            DispatchAudioProcessEvent(node, &output);
          }
          // The node may have been destroyed during event dispatch.
        }

        // Append it to our output buffer queue
        engine->GetSharedBuffers()->FinishProducingOutputBuffer(output);

        return NS_OK;
      }

      // Sets up |output| iff buffers are set in event handlers.
      void DispatchAudioProcessEvent(ScriptProcessorNode* aNode,
                                     AudioChunk* aOutput) {
        AudioContext* context = aNode->Context();
        if (!context) {
          return;
        }

        AutoJSAPI jsapi;
        if (NS_WARN_IF(!jsapi.Init(aNode->GetOwner()))) {
          return;
        }
        JSContext* cx = jsapi.cx();
        uint32_t inputChannelCount = aNode->ChannelCount();

        // Create the input buffer
        RefPtr<AudioBuffer> inputBuffer;
        if (mInputBuffer) {
          ErrorResult rv;
          inputBuffer = AudioBuffer::Create(
              context->GetOwner(), inputChannelCount, aNode->BufferSize(),
              context->SampleRate(), mInputBuffer.forget(), rv);
          if (rv.Failed()) {
            rv.SuppressException();
            return;
          }
        }

        // Ask content to produce data in the output buffer
        // Note that we always avoid creating the output buffer here, and we try
        // to avoid creating the input buffer as well.  The AudioProcessingEvent
        // class knows how to lazily create them if needed once the script tries
        // to access them.  Otherwise, we may be able to get away without
        // creating them!
        RefPtr<AudioProcessingEvent> event =
            new AudioProcessingEvent(aNode, nullptr, nullptr);
        event->InitEvent(inputBuffer, inputChannelCount, mPlaybackTime);
        aNode->DispatchTrustedEvent(event);

        // Steal the output buffers if they have been set.
        // Don't create a buffer if it hasn't been used to return output;
        // FinishProducingOutputBuffer() will optimize output = null.
        // GetThreadSharedChannelsForRate() may also return null after OOM.
        if (event->HasOutputBuffer()) {
          ErrorResult rv;
          AudioBuffer* buffer = event->GetOutputBuffer(rv);
          // HasOutputBuffer() returning true means that GetOutputBuffer()
          // will not fail.
          MOZ_ASSERT(!rv.Failed());
          *aOutput = buffer->GetThreadSharedChannelsForRate(cx);
          MOZ_ASSERT(aOutput->IsNull() ||
                         aOutput->mBufferFormat == AUDIO_FORMAT_FLOAT32,
                     "AudioBuffers initialized from JS have float data");
        }
      }

     private:
      RefPtr<AudioNodeTrack> mTrack;
      RefPtr<ThreadSharedFloatArrayBufferList> mInputBuffer;
      double mPlaybackTime;
    };

    RefPtr<Command> command =
        new Command(aTrack, mInputBuffer.forget(), playbackTime);
    mAbstractMainThread->Dispatch(command.forget());
  }

  friend class ScriptProcessorNode;

  RefPtr<AudioNodeTrack> mDestination;
  UniquePtr<SharedBuffers> mSharedBuffers;
  // Accumulates input on the graph thread; handed off (and reset to null)
  // each time a full buffer is sent to the main thread.
  RefPtr<ThreadSharedFloatArrayBufferList> mInputBuffer;
  const uint32_t mBufferSize;
  const uint32_t mInputChannelCount;
  // The write index into the current input buffer
  uint32_t mInputWriteIndex;
  // Mirrors the node's connected state; updated via SetInt32Parameter().
  bool mIsConnected = false;
};
466 
ScriptProcessorNode(AudioContext * aContext,uint32_t aBufferSize,uint32_t aNumberOfInputChannels,uint32_t aNumberOfOutputChannels)467 ScriptProcessorNode::ScriptProcessorNode(AudioContext* aContext,
468                                          uint32_t aBufferSize,
469                                          uint32_t aNumberOfInputChannels,
470                                          uint32_t aNumberOfOutputChannels)
471     : AudioNode(aContext, aNumberOfInputChannels,
472                 mozilla::dom::ChannelCountMode::Explicit,
473                 mozilla::dom::ChannelInterpretation::Speakers),
474       mBufferSize(aBufferSize ? aBufferSize
475                               :  // respect what the web developer requested
476                       4096)      // choose our own buffer size -- 4KB for now
477       ,
478       mNumberOfOutputChannels(aNumberOfOutputChannels) {
479   MOZ_ASSERT(BufferSize() % WEBAUDIO_BLOCK_SIZE == 0, "Invalid buffer size");
480   ScriptProcessorNodeEngine* engine = new ScriptProcessorNodeEngine(
481       this, aContext->Destination(), BufferSize(), aNumberOfInputChannels);
482   mTrack = AudioNodeTrack::Create(
483       aContext, engine, AudioNodeTrack::NO_TRACK_FLAGS, aContext->Graph());
484 }
485 
486 ScriptProcessorNode::~ScriptProcessorNode() = default;
487 
SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const488 size_t ScriptProcessorNode::SizeOfExcludingThis(
489     MallocSizeOf aMallocSizeOf) const {
490   size_t amount = AudioNode::SizeOfExcludingThis(aMallocSizeOf);
491   return amount;
492 }
493 
SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const494 size_t ScriptProcessorNode::SizeOfIncludingThis(
495     MallocSizeOf aMallocSizeOf) const {
496   return aMallocSizeOf(this) + SizeOfExcludingThis(aMallocSizeOf);
497 }
498 
EventListenerAdded(nsAtom * aType)499 void ScriptProcessorNode::EventListenerAdded(nsAtom* aType) {
500   AudioNode::EventListenerAdded(aType);
501   if (aType == nsGkAtoms::onaudioprocess) {
502     UpdateConnectedStatus();
503   }
504 }
505 
EventListenerRemoved(nsAtom * aType)506 void ScriptProcessorNode::EventListenerRemoved(nsAtom* aType) {
507   AudioNode::EventListenerRemoved(aType);
508   if (aType == nsGkAtoms::onaudioprocess && mTrack) {
509     UpdateConnectedStatus();
510   }
511 }
512 
WrapObject(JSContext * aCx,JS::Handle<JSObject * > aGivenProto)513 JSObject* ScriptProcessorNode::WrapObject(JSContext* aCx,
514                                           JS::Handle<JSObject*> aGivenProto) {
515   return ScriptProcessorNode_Binding::Wrap(aCx, this, aGivenProto);
516 }
517 
UpdateConnectedStatus()518 void ScriptProcessorNode::UpdateConnectedStatus() {
519   bool isConnected =
520       mHasPhantomInput || !(OutputNodes().IsEmpty() &&
521                             OutputParams().IsEmpty() && InputNodes().IsEmpty());
522 
523   // Events are queued even when there is no listener because a listener
524   // may be added while events are in the queue.
525   SendInt32ParameterToTrack(ScriptProcessorNodeEngine::IS_CONNECTED,
526                             isConnected);
527 
528   if (isConnected && HasListenersFor(nsGkAtoms::onaudioprocess)) {
529     MarkActive();
530   } else {
531     MarkInactive();
532   }
533 
534   auto engine = static_cast<ScriptProcessorNodeEngine*>(mTrack->Engine());
535   engine->GetSharedBuffers()->NotifyNodeIsConnected(isConnected);
536 }
537 
538 }  // namespace mozilla::dom
539