/*
 * Copyright (C) 2012, Google Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1.  Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2.  Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 */

#include "third_party/blink/renderer/modules/webaudio/offline_audio_context.h"

#include "base/metrics/histogram_functions.h"
#include "third_party/blink/public/platform/platform.h"
#include "third_party/blink/renderer/bindings/modules/v8/v8_offline_audio_context_options.h"
#include "third_party/blink/renderer/core/dom/document.h"
#include "third_party/blink/renderer/core/dom/dom_exception.h"
#include "third_party/blink/renderer/core/execution_context/execution_context.h"
#include "third_party/blink/renderer/modules/webaudio/audio_listener.h"
#include "third_party/blink/renderer/modules/webaudio/deferred_task_handler.h"
#include "third_party/blink/renderer/modules/webaudio/offline_audio_completion_event.h"
#include "third_party/blink/renderer/modules/webaudio/offline_audio_destination_node.h"
#include "third_party/blink/renderer/platform/audio/audio_utilities.h"
#include "third_party/blink/renderer/platform/bindings/exception_messages.h"
#include "third_party/blink/renderer/platform/bindings/exception_state.h"
#include "third_party/blink/renderer/platform/bindings/script_state.h"
#include "third_party/blink/renderer/platform/heap/heap.h"
#include "third_party/blink/renderer/platform/wtf/cross_thread_functional.h"

namespace blink {

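// Backs the script-visible OfflineAudioContext(numberOfChannels, length,
// sampleRate) constructor. A typical call from script (illustrative only):
//   new OfflineAudioContext(2, 10 * 44100, 44100);  // 10 s of stereo audio.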
OfflineAudioContext* OfflineAudioContext::Create(
    ExecutionContext* context,
    unsigned number_of_channels,
    unsigned number_of_frames,
    float sample_rate,
    ExceptionState& exception_state) {
  // FIXME: add support for workers.
  auto* document = Document::DynamicFrom(context);
  if (!document) {
    exception_state.ThrowDOMException(DOMExceptionCode::kNotSupportedError,
                                      "Workers are not supported.");
    return nullptr;
  }

  if (document->IsDetached()) {
    exception_state.ThrowDOMException(
        DOMExceptionCode::kNotSupportedError,
        "Cannot create OfflineAudioContext on a detached document.");
    return nullptr;
  }

  if (!number_of_frames) {
    exception_state.ThrowDOMException(
        DOMExceptionCode::kNotSupportedError,
        ExceptionMessages::IndexExceedsMinimumBound<unsigned>(
            "number of frames", number_of_frames, 1));
    return nullptr;
  }

  if (number_of_channels == 0 ||
      number_of_channels > BaseAudioContext::MaxNumberOfChannels()) {
    exception_state.ThrowDOMException(
        DOMExceptionCode::kNotSupportedError,
        ExceptionMessages::IndexOutsideRange<unsigned>(
            "number of channels", number_of_channels, 1,
            ExceptionMessages::kInclusiveBound,
            BaseAudioContext::MaxNumberOfChannels(),
            ExceptionMessages::kInclusiveBound));
    return nullptr;
  }

  if (!audio_utilities::IsValidAudioBufferSampleRate(sample_rate)) {
    exception_state.ThrowDOMException(
        DOMExceptionCode::kNotSupportedError,
        ExceptionMessages::IndexOutsideRange(
            "sampleRate", sample_rate,
            audio_utilities::MinAudioBufferSampleRate(),
            ExceptionMessages::kInclusiveBound,
            audio_utilities::MaxAudioBufferSampleRate(),
            ExceptionMessages::kInclusiveBound));
    return nullptr;
  }

  OfflineAudioContext* audio_context =
      MakeGarbageCollected<OfflineAudioContext>(document, number_of_channels,
                                                number_of_frames, sample_rate,
                                                exception_state);
  audio_context->UpdateStateIfNeeded();

#if DEBUG_AUDIONODE_REFERENCES
  fprintf(stderr, "[%16p]: OfflineAudioContext::OfflineAudioContext()\n",
          audio_context);
#endif
  base::UmaHistogramSparse("WebAudio.OfflineAudioContext.ChannelCount",
                           number_of_channels);
  // Arbitrarily limit the maximum length to 1 million frames (about 20
  // seconds at 48 kHz).  The number of buckets is fairly arbitrary.
  base::UmaHistogramCounts1M("WebAudio.OfflineAudioContext.Length",
                             number_of_frames);
  // The limits are the min and max AudioBuffer sample rates currently
  // supported.  We use explicit values here instead of
  // audio_utilities::MinAudioBufferSampleRate() and
  // audio_utilities::MaxAudioBufferSampleRate().  The number of buckets is
  // fairly arbitrary.
  base::UmaHistogramCustomCounts(
      "WebAudio.OfflineAudioContext.SampleRate384kHz", sample_rate, 3000,
      384000, 50);

  return audio_context;
}

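// Backs the dictionary form of the constructor. A typical call from script
// (illustrative only):
//   new OfflineAudioContext(
//       {numberOfChannels: 2, length: 10 * 44100, sampleRate: 44100});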
OfflineAudioContext* OfflineAudioContext::Create(
    ExecutionContext* context,
    const OfflineAudioContextOptions* options,
    ExceptionState& exception_state) {
  OfflineAudioContext* offline_context =
      Create(context, options->numberOfChannels(), options->length(),
             options->sampleRate(), exception_state);

  return offline_context;
}

OfflineAudioContext::OfflineAudioContext(Document* document,
                                         unsigned number_of_channels,
                                         uint32_t number_of_frames,
                                         float sample_rate,
                                         ExceptionState& exception_state)
    : BaseAudioContext(document, kOfflineContext),
      is_rendering_started_(false),
      total_render_frames_(number_of_frames) {
  destination_node_ = OfflineAudioDestinationNode::Create(
      this, number_of_channels, number_of_frames, sample_rate);
  Initialize();
}

OfflineAudioContext::~OfflineAudioContext() {
#if DEBUG_AUDIONODE_REFERENCES
  fprintf(stderr, "[%16p]: OfflineAudioContext::~OfflineAudioContext()\n",
          this);
#endif
}

void OfflineAudioContext::Trace(Visitor* visitor) {
  visitor->Trace(complete_resolver_);
  visitor->Trace(scheduled_suspends_);
  BaseAudioContext::Trace(visitor);
}

ScriptPromise OfflineAudioContext::startOfflineRendering(
    ScriptState* script_state,
    ExceptionState& exception_state) {
  DCHECK(IsMainThread());

  // Calling close() on an OfflineAudioContext is not supported/allowed,
  // but it might well have been stopped by its execution context.
  //
  // See: crbug.com/435867
  if (IsContextClosed()) {
    exception_state.ThrowDOMException(
        DOMExceptionCode::kInvalidStateError,
        "cannot call startRendering on an OfflineAudioContext in a stopped "
        "state.");
    return ScriptPromise();
  }

  // If the context is not in the suspended state (i.e. running), reject the
  // promise.
  if (ContextState() != AudioContextState::kSuspended) {
    exception_state.ThrowDOMException(
        DOMExceptionCode::kInvalidStateError,
        "cannot startRendering when an OfflineAudioContext is " + state());
    return ScriptPromise();
  }

  // Can't call startRendering more than once.  Return a rejected promise now.
  if (is_rendering_started_) {
    exception_state.ThrowDOMException(
        DOMExceptionCode::kInvalidStateError,
        "cannot call startRendering more than once");
    return ScriptPromise();
  }

  DCHECK(!is_rendering_started_);

  complete_resolver_ =
      MakeGarbageCollected<ScriptPromiseResolver>(script_state);

  // Allocate the AudioBuffer to hold the rendered result.
  float sample_rate = DestinationHandler().SampleRate();
  unsigned number_of_channels = DestinationHandler().NumberOfChannels();

  AudioBuffer* render_target = AudioBuffer::CreateUninitialized(
      number_of_channels, total_render_frames_, sample_rate);

  if (!render_target) {
    exception_state.ThrowDOMException(
        DOMExceptionCode::kNotSupportedError,
        "startRendering failed to create AudioBuffer(" +
            String::Number(number_of_channels) + ", " +
            String::Number(total_render_frames_) + ", " +
            String::Number(sample_rate) + ")");
    return ScriptPromise();
  }

  // Start rendering and return the promise.
  is_rendering_started_ = true;
  SetContextState(kRunning);
  static_cast<OfflineAudioDestinationNode*>(destination())
      ->SetDestinationBuffer(render_target);
  DestinationHandler().InitializeOfflineRenderThread(render_target);
  DestinationHandler().StartRendering();

  return complete_resolver_->Promise();
}

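// Backs OfflineAudioContext.suspend(when), which schedules a suspension of
// rendering at |when| seconds. Typical script usage (illustrative only):
//   context.suspend(0.5).then(() => { /* adjust the graph */ context.resume(); });
//   context.startRendering();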
ScriptPromise OfflineAudioContext::suspendContext(ScriptState* script_state,
                                                  double when) {
  DCHECK(IsMainThread());

  auto* resolver = MakeGarbageCollected<ScriptPromiseResolver>(script_state);
  ScriptPromise promise = resolver->Promise();

  // If the rendering is finished, reject the promise.
  if (ContextState() == AudioContextState::kClosed) {
    resolver->Reject(MakeGarbageCollected<DOMException>(
        DOMExceptionCode::kInvalidStateError,
        "the rendering is already finished"));
    return promise;
  }

  // The specified suspend time is negative; reject the promise.
  if (when < 0) {
    resolver->Reject(MakeGarbageCollected<DOMException>(
        DOMExceptionCode::kInvalidStateError,
        "negative suspend time (" + String::Number(when) + ") is not allowed"));
    return promise;
  }

  // The suspend time must be earlier than the total render duration. If the
  // requested suspension time is greater than or equal to the total render
  // duration, the promise is rejected.
  double total_render_duration = total_render_frames_ / sampleRate();
  if (total_render_duration <= when) {
    resolver->Reject(MakeGarbageCollected<DOMException>(
        DOMExceptionCode::kInvalidStateError,
        "cannot schedule a suspend at " +
            String::NumberToStringECMAScript(when) +
            " seconds because it is greater than or equal to the total "
            "render duration of " +
            String::Number(total_render_frames_) + " frames (" +
            String::NumberToStringECMAScript(total_render_duration) +
            " seconds)"));
    return promise;
  }

  // Find the sample frame and round up to the nearest render quantum
  // boundary.  This assumes the render quantum is a power of two.
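  // For example, at a 44100 Hz sample rate, when = 0.01 s maps to frame 441,
  // which, with the usual 128-frame render quantum, rounds up to frame 512.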
  size_t frame = when * sampleRate();
  frame = audio_utilities::kRenderQuantumFrames *
          ((frame + audio_utilities::kRenderQuantumFrames - 1) /
           audio_utilities::kRenderQuantumFrames);

  // The specified suspend time is in the past; reject the promise.
  if (frame < CurrentSampleFrame()) {
    size_t current_frame_clamped =
        std::min(CurrentSampleFrame(), static_cast<size_t>(length()));
    double current_time_clamped =
        std::min(currentTime(), length() / static_cast<double>(sampleRate()));
    resolver->Reject(MakeGarbageCollected<DOMException>(
        DOMExceptionCode::kInvalidStateError,
        "suspend(" + String::Number(when) + ") failed to suspend at frame " +
            String::Number(frame) + " because it is earlier than the current " +
            "frame of " + String::Number(current_frame_clamped) + " (" +
            String::Number(current_time_clamped) + " seconds)"));
    return promise;
  }

  // Wait until the suspend map is available for the insertion. Here we should
  // use GraphAutoLocker because it locks the graph from the main thread.
  GraphAutoLocker locker(this);

  // If there is a duplicate suspension at the same quantized frame,
  // reject the promise.
  if (scheduled_suspends_.Contains(frame)) {
    resolver->Reject(MakeGarbageCollected<DOMException>(
        DOMExceptionCode::kInvalidStateError,
        "cannot schedule more than one suspend at frame " +
            String::Number(frame) + " (" + String::Number(when) + " seconds)"));
    return promise;
  }

  scheduled_suspends_.insert(frame, resolver);

  return promise;
}

ScriptPromise OfflineAudioContext::resumeContext(ScriptState* script_state) {
  DCHECK(IsMainThread());

  auto* resolver = MakeGarbageCollected<ScriptPromiseResolver>(script_state);
  ScriptPromise promise = resolver->Promise();

  // If the rendering has not started, reject the promise.
  if (!is_rendering_started_) {
    resolver->Reject(MakeGarbageCollected<DOMException>(
        DOMExceptionCode::kInvalidStateError,
        "cannot resume an offline context that has not started"));
    return promise;
  }

  // If the context is in a closed state or it really is closed (cleared),
  // reject the promise.
  if (IsContextClosed()) {
    resolver->Reject(MakeGarbageCollected<DOMException>(
        DOMExceptionCode::kInvalidStateError,
        "cannot resume a closed offline context"));
    return promise;
  }

  // If the context is already running, resolve the promise without altering
  // the current state or starting the rendering loop.
  if (ContextState() == AudioContextState::kRunning) {
    resolver->Resolve();
    return promise;
  }

  DCHECK_EQ(ContextState(), AudioContextState::kSuspended);

  // If the context is suspended, resume rendering by setting the state to
  // "Running" and calling StartRendering() on the destination handler. Note
  // that resuming is possible only after rendering has started.
  SetContextState(kRunning);
  DestinationHandler().StartRendering();

  // Resolve the promise immediately.
  resolver->Resolve();

  return promise;
}

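// Called on the main thread when offline rendering has finished: dispatches
// the "complete" event with the rendered buffer and settles the promise
// returned by startRendering().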
void OfflineAudioContext::FireCompletionEvent() {
  DCHECK(IsMainThread());

  // Context is finished, so remove any tail processing nodes; there's nowhere
  // for the output to go.
  GetDeferredTaskHandler().FinishTailProcessing();

  // We set the state to closed here so that the oncomplete event handler sees
  // that the context has been closed.
  SetContextState(kClosed);

  // Avoid firing the event if the document has already gone away.
  if (GetExecutionContext()) {
    AudioBuffer* rendered_buffer =
        static_cast<OfflineAudioDestinationNode*>(destination())
            ->DestinationBuffer();
    DCHECK(rendered_buffer);
    if (!rendered_buffer)
      return;

    // Call the offline rendering completion event listener and resolve the
    // promise too.
    DispatchEvent(*OfflineAudioCompletionEvent::Create(rendered_buffer));
    complete_resolver_->Resolve(rendered_buffer);
  } else {
    // The resolver should be rejected when the execution context is gone.
    complete_resolver_->Reject(MakeGarbageCollected<DOMException>(
        DOMExceptionCode::kInvalidStateError,
        "the execution context does not exist"));
  }

  is_rendering_started_ = false;

  PerformCleanupOnMainThread();
}

bool OfflineAudioContext::HandlePreRenderTasks(
    const AudioIOPosition* output_position,
    const AudioCallbackMetric* metric) {
  // TODO(hongchan, rtoy): passing |nullptr| as an argument is not a good
  // pattern. Consider rewriting this method/interface.
  DCHECK_EQ(output_position, nullptr);
  DCHECK_EQ(metric, nullptr);

  DCHECK(IsAudioThread());

  // OfflineGraphAutoLocker here locks the audio graph for this scope. Note
  // that this locker does not use tryLock() inside because the timing of
  // suspension MUST NOT be delayed.
  OfflineGraphAutoLocker locker(this);

  // Update the dirty state of the listener.
  listener()->UpdateState();

  GetDeferredTaskHandler().HandleDeferredTasks();
  HandleStoppableSourceNodes();

  return ShouldSuspend();
}

void OfflineAudioContext::HandlePostRenderTasks() {
  DCHECK(IsAudioThread());

  // OfflineGraphAutoLocker here locks the audio graph for the same reason as
  // above in |HandlePreRenderTasks|.
  {
    OfflineGraphAutoLocker locker(this);

    GetDeferredTaskHandler().BreakConnections();
    GetDeferredTaskHandler().HandleDeferredTasks();
    GetDeferredTaskHandler().RequestToDeleteHandlersOnMainThread();
  }
}

OfflineAudioDestinationHandler& OfflineAudioContext::DestinationHandler() {
  return static_cast<OfflineAudioDestinationHandler&>(
      destination()->GetAudioDestinationHandler());
}

void OfflineAudioContext::ResolveSuspendOnMainThread(size_t frame) {
  DCHECK(IsMainThread());

  // Suspend the context first. This will fire an onstatechange event.
  SetContextState(kSuspended);

  // Wait until the suspend map is available for the removal.
  GraphAutoLocker locker(this);

  // If the context is going away, |scheduled_suspends_| could have had all its
  // entries removed.  Check for that here.
  if (scheduled_suspends_.size()) {
    // |frame| must exist in the map.
    DCHECK(scheduled_suspends_.Contains(frame));

    SuspendMap::iterator it = scheduled_suspends_.find(frame);
    it->value->Resolve();

    scheduled_suspends_.erase(it);
  }
}

void OfflineAudioContext::RejectPendingResolvers() {
  DCHECK(IsMainThread());

  // Wait until the suspend map is available for removal.
  GraphAutoLocker locker(this);

  // Offline context is going away so reject any promises that are still
  // pending.

  for (auto& pending_suspend_resolver : scheduled_suspends_) {
    pending_suspend_resolver.value->Reject(MakeGarbageCollected<DOMException>(
        DOMExceptionCode::kInvalidStateError, "Audio context is going away"));
  }

  scheduled_suspends_.clear();
  DCHECK_EQ(resume_resolvers_.size(), 0u);

  RejectPendingDecodeAudioDataResolvers();
}

bool OfflineAudioContext::IsPullingAudioGraph() const {
  DCHECK(IsMainThread());

  // For an offline context, we're rendering only while the context is running.
  // Unlike an AudioContext, there's no audio device that keeps pulling on the
  // graph after the context has finished rendering.
  return ContextState() == BaseAudioContext::kRunning;
}

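// Called from HandlePreRenderTasks() on the audio thread; reports whether a
// suspend has been scheduled at the current sample frame so rendering can be
// paused there.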
bool OfflineAudioContext::ShouldSuspend() {
  DCHECK(IsAudioThread());

  // Note that the GraphLock is required before this check. Since this needs
  // to run on the audio thread, OfflineGraphAutoLocker must be used.
  if (scheduled_suspends_.Contains(CurrentSampleFrame()))
    return true;

  return false;
}

bool OfflineAudioContext::HasPendingActivity() const {
  return is_rendering_started_;
}

}  // namespace blink