/*
 * Copyright (C) 2011, Google Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1.  Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2.  Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 */

#ifndef THIRD_PARTY_BLINK_RENDERER_MODULES_WEBAUDIO_OFFLINE_AUDIO_DESTINATION_NODE_H_
#define THIRD_PARTY_BLINK_RENDERER_MODULES_WEBAUDIO_OFFLINE_AUDIO_DESTINATION_NODE_H_

#include <memory>
#include "base/memory/scoped_refptr.h"
#include "base/memory/weak_ptr.h"
#include "third_party/blink/renderer/modules/webaudio/audio_buffer.h"
#include "third_party/blink/renderer/modules/webaudio/audio_destination_node.h"
#include "third_party/blink/renderer/modules/webaudio/offline_audio_context.h"
#include "third_party/blink/renderer/platform/scheduler/public/thread.h"

namespace blink {

class BaseAudioContext;
class AudioBus;
class OfflineAudioContext;

class OfflineAudioDestinationHandler final : public AudioDestinationHandler {
 public:
  static scoped_refptr<OfflineAudioDestinationHandler> Create(
      AudioNode&,
      unsigned number_of_channels,
      uint32_t frames_to_process,
      float sample_rate);
  ~OfflineAudioDestinationHandler() override;

  // AudioHandler
  void Dispose() override;
  void Initialize() override;
  void Uninitialize() override;

  // AudioNode
  double TailTime() const override { return 0; }
  double LatencyTime() const override { return 0; }

  OfflineAudioContext* Context() const final;

  // AudioDestinationHandler
  void StartRendering() override;
  void StopRendering() override;
  void Pause() override;
  void Resume() override;
  uint32_t MaxChannelCount() const override;

  void RestartRendering() override;

  double SampleRate() const override { return sample_rate_; }

  size_t RenderQuantumFrames() const {
    return audio_utilities::kRenderQuantumFrames;
  }

  // This is called when rendering of the offline context is started, and it
  // saves the rendered audio data in |render_target|. This allows creation of
  // the AudioBuffer when startRendering is called instead of when the
  // OfflineAudioContext is created.
  void InitializeOfflineRenderThread(AudioBuffer* render_target);

  unsigned NumberOfChannels() const { return number_of_channels_; }

  bool RequiresTailProcessing() const final { return false; }

 private:
  OfflineAudioDestinationHandler(AudioNode&,
                                 unsigned number_of_channels,
                                 uint32_t frames_to_process,
                                 float sample_rate);

  // Set up the rendering and start. After setting the context up, it will
  // eventually call DoOfflineRendering().
  void StartOfflineRendering();

  // Suspend the rendering loop and notify the main thread to resolve the
  // associated promise.
  void SuspendOfflineRendering();

  // Start the rendering loop.
  void DoOfflineRendering();

  // Finish the rendering loop and notify the main thread to resolve the
  // promise with the rendered buffer.
  void FinishOfflineRendering();

  // Suspend/completion callbacks for the main thread.
  void NotifySuspend(size_t);
  void NotifyComplete();

  // The offline version of the render() method. If, after checking the
  // context's suspend state, the rendering needs to be suspended, this stops
  // the rendering and returns true. Otherwise, it renders one quantum and
  // returns false.
  bool RenderIfNotSuspended(AudioBus* source_bus,
                            AudioBus* destination_bus,
                            uint32_t number_of_frames);
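
  // For orientation, a rough sketch of how DoOfflineRendering() might drive
  // RenderIfNotSuspended() one quantum at a time (illustrative only; the
  // actual loop lives in the .cc file and also copies |render_bus_| into
  // |shared_render_target_|):
  //
  //   while (frames_to_process_ > 0) {
  //     if (RenderIfNotSuspended(nullptr, render_bus_.get(),
  //                              RenderQuantumFrames())) {
  //       return;  // Suspended; rendering resumes on context.resume().
  //     }
  //     frames_processed_ += RenderQuantumFrames();
  //     frames_to_process_ -= RenderQuantumFrames();
  //   }
  //   FinishOfflineRendering();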

  // Prepares a task runner for the rendering based on the operation mode
  // (i.e. non-AudioWorklet or AudioWorklet). This is called when the
  // rendering restarts, e.g. on context.resume() after context.suspend().
  // The only possible transition is from the non-AudioWorklet mode to the
  // AudioWorklet mode. Once the AudioWorklet mode is activated, the task
  // runner from AudioWorkletThread will be used until the rendering is
  // finished.
  void PrepareTaskRunnerForRendering();

  // For cross-thread posting, this object uses two different targets.
  // 1. rendering thread -> main thread: WeakPtr
  //    When the main thread starts deleting this object, a task posted with
  //    a WeakPtr from the rendering thread will be cancelled.
  // 2. main thread -> rendering thread: scoped_refptr
  //    |render_thread_| is owned by this object, so it is safe to target with
  //    WrapRefCounted() instead of GetWeakPtr().
  base::WeakPtr<OfflineAudioDestinationHandler> GetWeakPtr() {
    return weak_factory_.GetWeakPtr();
  }
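
  // A sketch of the two directions described above (illustrative call sites;
  // the exact posts live in the .cc file, and PostCrossThreadTask /
  // CrossThreadBindOnce are assumed from the Blink cross-thread helpers):
  //
  //   // rendering thread -> main thread: bound to a WeakPtr so the task is
  //   // dropped if this handler is already being destroyed.
  //   PostCrossThreadTask(
  //       *main_thread_task_runner_, FROM_HERE,
  //       CrossThreadBindOnce(&OfflineAudioDestinationHandler::NotifyComplete,
  //                           GetWeakPtr()));
  //
  //   // main thread -> rendering thread: |render_thread_| is owned by this
  //   // object, so a ref-counted target keeps it alive for the task.
  //   PostCrossThreadTask(
  //       *render_thread_task_runner_, FROM_HERE,
  //       CrossThreadBindOnce(
  //           &OfflineAudioDestinationHandler::StartOfflineRendering,
  //           WrapRefCounted(this)));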

  // This AudioHandler renders into this SharedAudioBuffer.
  std::unique_ptr<SharedAudioBuffer> shared_render_target_;
  // Temporary AudioBus for each render quantum.
  scoped_refptr<AudioBus> render_bus_;

  // These variables count the frames rendered so far and the frames that
  // remain to be processed.
  size_t frames_processed_;
  uint32_t frames_to_process_;

  // This flag is necessary to distinguish the state of the context between
  // 'created' and 'suspended'. If this flag is false and the current state
  // is 'suspended', it means the context has been created but rendering has
  // not started yet.
  bool is_rendering_started_;

  unsigned number_of_channels_;
  float sample_rate_;

  // The rendering thread for the non-AudioWorklet mode. For the AudioWorklet
  // mode, AudioWorkletThread drives the rendering instead.
  std::unique_ptr<Thread> render_thread_;

  scoped_refptr<base::SingleThreadTaskRunner> render_thread_task_runner_;
  scoped_refptr<base::SingleThreadTaskRunner> main_thread_task_runner_;

  base::WeakPtrFactory<OfflineAudioDestinationHandler> weak_factory_{this};
};

class OfflineAudioDestinationNode final : public AudioDestinationNode {
 public:
  static OfflineAudioDestinationNode* Create(BaseAudioContext*,
                                             unsigned number_of_channels,
                                             uint32_t frames_to_process,
                                             float sample_rate);

  OfflineAudioDestinationNode(BaseAudioContext&,
                              unsigned number_of_channels,
                              uint32_t frames_to_process,
                              float sample_rate);
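
  // The constructor is expected to create and attach the handler, roughly
  // (illustrative; SetHandler() is the AudioNode helper assumed here, and the
  // actual wiring is done in the .cc file):
  //
  //   SetHandler(OfflineAudioDestinationHandler::Create(
  //       *this, number_of_channels, frames_to_process, sample_rate));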

  AudioBuffer* DestinationBuffer() const { return destination_buffer_; }

  void SetDestinationBuffer(AudioBuffer* buffer) {
    destination_buffer_ = buffer;
  }

  void Trace(Visitor* visitor) const override;

 private:
  Member<AudioBuffer> destination_buffer_;
};

}  // namespace blink

#endif  // THIRD_PARTY_BLINK_RENDERER_MODULES_WEBAUDIO_OFFLINE_AUDIO_DESTINATION_NODE_H_