/*
 * Copyright (C) 2010 Google Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1.  Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 * 2.  Redistributions in binary form must reproduce the above copyright
 *     notice, this list of conditions and the following disclaimer in the
 *     documentation and/or other materials provided with the distribution.
 * 3.  Neither the name of Apple Computer, Inc. ("Apple") nor the names of
 *     its contributors may be used to endorse or promote products derived
 *     from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#if ENABLE(WEB_AUDIO)

#include "HRTFElevation.h"

#include "AudioBus.h"
#include "AudioFileReader.h"
#include "Biquad.h"
#include "FFTFrame.h"
#include "HRTFPanner.h"
#include <algorithm>
#include <math.h>
#include <wtf/OwnPtr.h>

using namespace std;

namespace WebCore {
const unsigned HRTFElevation::AzimuthSpacing = 15;
const unsigned HRTFElevation::NumberOfRawAzimuths = 360 / AzimuthSpacing;
const unsigned HRTFElevation::InterpolationFactor = 8;
const unsigned HRTFElevation::NumberOfTotalAzimuths = NumberOfRawAzimuths * InterpolationFactor;

// Takes advantage of the symmetry and creates a composite version of the two measured versions.  For example, we have both azimuth 30 and -30 degrees
// where the roles of left and right ears are reversed with respect to each other.
bool HRTFElevation::calculateSymmetricKernelsForAzimuthElevation(int azimuth, int elevation, double sampleRate, const String& subjectName,
                                                                 RefPtr<HRTFKernel>& kernelL, RefPtr<HRTFKernel>& kernelR)
{
    RefPtr<HRTFKernel> kernelL1;
    RefPtr<HRTFKernel> kernelR1;
    bool success = calculateKernelsForAzimuthElevation(azimuth, elevation, sampleRate, subjectName, kernelL1, kernelR1);
    if (!success)
        return false;

    // And symmetric version
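    // The symmetric measurement is the mirror image about 0 degrees, e.g. azimuth 30 pairs with azimuth 330 (-30 degrees).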
    int symmetricAzimuth = !azimuth ? 0 : 360 - azimuth;

    RefPtr<HRTFKernel> kernelL2;
    RefPtr<HRTFKernel> kernelR2;
    success = calculateKernelsForAzimuthElevation(symmetricAzimuth, elevation, sampleRate, subjectName, kernelL2, kernelR2);
    if (!success)
        return false;

    // Notice L/R reversal in symmetric version.
    kernelL = HRTFKernel::createInterpolatedKernel(kernelL1.get(), kernelR2.get(), 0.5);
    kernelR = HRTFKernel::createInterpolatedKernel(kernelR1.get(), kernelL2.get(), 0.5);

    return true;
}

bool HRTFElevation::calculateKernelsForAzimuthElevation(int azimuth, int elevation, double sampleRate, const String& subjectName,
                                                        RefPtr<HRTFKernel>& kernelL, RefPtr<HRTFKernel>& kernelR)
{
    // Valid values for azimuth are 0 -> 345 in 15 degree increments.
    // Valid values for elevation are -45 -> +90 in 15 degree increments.

    bool isAzimuthGood = azimuth >= 0 && azimuth <= 345 && (azimuth / 15) * 15 == azimuth;
    ASSERT(isAzimuthGood);
    if (!isAzimuthGood)
        return false;

    bool isElevationGood = elevation >= -45 && elevation <= 90 && (elevation / 15) * 15 == elevation;
    ASSERT(isElevationGood);
    if (!isElevationGood)
        return false;

    // Construct the resource name from the subject name, azimuth, and elevation, for example:
    // "IRC_Composite_C_R0195_T015_P000"
    // Note: the passed in subjectName is not a string passed in via JavaScript or the web.
    // It's passed in as an internal ASCII identifier and is an implementation detail.
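    // Negative elevations wrap into the resource name's 0..359 range, e.g. an elevation of -45 is encoded as P315.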
    int positiveElevation = elevation < 0 ? elevation + 360 : elevation;
    String resourceName = String::format("IRC_%s_C_R0195_T%03d_P%03d", subjectName.utf8().data(), azimuth, positiveElevation);

    OwnPtr<AudioBus> impulseResponse(AudioBus::loadPlatformResource(resourceName.utf8().data(), sampleRate));

    ASSERT(impulseResponse.get());
    if (!impulseResponse.get())
        return false;

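    // The raw impulse responses are 256 sample-frames long at 44.1 kHz, so scale the expected length for other sample rates.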
    size_t responseLength = impulseResponse->length();
    size_t expectedLength = static_cast<size_t>(256 * (sampleRate / 44100.0));

    // Check number of channels and length.  For now these are fixed and known.
    bool isBusGood = responseLength == expectedLength && impulseResponse->numberOfChannels() == 2;
    ASSERT(isBusGood);
    if (!isBusGood)
        return false;

    AudioChannel* leftEarImpulseResponse = impulseResponse->channelByType(AudioBus::ChannelLeft);
    AudioChannel* rightEarImpulseResponse = impulseResponse->channelByType(AudioBus::ChannelRight);

    // Note that depending on the fftSize returned by the panner, we may be truncating the impulse response we just loaded in.
    const size_t fftSize = HRTFPanner::fftSizeForSampleRate(sampleRate);
    kernelL = HRTFKernel::create(leftEarImpulseResponse, fftSize, sampleRate, true);
    kernelR = HRTFKernel::create(rightEarImpulseResponse, fftSize, sampleRate, true);

    return true;
}

// The range of elevations for the IRCAM impulse responses varies depending on azimuth, but the minimum elevation appears to always be -45.
//
// The maximum elevation available for each raw azimuth is listed below:
static int maxElevations[] = {
        //  Azimuth
        //
    90, // 0
    45, // 15
    60, // 30
    45, // 45
    75, // 60
    45, // 75
    60, // 90
    45, // 105
    75, // 120
    45, // 135
    60, // 150
    45, // 165
    75, // 180
    45, // 195
    60, // 210
    45, // 225
    75, // 240
    45, // 255
    60, // 270
    45, // 285
    75, // 300
    45, // 315
    60, // 330
    45  // 345
};

PassOwnPtr<HRTFElevation> HRTFElevation::createForSubject(const String& subjectName, int elevation, double sampleRate)
{
    bool isElevationGood = elevation >= -45 && elevation <= 90 && (elevation / 15) * 15 == elevation;
    ASSERT(isElevationGood);
    if (!isElevationGood)
        return 0;

    OwnPtr<HRTFKernelList> kernelListL = adoptPtr(new HRTFKernelList(NumberOfTotalAzimuths));
    OwnPtr<HRTFKernelList> kernelListR = adoptPtr(new HRTFKernelList(NumberOfTotalAzimuths));

    // Load convolution kernels from HRTF files.
    int interpolatedIndex = 0;
    for (unsigned rawIndex = 0; rawIndex < NumberOfRawAzimuths; ++rawIndex) {
        // Don't let elevation exceed maximum for this azimuth.
        int maxElevation = maxElevations[rawIndex];
        int actualElevation = min(elevation, maxElevation);

        bool success = calculateKernelsForAzimuthElevation(rawIndex * AzimuthSpacing, actualElevation, sampleRate, subjectName, kernelListL->at(interpolatedIndex), kernelListR->at(interpolatedIndex));
        if (!success)
            return 0;

        interpolatedIndex += InterpolationFactor;
    }

    // Now go back and interpolate intermediate azimuth values.
    for (unsigned i = 0; i < NumberOfTotalAzimuths; i += InterpolationFactor) {
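        // The next raw azimuth index, wrapping around so the last raw azimuth interpolates back toward azimuth 0.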
        int j = (i + InterpolationFactor) % NumberOfTotalAzimuths;

        // Create the interpolated convolution kernels and delays.
        for (unsigned jj = 1; jj < InterpolationFactor; ++jj) {
            double x = double(jj) / double(InterpolationFactor); // interpolate from 0 -> 1

            (*kernelListL)[i + jj] = HRTFKernel::createInterpolatedKernel(kernelListL->at(i).get(), kernelListL->at(j).get(), x);
            (*kernelListR)[i + jj] = HRTFKernel::createInterpolatedKernel(kernelListR->at(i).get(), kernelListR->at(j).get(), x);
        }
    }

    OwnPtr<HRTFElevation> hrtfElevation = adoptPtr(new HRTFElevation(kernelListL.release(), kernelListR.release(), elevation, sampleRate));
    return hrtfElevation.release();
}

PassOwnPtr<HRTFElevation> HRTFElevation::createByInterpolatingSlices(HRTFElevation* hrtfElevation1, HRTFElevation* hrtfElevation2, double x, double sampleRate)
{
    ASSERT(hrtfElevation1 && hrtfElevation2);
    if (!hrtfElevation1 || !hrtfElevation2)
        return 0;

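    // x is the blend fraction between the two elevations: 0 corresponds to hrtfElevation1, values approaching 1 to hrtfElevation2.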
    ASSERT(x >= 0.0 && x < 1.0);

    OwnPtr<HRTFKernelList> kernelListL = adoptPtr(new HRTFKernelList(NumberOfTotalAzimuths));
    OwnPtr<HRTFKernelList> kernelListR = adoptPtr(new HRTFKernelList(NumberOfTotalAzimuths));

    HRTFKernelList* kernelListL1 = hrtfElevation1->kernelListL();
    HRTFKernelList* kernelListR1 = hrtfElevation1->kernelListR();
    HRTFKernelList* kernelListL2 = hrtfElevation2->kernelListL();
    HRTFKernelList* kernelListR2 = hrtfElevation2->kernelListR();

    // Interpolate kernels of corresponding azimuths of the two elevations.
    for (unsigned i = 0; i < NumberOfTotalAzimuths; ++i) {
        (*kernelListL)[i] = HRTFKernel::createInterpolatedKernel(kernelListL1->at(i).get(), kernelListL2->at(i).get(), x);
        (*kernelListR)[i] = HRTFKernel::createInterpolatedKernel(kernelListR1->at(i).get(), kernelListR2->at(i).get(), x);
    }

    // Interpolate elevation angle.
    double angle = (1.0 - x) * hrtfElevation1->elevationAngle() + x * hrtfElevation2->elevationAngle();

    OwnPtr<HRTFElevation> hrtfElevation = adoptPtr(new HRTFElevation(kernelListL.release(), kernelListR.release(), static_cast<int>(angle), sampleRate));
    return hrtfElevation.release();
}

void HRTFElevation::getKernelsFromAzimuth(double azimuthBlend, unsigned azimuthIndex, HRTFKernel* &kernelL, HRTFKernel* &kernelR, double& frameDelayL, double& frameDelayR)
{
    bool checkAzimuthBlend = azimuthBlend >= 0.0 && azimuthBlend < 1.0;
    ASSERT(checkAzimuthBlend);
    if (!checkAzimuthBlend)
        azimuthBlend = 0.0;

    unsigned numKernels = m_kernelListL->size();

    bool isIndexGood = azimuthIndex < numKernels;
    ASSERT(isIndexGood);
    if (!isIndexGood) {
        kernelL = 0;
        kernelR = 0;
        return;
    }

    // Return the left and right kernels.
    kernelL = m_kernelListL->at(azimuthIndex).get();
    kernelR = m_kernelListR->at(azimuthIndex).get();

    frameDelayL = m_kernelListL->at(azimuthIndex)->frameDelay();
    frameDelayR = m_kernelListR->at(azimuthIndex)->frameDelay();

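    // Blend the frame delays with the neighboring azimuth (wrapping around at the last azimuth); the kernels themselves are not blended here.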
    int azimuthIndex2 = (azimuthIndex + 1) % numKernels;
    double frameDelay2L = m_kernelListL->at(azimuthIndex2)->frameDelay();
    double frameDelay2R = m_kernelListR->at(azimuthIndex2)->frameDelay();

    // Linearly interpolate delays.
    frameDelayL = (1.0 - azimuthBlend) * frameDelayL + azimuthBlend * frameDelay2L;
    frameDelayR = (1.0 - azimuthBlend) * frameDelayR + azimuthBlend * frameDelay2R;
}

} // namespace WebCore

#endif // ENABLE(WEB_AUDIO)