| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (C) 2010 Google Inc. All rights reserved. | 2 * Copyright (C) 2010 Google Inc. All rights reserved. |
| 3 * | 3 * |
| 4 * Redistribution and use in source and binary forms, with or without | 4 * Redistribution and use in source and binary forms, with or without |
| 5 * modification, are permitted provided that the following conditions | 5 * modification, are permitted provided that the following conditions |
| 6 * are met: | 6 * are met: |
| 7 * | 7 * |
| 8 * 1. Redistributions of source code must retain the above copyright | 8 * 1. Redistributions of source code must retain the above copyright |
| 9 * notice, this list of conditions and the following disclaimer. | 9 * notice, this list of conditions and the following disclaimer. |
| 10 * 2. Redistributions in binary form must reproduce the above copyright | 10 * 2. Redistributions in binary form must reproduce the above copyright |
| (...skipping 39 matching lines...) |
| 50 const size_t TotalNumberOfResponses = 240; | 50 const size_t TotalNumberOfResponses = 240; |
| 51 | 51 |
| 52 // Number of frames in an individual impulse response. | 52 // Number of frames in an individual impulse response. |
| 53 const size_t ResponseFrameSize = 256; | 53 const size_t ResponseFrameSize = 256; |
| 54 | 54 |
| 55 // Sample-rate of the spatialization impulse responses as stored in the resource file. | 55 // Sample-rate of the spatialization impulse responses as stored in the resource file. |
| 56 // The impulse responses may be resampled to a different sample-rate (depending on the audio hardware) when they are loaded. | 56 // The impulse responses may be resampled to a different sample-rate (depending on the audio hardware) when they are loaded. |
| 57 const float ResponseSampleRate = 44100; | 57 const float ResponseSampleRate = 44100; |
| 58 | 58 |
| 59 #if USE(CONCATENATED_IMPULSE_RESPONSES) | 59 #if USE(CONCATENATED_IMPULSE_RESPONSES) |
| 60 |
| 61 // This table maps the index into the elevation table to the corresponding angle. See |
| 62 // https://bugs.webkit.org/show_bug.cgi?id=98294#c9 for the elevation angles and their order in the |
| 63 // concatenated response. |
| 64 const int ElevationIndexTableSize = 10; |
| 65 const int ElevationIndexTable[ElevationIndexTableSize] = { |
| 66 0, 15, 30, 45, 60, 75, 90, 315, 330, 345 |
| 67 }; |
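For reference, the entries in ElevationIndexTable are the elevation angles in degrees, listed in the order they occur in the concatenated response; negative elevations are stored wrapped into [0, 360), so, reading the table above together with the positiveElevation wrapping later in this diff, an elevation of -45 degrees corresponds to the 315 entry at index 7 and -15 degrees to the 345 entry at index 9.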
| 68 |
| 60 // Lazily load a concatenated HRTF database for given subject and store it in a | 69 // Lazily load a concatenated HRTF database for given subject and store it in a |
| 61 // local hash table to ensure quick efficient future retrievals. | 70 // local hash table to ensure quick efficient future retrievals. |
| 62 static PassRefPtr<AudioBus> getConcatenatedImpulseResponsesForSubject(const String& subjectName) | 71 static PassRefPtr<AudioBus> getConcatenatedImpulseResponsesForSubject(const String& subjectName) |
| 63 { | 72 { |
| 64 typedef HashMap<String, RefPtr<AudioBus> > AudioBusMap; | 73 typedef HashMap<String, RefPtr<AudioBus> > AudioBusMap; |
| 65 DEFINE_STATIC_LOCAL(AudioBusMap, audioBusMap, ()); | 74 DEFINE_STATIC_LOCAL(AudioBusMap, audioBusMap, ()); |
| 66 DEFINE_STATIC_LOCAL(Mutex, mutex, ()); | 75 DEFINE_STATIC_LOCAL(Mutex, mutex, ()); |
| 67 | 76 |
| 68 MutexLocker locker(mutex); | 77 MutexLocker locker(mutex); |
| 69 RefPtr<AudioBus> bus; | 78 RefPtr<AudioBus> bus; |
| (...skipping 43 matching lines...) |
| 113 // Note: the passed in subjectName is not a string passed in via JavaScript or the web. | 122 // Note: the passed in subjectName is not a string passed in via JavaScript or the web. |
| 114 // It's passed in as an internal ASCII identifier and is an implementation detail. | 123 // It's passed in as an internal ASCII identifier and is an implementation detail. |
| 115 int positiveElevation = elevation < 0 ? elevation + 360 : elevation; | 124 int positiveElevation = elevation < 0 ? elevation + 360 : elevation; |
| 116 | 125 |
| 117 #if USE(CONCATENATED_IMPULSE_RESPONSES) | 126 #if USE(CONCATENATED_IMPULSE_RESPONSES) |
| 118 RefPtr<AudioBus> bus(getConcatenatedImpulseResponsesForSubject(subjectName)); | 127 RefPtr<AudioBus> bus(getConcatenatedImpulseResponsesForSubject(subjectName)); |
| 119 | 128 |
| 120 if (!bus) | 129 if (!bus) |
| 121 return false; | 130 return false; |
| 122 | 131 |
| 123 int elevationIndex = positiveElevation / AzimuthSpacing; | 132 // Just sequentially search the table to find the correct index. |
| 124 if (positiveElevation > 90) | 133 int elevationIndex = -1; |
| 125 elevationIndex -= AzimuthSpacing; | 134 |
| 135 for (int k = 0; k < ElevationIndexTableSize; ++k) { |
| 136 if (ElevationIndexTable[k] == positiveElevation) { |
| 137 elevationIndex = k; |
| 138 break; |
| 139 } |
| 140 } |
| 141 |
| 142 bool isElevationIndexGood = (elevationIndex >= 0) && (elevationIndex < ElevationIndexTableSize); |
| 143 ASSERT(isElevationIndexGood); |
| 144 if (!isElevationIndexGood) |
| 145 return false; |
| 126 | 146 |
| 127 // The concatenated impulse response is a bus containing all | 147 // The concatenated impulse response is a bus containing all |
| 128 // the elevations per azimuth, for all azimuths by increasing | 148 // the elevations per azimuth, for all azimuths by increasing |
| 129 // order. So for a given azimuth and elevation we need to compute | 149 // order. So for a given azimuth and elevation we need to compute |
| 130 // the index of the wanted audio frames in the concatenated table. | 150 // the index of the wanted audio frames in the concatenated table. |
| 131 unsigned index = ((azimuth / AzimuthSpacing) * HRTFDatabase::NumberOfRawElevations) + elevationIndex; | 151 unsigned index = ((azimuth / AzimuthSpacing) * HRTFDatabase::NumberOfRawElevations) + elevationIndex; |
| 132 bool isIndexGood = index < TotalNumberOfResponses; | 152 bool isIndexGood = index < TotalNumberOfResponses; |
| 133 ASSERT(isIndexGood); | 153 ASSERT(isIndexGood); |
| 134 if (!isIndexGood) | 154 if (!isIndexGood) |
| 135 return false; | 155 return false; |
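A quick worked example of the indexing (assuming AzimuthSpacing is 15 degrees and HRTFDatabase::NumberOfRawElevations is 10, neither of which is visible in this hunk): an azimuth of 30 degrees with an elevation of -30 degrees wraps to positiveElevation = 330, the table lookup yields elevationIndex = 8, and index = (30 / 15) * 10 + 8 = 28, well within TotalNumberOfResponses (240).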
| (...skipping 168 matching lines...) |
| 304 double frameDelay2R = m_kernelListR->at(azimuthIndex2)->frameDelay(); | 324 double frameDelay2R = m_kernelListR->at(azimuthIndex2)->frameDelay(); |
| 305 | 325 |
| 306 // Linearly interpolate delays. | 326 // Linearly interpolate delays. |
| 307 frameDelayL = (1.0 - azimuthBlend) * frameDelayL + azimuthBlend * frameDelay2L; | 327 frameDelayL = (1.0 - azimuthBlend) * frameDelayL + azimuthBlend * frameDelay2L; |
| 308 frameDelayR = (1.0 - azimuthBlend) * frameDelayR + azimuthBlend * frameDelay2R; | 328 frameDelayR = (1.0 - azimuthBlend) * frameDelayR + azimuthBlend * frameDelay2R; |
| 309 } | 329 } |
| 310 | 330 |
| 311 } // namespace blink | 331 } // namespace blink |
| 312 | 332 |
| 313 #endif // ENABLE(WEB_AUDIO) | 333 #endif // ENABLE(WEB_AUDIO) |