OLD | NEW |
---|---|
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "media/audio/mac/audio_low_latency_input_mac.h" | 5 #include "media/audio/mac/audio_low_latency_input_mac.h" |
6 | 6 |
7 #include <CoreServices/CoreServices.h> | 7 #include <CoreServices/CoreServices.h> |
8 | 8 |
9 #include "base/basictypes.h" | 9 #include "base/basictypes.h" |
10 #include "base/logging.h" | 10 #include "base/logging.h" |
(...skipping 46 matching lines...) | |
57 format_.mBytesPerPacket = (format_.mBitsPerChannel * | 57 format_.mBytesPerPacket = (format_.mBitsPerChannel * |
58 input_params.channels()) / 8; | 58 input_params.channels()) / 8; |
59 format_.mBytesPerFrame = format_.mBytesPerPacket; | 59 format_.mBytesPerFrame = format_.mBytesPerPacket; |
60 format_.mReserved = 0; | 60 format_.mReserved = 0; |
61 | 61 |
62 DVLOG(1) << "Desired output format: " << format_; | 62 DVLOG(1) << "Desired output format: " << format_; |
63 | 63 |
64 // Set number of sample frames per callback used by the internal audio layer. | 64 // Set number of sample frames per callback used by the internal audio layer. |
65 // An internal FIFO is then utilized to adapt the internal size to the size | 65 // An internal FIFO is then utilized to adapt the internal size to the size |
66 // requested by the client. | 66 // requested by the client. |
67 // Note that we use the same native buffer size as for the output side here | |
68 // since the AUHAL implementation requires that both capture and render side | |
69 // use the same buffer size. See http://crbug.com/154352 for more details. | |
70 number_of_frames_ = output_params.frames_per_buffer(); | 67 number_of_frames_ = output_params.frames_per_buffer(); |
71 DVLOG(1) << "Size of data buffer in frames : " << number_of_frames_; | 68 DVLOG(1) << "Size of data buffer in frames : " << number_of_frames_; |
72 | 69 |
73 // Derive size (in bytes) of the buffers that we will render to. | 70 // Derive size (in bytes) of the buffers that we will render to. |
74 UInt32 data_byte_size = number_of_frames_ * format_.mBytesPerFrame; | 71 UInt32 data_byte_size = number_of_frames_ * format_.mBytesPerFrame; |
75 DVLOG(1) << "Size of data buffer in bytes : " << data_byte_size; | 72 DVLOG(1) << "Size of data buffer in bytes : " << data_byte_size; |
76 | 73 |
77 // Allocate AudioBuffers to be used as storage for the received audio. | 74 // Allocate AudioBuffers to be used as storage for the received audio. |
78 // The AudioBufferList structure works as a placeholder for the | 75 // The AudioBufferList structure works as a placeholder for the |
79 // AudioBuffer structure, which holds a pointer to the actual data buffer. | 76 // AudioBuffer structure, which holds a pointer to the actual data buffer. |
(...skipping 146 matching lines...) | |
226 kAudioUnitScope_Output, | 223 kAudioUnitScope_Output, |
227 1, | 224 1, |
228 &format_, | 225 &format_, |
229 sizeof(format_)); | 226 sizeof(format_)); |
230 if (result) { | 227 if (result) { |
231 HandleError(result); | 228 HandleError(result); |
232 return false; | 229 return false; |
233 } | 230 } |
234 | 231 |
235 // Set the desired number of frames in the IO buffer (output scope). | 232 // Set the desired number of frames in the IO buffer (output scope). |
236 // WARNING: Setting this value changes the frame size for all audio units in | 233 // WARNING: Setting this value changes the frame size for all input and output |
no longer working on chromium (2014/04/17 10:25:46): ditto?
DaleCurtis (2014/04/17 20:50:29): Done.
237 // the current process. It's imperative that the input and output frame sizes | 234 // audio units in the current process. As a result, the AURenderCallback must |
238 // be the same as the frames_per_buffer() returned by | 235 // be able to handle arbitrary buffer sizes and FIFO appropriately. |
239 // GetInputStreamParameters(). | 236 UInt32 buffer_size = 0; |
240 // TODO(henrika): Due to http://crrev.com/159666 this is currently not true | 237 UInt32 property_size = sizeof(buffer_size); |
241 // and should be fixed, a CHECK() should be added at that time. | 238 result = AudioUnitGetProperty(audio_unit_, |
242 result = AudioUnitSetProperty(audio_unit_, | |
243 kAudioDevicePropertyBufferFrameSize, | 239 kAudioDevicePropertyBufferFrameSize, |
244 kAudioUnitScope_Output, | 240 kAudioUnitScope_Output, |
245 1, | 241 1, |
246 &number_of_frames_, // size is set in the ctor | 242 &buffer_size, |
247 sizeof(number_of_frames_)); | 243 &property_size); |
248 if (result) { | 244 if (result != noErr) { |
249 HandleError(result); | 245 HandleError(result); |
250 return false; | 246 return false; |
251 } | 247 } |
252 | 248 |
249 // Only set the buffer size if we're the only active stream or the buffer size | |
250 // is lower than the current buffer size. | |
251 if (manager_->input_stream_count() == 1 || number_of_frames_ < buffer_size) { | |
252 buffer_size = number_of_frames_; | |
253 result = AudioUnitSetProperty(audio_unit_, | |
254 kAudioDevicePropertyBufferFrameSize, | |
255 kAudioUnitScope_Output, | |
256 1, | |
257 &buffer_size, | |
258 sizeof(buffer_size)); | |
259 if (result != noErr) { | |
260 HandleError(result); | |
261 return false; | |
262 } | |
263 } | |
264 | |
253 // Finally, initialize the audio unit and ensure that it is ready to render. | 265 // Finally, initialize the audio unit and ensure that it is ready to render. |
254 // Allocates memory according to the maximum number of audio frames | 266 // Allocates memory according to the maximum number of audio frames |
255 // it can produce in response to a single render call. | 267 // it can produce in response to a single render call. |
256 result = AudioUnitInitialize(audio_unit_); | 268 result = AudioUnitInitialize(audio_unit_); |
257 if (result) { | 269 if (result) { |
258 HandleError(result); | 270 HandleError(result); |
259 return false; | 271 return false; |
260 } | 272 } |
261 | 273 |
262 // The hardware latency is fixed and will not change during the call. | 274 // The hardware latency is fixed and will not change during the call. |
(...skipping 392 matching lines...) | |
655 kAudioDevicePropertyScopeInput, | 667 kAudioDevicePropertyScopeInput, |
656 static_cast<UInt32>(channel) | 668 static_cast<UInt32>(channel) |
657 }; | 669 }; |
658 OSStatus result = AudioObjectIsPropertySettable(input_device_id_, | 670 OSStatus result = AudioObjectIsPropertySettable(input_device_id_, |
659 &property_address, | 671 &property_address, |
660 &is_settable); | 672 &is_settable); |
661 return (result == noErr) ? is_settable : false; | 673 return (result == noErr) ? is_settable : false; |
662 } | 674 } |
663 | 675 |
664 } // namespace media | 676 } // namespace media |
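The core of this change is the buffer-size negotiation at new lines 236-263: instead of unconditionally writing kAudioDevicePropertyBufferFrameSize, the stream first reads the device's current I/O buffer size and only writes it when it is the sole active input stream or when it wants a smaller (lower-latency) buffer. A minimal standalone sketch of that pattern, assuming a hypothetical helper name and an only_active_stream flag standing in for manager_->input_stream_count() == 1:

```cpp
#include <AudioUnit/AudioUnit.h>
#include <CoreAudio/CoreAudio.h>

// Sketch only, not the CL itself: negotiate the shared I/O buffer frame size
// on the AUHAL input element (bus 1, output scope), as the new code does.
bool MaybeSetBufferFrameSize(AudioUnit audio_unit,
                             UInt32 desired_frames,
                             bool only_active_stream) {
  // Read the buffer size currently in effect for the shared device.
  UInt32 buffer_size = 0;
  UInt32 property_size = sizeof(buffer_size);
  OSStatus result = AudioUnitGetProperty(audio_unit,
                                         kAudioDevicePropertyBufferFrameSize,
                                         kAudioUnitScope_Output,
                                         1,  // Input element (bus 1).
                                         &buffer_size,
                                         &property_size);
  if (result != noErr)
    return false;

  // Only shrink the shared buffer, or set it when no other stream is active;
  // growing it would add latency for streams that asked for less.
  if (only_active_stream || desired_frames < buffer_size) {
    buffer_size = desired_frames;
    result = AudioUnitSetProperty(audio_unit,
                                  kAudioDevicePropertyBufferFrameSize,
                                  kAudioUnitScope_Output,
                                  1,
                                  &buffer_size,
                                  sizeof(buffer_size));
    if (result != noErr)
      return false;
  }
  return true;
}
```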
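Because the negotiated frame size may differ from what the client requested (new comment at lines 234-235, and the FIFO note at lines 64-66), the AURenderCallback has to accumulate device-sized chunks and hand out client-sized ones. A minimal sketch of that adaptation, assuming interleaved float samples; this is not the FIFO class Chromium actually uses:

```cpp
#include <cstring>
#include <vector>

// Buffers interleaved samples arriving in arbitrary device-sized chunks and
// releases them in fixed client-sized chunks.
class SampleFifo {
 public:
  SampleFifo(int channels, int frames_per_client_buffer)
      : channels_(channels), client_frames_(frames_per_client_buffer) {}

  // Append |frames| frames of interleaved data from the device callback.
  void Push(const float* interleaved, int frames) {
    samples_.insert(samples_.end(), interleaved,
                    interleaved + frames * channels_);
  }

  // True once at least one full client-sized buffer is available.
  bool CanPop() const {
    return samples_.size() >= static_cast<size_t>(client_frames_ * channels_);
  }

  // Copy one client-sized buffer to |out| and drop it from the FIFO.
  void Pop(float* out) {
    const size_t count = static_cast<size_t>(client_frames_) * channels_;
    std::memcpy(out, samples_.data(), count * sizeof(float));
    samples_.erase(samples_.begin(),
                   samples_.begin() + static_cast<ptrdiff_t>(count));
  }

 private:
  const int channels_;
  const int client_frames_;
  std::vector<float> samples_;
};
```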
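The last visible hunk (lines 655-661 / 667-673) asks Core Audio whether a per-channel input property is writable before the stream tries to change it. A hedged sketch of the same pattern; the property selector is elided in the diff, so kAudioDevicePropertyVolumeScalar and the helper name below are assumptions for illustration:

```cpp
#include <CoreAudio/CoreAudio.h>

// Build a property address for one input channel and ask whether it can be
// written. Selector and function name are assumed, not taken from the CL.
bool CanSetInputVolumeOnChannel(AudioDeviceID device_id, UInt32 channel) {
  const AudioObjectPropertyAddress address = {
      kAudioDevicePropertyVolumeScalar,
      kAudioDevicePropertyScopeInput,
      channel};
  Boolean is_settable = false;
  OSStatus result =
      AudioObjectIsPropertySettable(device_id, &address, &is_settable);
  return result == noErr && is_settable;
}
```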