Index: media/test/audio-hang.cc
diff --git a/media/test/audio-hang.cc b/media/test/audio-hang.cc
new file mode 100644
index 0000000000000000000000000000000000000000..95b1f49277c321a91056a135e2feefab096f6841
--- /dev/null
+++ b/media/test/audio-hang.cc
@@ -0,0 +1,259 @@
+//
+// Minimized test case for an audio playback issue where AudioUnits are
+// started "successfully" during system resume, but never receive
+// AURenderCallbacks.
+//
+// Date: Fri, Nov 15, 2013
+// Author: Dale Curtis <dalecurtis@google.com>
+// Original Chrome issue: http://crbug.com/160920
+//
+// Code based on Technical Note TN2091:
+// https://developer.apple.com/library/mac/technotes/tn2091/_index.html
+//
+// Compiles with the following command line:
+// clang -framework CoreAudio -framework AudioUnit -lstdc++ audio-hang.cc
+//
+
+#include <cassert>
+#include <cmath>
+#include <cstdio>
+#include <cstring>
+#include <pthread.h>
+
+#include <AudioUnit/AudioUnit.h>
+#include <CoreAudio/CoreAudio.h>
+#include <CoreServices/CoreServices.h>
+
+// Create an AUHAL based AudioUnit.
+static AudioUnit CreateAudioUnit() {
+  AudioComponentDescription desc = {
+    kAudioUnitType_Output,
+    kAudioUnitSubType_HALOutput,
+    kAudioUnitManufacturer_Apple,
+    0,
+    0
+  };
+
+  AudioComponent comp = AudioComponentFindNext(0, &desc);
+  assert(comp);
+
+  AudioUnit audio_unit;
+  OSStatus result = AudioComponentInstanceNew(comp, &audio_unit);
+  assert(result == noErr);
+
+  return audio_unit;
+}
+
+// Configure an AudioUnit to use the default output device.  AUHAL
+// (kAudioUnitSubType_HALOutput) binds to one specific device, unlike the
+// DefaultOutput unit, so the default device must be looked up explicitly.
+static void ConfigureAudioUnitForDefaultDevice(AudioUnit audio_unit) {
+  const AudioObjectPropertyAddress property_address = {
+    kAudioHardwarePropertyDefaultOutputDevice,
+    kAudioObjectPropertyScopeGlobal,
+    kAudioObjectPropertyElementMaster
+  };
+
+  AudioDeviceID device_id = 0;
+  UInt32 size = sizeof(device_id);
+  OSStatus result = AudioObjectGetPropertyData(
+      kAudioObjectSystemObject, &property_address, 0, 0, &size, &device_id);
+  assert(result == kAudioHardwareNoError && device_id != kAudioDeviceUnknown);
+
+  result = AudioUnitSetProperty(audio_unit,
+                                kAudioOutputUnitProperty_CurrentDevice,
+                                kAudioUnitScope_Global,
+                                0,
+                                &device_id,
+                                sizeof(device_id));
+  assert(result == noErr);
+}
+
+// Configure an AudioUnit for output only.
+static void ConfigureAudioUnitForOutputOnly(AudioUnit audio_unit) {
+  // Disable input on element 1 (the AUHAL input element).
+  UInt32 enabled = 0;
+  OSStatus result = AudioUnitSetProperty(audio_unit,
+                                         kAudioOutputUnitProperty_EnableIO,
+                                         kAudioUnitScope_Input,
+                                         1,
+                                         &enabled,
+                                         sizeof(enabled));
+  assert(result == noErr);
+
+  // Enable output on element 0 (the AUHAL output element).
+  enabled = 1;
+  result = AudioUnitSetProperty(audio_unit,
+                                kAudioOutputUnitProperty_EnableIO,
+                                kAudioUnitScope_Output,
+                                0,
+                                &enabled,
+                                sizeof(enabled));
+  assert(result == noErr);
+}
+
+// Configure an AudioUnit for float planar w/ native channel count and sample
+// rate.
+static void ConfigureAudioUnitStreamFormat(AudioUnit audio_unit) {
+  AudioStreamBasicDescription device_format = {0};
+  UInt32 size = sizeof(device_format);
+  OSStatus result = AudioUnitGetProperty(audio_unit,
+                                         kAudioUnitProperty_StreamFormat,
+                                         kAudioUnitScope_Input,
+                                         0,
+                                         &device_format,
+                                         &size);
+  assert(result == noErr);
+
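+  // AudioStreamBasicDescription fields, in declaration order: mSampleRate,
+  // mFormatID, mFormatFlags, mBytesPerPacket, mFramesPerPacket,
+  // mBytesPerFrame, mChannelsPerFrame, mBitsPerChannel, mReserved.  Since
+  // the format is non-interleaved, the byte counts describe one channel.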
+  AudioStreamBasicDescription desired_format = {
+    device_format.mSampleRate,
+    kAudioFormatLinearPCM,
+    kAudioFormatFlagsNativeFloatPacked | kLinearPCMFormatFlagIsNonInterleaved,
+    sizeof(Float32),
+    1,
+    sizeof(Float32),
+    device_format.mChannelsPerFrame,
+    sizeof(Float32) * 8,
+    0
+  };
+
+  result = AudioUnitSetProperty(audio_unit,
+                                kAudioUnitProperty_StreamFormat,
+                                kAudioUnitScope_Input,
+                                0,
+                                &desired_format,
+                                size);
+  assert(result == noErr);
+}
+
+class AudioOutputStream {
+ public:
+  AudioOutputStream()
+      : audio_unit_(CreateAudioUnit()),
+        time_state_(0),
+        render_signal_(NULL) {
+    ConfigureAudioUnitForOutputOnly(audio_unit_);
+    ConfigureAudioUnitForDefaultDevice(audio_unit_);
+    ConfigureAudioUnitStreamFormat(audio_unit_);
+    ConfigureAudioUnitRenderCallback(audio_unit_, this);
+
+    OSStatus result = AudioUnitInitialize(audio_unit_);
+    assert(result == noErr);
+
+    printf("AudioOutputStream(%p) created.\n", this);
+  }
+
+  ~AudioOutputStream() {
+    assert(!render_signal_);
+
+    OSStatus result = AudioUnitUninitialize(audio_unit_);
+    assert(result == noErr);
+
+    result = AudioComponentInstanceDispose(audio_unit_);
+    assert(result == noErr);
+
+    printf("AudioOutputStream(%p) destroyed.\n", this);
+  }
+
+  void Start(pthread_cond_t* render_signal) {
+    assert(render_signal);
+    assert(!render_signal_);
+    render_signal_ = render_signal;
+
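+    // During system resume, AudioOutputUnitStart() can return noErr even
+    // though no AURenderCallback is ever delivered afterwards; that is the
+    // failure this test reproduces.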
+    OSStatus result = AudioOutputUnitStart(audio_unit_);
+    assert(result == noErr);
+
+    printf(" AudioOutputStream(%p) started.\n", this);
+  }
+
+  void Stop() {
+    OSStatus result = AudioOutputUnitStop(audio_unit_);
+    assert(result == noErr);
+
+    assert(render_signal_);
+    render_signal_ = NULL;
+
+    printf(" AudioOutputStream(%p) stopped.\n", this);
+  }
+
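+  // Called by AURenderCallbackProc() on the device's I/O thread.  The stream
+  // format is non-interleaved, so AUHAL provides one AudioBuffer per channel.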
+  void Render(UInt32 number_of_frames, AudioBufferList* io_data) {
+    assert(io_data->mNumberBuffers > 0);
+    assert(io_data->mBuffers[0].mDataByteSize ==
+           number_of_frames * sizeof(float));
+
+    // Fill the first AudioBuffer with a 600 Hz sine wave (assuming a 48 kHz
+    // device sample rate) and copy it to the other channels.
+    float* output = reinterpret_cast<float*>(io_data->mBuffers[0].mData);
+    for (UInt32 i = 0; i < number_of_frames; ++i)
+      output[i] = sin(2.0 * M_PI * 600.0 / 48000.0 * time_state_++);
+    for (UInt32 i = 1; i < io_data->mNumberBuffers; ++i) {
+      memcpy(io_data->mBuffers[i].mData,
+             io_data->mBuffers[0].mData,
+             io_data->mBuffers[i].mDataByteSize);
+    }
+
+    // Signal the main loop that we've received the first Render() call.
+    assert(render_signal_);
+    pthread_cond_signal(render_signal_);
+  }
+
+  static OSStatus AURenderCallbackProc(void* user_data,
+                                       AudioUnitRenderActionFlags* flags,
+                                       const AudioTimeStamp* output_time_stamp,
+                                       UInt32 bus_number,
+                                       UInt32 number_of_frames,
+                                       AudioBufferList* io_data) {
+    assert(user_data);
+    static_cast<AudioOutputStream*>(user_data)->Render(
+        number_of_frames, io_data);
+    return noErr;
+  }
+
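+  // Registers AURenderCallbackProc on the input scope of element 0; AUHAL
+  // pulls from this callback to produce the unit's output.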
+  void ConfigureAudioUnitRenderCallback(AudioUnit audio_unit,
+                                        void* user_data) {
+    AURenderCallbackStruct callback = { AURenderCallbackProc, user_data };
+    OSStatus result = AudioUnitSetProperty(audio_unit,
+                                           kAudioUnitProperty_SetRenderCallback,
+                                           kAudioUnitScope_Input,
+                                           0,
+                                           &callback,
+                                           sizeof(callback));
+    assert(result == noErr);
+  }
+
+ private:
+  const AudioUnit audio_unit_;
+  size_t time_state_;
+  pthread_cond_t* render_signal_;
+};
+
+int main(int argc, char* argv[]) {
+  pthread_mutex_t stream_mutex = PTHREAD_MUTEX_INITIALIZER;
+  pthread_cond_t stream_1_render_called = PTHREAD_COND_INITIALIZER;
+  pthread_cond_t stream_2_render_called = PTHREAD_COND_INITIALIZER;
+
+  // Repeatedly configure and start two streams, wait for the first callback
+  // on each stream, and then stop and destroy the streams.  To reproduce the
+  // bug, manually suspend (close the lid) and resume (open the lid) the
+  // machine until the beeping stops.  When that happens, the loop hangs
+  // waiting for the first callback; e.g.,
+  //
+  // AudioOutputStream(0x00000001) created.
+  // AudioOutputStream(0x00000002) created.
+  //  AudioOutputStream(0x00000001) started.
+  //  AudioOutputStream(0x00000002) started.
+  //
+  // Usually only a few suspend and resume cycles are necessary to reproduce
+  // the issue.
+  while (true) {
+    printf("\n");
+    AudioOutputStream stream_1;
+    AudioOutputStream stream_2;
+
+    stream_1.Start(&stream_1_render_called);
+    stream_2.Start(&stream_2_render_called);
+
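+    // Note: these waits have no guarding predicate.  If a stream's first
+    // render callback fired before pthread_cond_wait() is reached, the
+    // signal would be lost and this loop would block forever, mimicking the
+    // bug.  In practice the first callback arrives at least one hardware
+    // buffer later, so the window is tiny, but a robust version would wait
+    // on a flag protected by stream_mutex.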
+    pthread_mutex_lock(&stream_mutex);
+    pthread_cond_wait(&stream_1_render_called, &stream_mutex);
+    pthread_cond_wait(&stream_2_render_called, &stream_mutex);
+    pthread_mutex_unlock(&stream_mutex);
+
+    stream_2.Stop();
+    stream_1.Stop();
+  }
+
+  return 0;
+}