| Index: media/audio/win/audio_low_latency_output_win.cc
|
| diff --git a/media/audio/win/audio_low_latency_output_win.cc b/media/audio/win/audio_low_latency_output_win.cc
|
| index 53b8a8dd242b0a13de3d1bfc9fa2def74b9ac210..babf8a63eb7953c3bf4e39ed5158ed4a216dcff4 100644
|
| --- a/media/audio/win/audio_low_latency_output_win.cc
|
| +++ b/media/audio/win/audio_low_latency_output_win.cc
|
| @@ -70,6 +70,7 @@ WASAPIAudioOutputStream::WASAPIAudioOutputStream(AudioManagerWin* manager,
|
| share_mode_(GetShareMode()),
|
| num_written_frames_(0),
|
| source_(NULL),
|
| + com_stream_(NULL),
|
| audio_bus_(AudioBus::Create(params)) {
|
| DCHECK(manager_);
|
|
|
| @@ -254,6 +255,11 @@ void WASAPIAudioOutputStream::Start(AudioSourceCallback* callback) {
|
| }
|
| num_written_frames_ = endpoint_buffer_size_frames_;
|
|
|
| + if (!MarshalComPointers()) {
|
| + callback->OnError(this);
|
| + return;
|
| + }
|
| +
|
| // Create and start the thread that will drive the rendering by waiting for
|
| // render events.
|
| render_thread_.reset(
|
| @@ -267,6 +273,7 @@ void WASAPIAudioOutputStream::Start(AudioSourceCallback* callback) {
|
| }
|
|
|
| // Start streaming data between the endpoint buffer and the audio engine.
|
| + // TODO(dalecurtis): Do we need a lock on this with STA mode?
|
| HRESULT hr = audio_client_->Start();
|
| if (FAILED(hr)) {
|
| PLOG(ERROR) << "Failed to start output streaming: " << std::hex << hr;
|
| @@ -282,6 +289,7 @@ void WASAPIAudioOutputStream::Stop() {
|
| return;
|
|
|
| // Stop output audio streaming.
|
| + // TODO(dalecurtis): Do we need a lock on this with STA mode?
|
| HRESULT hr = audio_client_->Stop();
|
| if (FAILED(hr)) {
|
| PLOG(ERROR) << "Failed to stop output streaming: " << std::hex << hr;
|
| @@ -338,7 +346,7 @@ void WASAPIAudioOutputStream::GetVolume(double* volume) {
|
| }
|
|
|
| void WASAPIAudioOutputStream::Run() {
|
| - ScopedCOMInitializer com_init(ScopedCOMInitializer::kMTA);
|
| + ScopedCOMInitializer com_init;
|
|
|
| // Increase the thread priority.
|
| render_thread_->SetThreadPriority(base::ThreadPriority::REALTIME_AUDIO);
|
| @@ -357,20 +365,30 @@ void WASAPIAudioOutputStream::Run() {
|
|     LOG(WARNING) << "Failed to enable MMCSS (error code=" << err << ").";
|
|   }
|
|
|
| +  // Retrieve COM pointers from the main thread.
|
| +  IAudioClient* thread_audio_client = NULL;
|
| +  IAudioRenderClient* thread_audio_render_client = NULL;
|
| +  IAudioClock* thread_audio_clock = NULL;
|
| +
|
|   HRESULT hr = S_FALSE;
|
|
|
|   bool playing = true;
|
| -  bool error = false;
|
| +  bool error =
|
| +      !UnmarshalComPointers(&thread_audio_client, &thread_audio_render_client,
|
| +                            &thread_audio_clock);
|
| +
|
|   HANDLE wait_array[] = { stop_render_event_.Get(),
|
|                           audio_samples_render_event_.Get() };
|
|   UINT64 device_frequency = 0;
|
|
|
| -  // The device frequency is the frequency generated by the hardware clock in
|
| -  // the audio device. The GetFrequency() method reports a constant frequency.
|
| -  hr = audio_clock_->GetFrequency(&device_frequency);
|
| -  error = FAILED(hr);
|
| -  PLOG_IF(ERROR, error) << "Failed to acquire IAudioClock interface: "
|
| -                        << std::hex << hr;
|
| +  if (!error) {
|
| +    // The device frequency is the frequency generated by the hardware clock in
|
| +    // the audio device. The GetFrequency() method reports a constant frequency.
|
| +    // Use the proxy unmarshaled for this thread's apartment, not the
|
| +    // main-thread |audio_clock_| member.
|
| +    hr = thread_audio_clock->GetFrequency(&device_frequency);
|
| +    error = FAILED(hr);
|
| +    PLOG_IF(ERROR, error) << "Failed to acquire IAudioClock interface: "
|
| +                          << std::hex << hr;
|
| +  }
|
|
|
|   // Keep rendering audio until the stop event or the stream-switch event
|
|   // is signaled. An error event can also break the main thread loop.
|
| @@ -388,7 +406,9 @@ void WASAPIAudioOutputStream::Run() {
|
| break;
|
| case WAIT_OBJECT_0 + 1:
|
| // |audio_samples_render_event_| has been set.
|
| - error = !RenderAudioFromSource(device_frequency);
|
| + error = !RenderAudioFromSource(device_frequency, thread_audio_client,
|
| + thread_audio_render_client,
|
| + thread_audio_clock);
|
| break;
|
| default:
|
| error = true;
|
| @@ -396,11 +416,20 @@ void WASAPIAudioOutputStream::Run() {
|
|     }
|
|   }
|
|
|
| -  if (playing && error) {
|
| +  if (playing && error && thread_audio_client) {
|
|     // Stop audio rendering since something has gone wrong in our main thread
|
|     // loop. Note that, we are still in a "started" state, hence a Stop() call
|
|     // is required to join the thread properly.
|
| -    audio_client_->Stop();
|
| +    thread_audio_client->Stop();
|
|     PLOG(ERROR) << "WASAPI rendering failed.";
|
|   }
|
| +
|
| +  // Release the proxies unmarshaled for this thread; CoUnmarshalInterface()
|
| +  // returns AddRef'd pointers, and they must go before |com_init| (above)
|
| +  // uninitializes COM for this thread.
|
| +  if (thread_audio_clock)
|
| +    thread_audio_clock->Release();
|
| +  if (thread_audio_render_client)
|
| +    thread_audio_render_client->Release();
|
| +  if (thread_audio_client)
|
| +    thread_audio_client->Release();
|
|
|
| @@ -410,7 +430,11 @@ void WASAPIAudioOutputStream::Run() {
|
| }
|
| }
|
|
|
| -bool WASAPIAudioOutputStream::RenderAudioFromSource(UINT64 device_frequency) {
|
| +bool WASAPIAudioOutputStream::RenderAudioFromSource(
|
| + UINT64 device_frequency,
|
| + IAudioClient* thread_audio_client,
|
| + IAudioRenderClient* thread_audio_render_client,
|
| + IAudioClock* thread_audio_clock) {
|
| TRACE_EVENT0("audio", "RenderAudioFromSource");
|
|
|
| HRESULT hr = S_FALSE;
|
| @@ -425,7 +449,7 @@ bool WASAPIAudioOutputStream::RenderAudioFromSource(UINT64 device_frequency) {
|
| if (share_mode_ == AUDCLNT_SHAREMODE_SHARED) {
|
| // Get the padding value which represents the amount of rendering
|
| // data that is queued up to play in the endpoint buffer.
|
| - hr = audio_client_->GetCurrentPadding(&num_queued_frames);
|
| + hr = thread_audio_client->GetCurrentPadding(&num_queued_frames);
|
| num_available_frames =
|
| endpoint_buffer_size_frames_ - num_queued_frames;
|
| if (FAILED(hr)) {
|
| @@ -467,8 +491,8 @@ bool WASAPIAudioOutputStream::RenderAudioFromSource(UINT64 device_frequency) {
|
| for (size_t n = 0; n < num_packets; ++n) {
|
| // Grab all available space in the rendering endpoint buffer
|
| // into which the client can write a data packet.
|
| - hr = audio_render_client_->GetBuffer(packet_size_frames_,
|
| - &audio_data);
|
| + hr =
|
| + thread_audio_render_client->GetBuffer(packet_size_frames_, &audio_data);
|
| if (FAILED(hr)) {
|
| DLOG(ERROR) << "Failed to use rendering audio buffer: "
|
| << std::hex << hr;
|
| @@ -482,7 +506,7 @@ bool WASAPIAudioOutputStream::RenderAudioFromSource(UINT64 device_frequency) {
|
| // unit at the render side.
|
| UINT64 position = 0;
|
| uint32 audio_delay_bytes = 0;
|
| - hr = audio_clock_->GetPosition(&position, NULL);
|
| + hr = thread_audio_clock->GetPosition(&position, NULL);
|
| if (SUCCEEDED(hr)) {
|
| // Stream position of the sample that is currently playing
|
| // through the speaker.
|
| @@ -522,7 +546,7 @@ bool WASAPIAudioOutputStream::RenderAudioFromSource(UINT64 device_frequency) {
|
| // Render silence if we were not able to fill up the buffer totally.
|
| DWORD flags = (num_filled_bytes < packet_size_bytes_) ?
|
| AUDCLNT_BUFFERFLAGS_SILENT : 0;
|
| - audio_render_client_->ReleaseBuffer(packet_size_frames_, flags);
|
| + thread_audio_render_client->ReleaseBuffer(packet_size_frames_, flags);
|
|
|
| num_written_frames_ += packet_size_frames_;
|
| }
|
| @@ -627,4 +651,122 @@ void WASAPIAudioOutputStream::StopThread() {
|
|   source_ = NULL;
|
| }
|
|
|
| +namespace {
|
| +
|
| +// Best-effort cleanup helper for the COM marshaling stream.  Rewinds
|
| +// |stream| if |rewind| is true, releases |count| blocks of marshal data
|
| +// (each block holds a reference to the marshaled interface; see
|
| +// CoReleaseMarshalData) and finally releases the stream itself, which
|
| +// CoReleaseMarshalData alone does not do.
|
| +void ReleaseComStream(IStream* stream, int count, bool rewind) {
|
| +  if (rewind) {
|
| +    LARGE_INTEGER pos = {0};
|
| +    stream->Seek(pos, STREAM_SEEK_SET, NULL);
|
| +  }
|
| +  for (int i = 0; i < count; ++i)
|
| +    CoReleaseMarshalData(stream);
|
| +  stream->Release();
|
| +}
|
| +
|
| +}  // namespace
|
| +
|
| +// Marshals |audio_client_|, |audio_render_client_| and |audio_clock_| into
|
| +// |com_stream_| so the render thread can unmarshal proxies suitable for its
|
| +// own COM apartment.  Returns false on failure; in that case |com_stream_|
|
| +// is cleared and no stream or interface references are leaked.
|
| +bool WASAPIAudioOutputStream::MarshalComPointers() {
|
| +  HRESULT hr = CreateStreamOnHGlobal(0, TRUE, &com_stream_);
|
| +  if (FAILED(hr)) {
|
| +    DLOG(ERROR) << "Failed to create stream for marshaling COM pointers.";
|
| +    com_stream_ = NULL;
|
| +    return false;
|
| +  }
|
| +
|
| +  hr = CoMarshalInterface(com_stream_, __uuidof(IAudioClient),
|
| +                          audio_client_.get(), MSHCTX_INPROC, NULL,
|
| +                          MSHLFLAGS_NORMAL);
|
| +  if (FAILED(hr)) {
|
| +    DLOG(ERROR) << "Marshal failed for IAudioClient: " << std::hex << hr;
|
| +    // Nothing has been marshaled into the stream yet.
|
| +    ReleaseComStream(com_stream_, 0, false);
|
| +    com_stream_ = NULL;
|
| +    return false;
|
| +  }
|
| +
|
| +  hr = CoMarshalInterface(com_stream_, __uuidof(IAudioRenderClient),
|
| +                          audio_render_client_.get(), MSHCTX_INPROC, NULL,
|
| +                          MSHLFLAGS_NORMAL);
|
| +  if (FAILED(hr)) {
|
| +    DLOG(ERROR) << "Marshal failed for IAudioRenderClient: " << std::hex << hr;
|
| +    ReleaseComStream(com_stream_, 1, true);
|
| +    com_stream_ = NULL;
|
| +    return false;
|
| +  }
|
| +
|
| +  hr =
|
| +      CoMarshalInterface(com_stream_, __uuidof(IAudioClock), audio_clock_.get(),
|
| +                         MSHCTX_INPROC, NULL, MSHLFLAGS_NORMAL);
|
| +  if (FAILED(hr)) {
|
| +    DLOG(ERROR) << "Marshal failed for IAudioClock: " << std::hex << hr;
|
| +    ReleaseComStream(com_stream_, 2, true);
|
| +    com_stream_ = NULL;
|
| +    return false;
|
| +  }
|
| +
|
| +  // Rewind so UnmarshalComPointers() reads from the first marshaled block.
|
| +  LARGE_INTEGER pos = {0};
|
| +  hr = com_stream_->Seek(pos, STREAM_SEEK_SET, NULL);
|
| +  if (FAILED(hr)) {
|
| +    DLOG(ERROR) << "Failed to seek IStream for marshaling: " << std::hex << hr;
|
| +    ReleaseComStream(com_stream_, 3, true);
|
| +    com_stream_ = NULL;
|
| +    return false;
|
| +  }
|
| +
|
| +  return true;
|
| +}
|
| +
|
| +// Unmarshals the interfaces written by MarshalComPointers() into proxies
|
| +// usable from the calling (render) thread.  Always consumes and releases
|
| +// |com_stream_|; on failure no out-param references are leaked.
|
| +bool WASAPIAudioOutputStream::UnmarshalComPointers(
|
| +    IAudioClient** audio_client,
|
| +    IAudioRenderClient** audio_render_client,
|
| +    IAudioClock** audio_clock) {
|
| +  HRESULT hr = CoUnmarshalInterface(com_stream_, __uuidof(IAudioClient),
|
| +                                    reinterpret_cast<LPVOID*>(audio_client));
|
| +  if (FAILED(hr)) {
|
| +    DLOG(ERROR) << "Unmarshal failed IAudioClient: " << std::hex << hr;
|
| +    // Best effort: release the two marshal data blocks not yet consumed.
|
| +    ReleaseComStream(com_stream_, 2, false);
|
| +    com_stream_ = NULL;
|
| +    return false;
|
| +  }
|
| +
|
| +  hr = CoUnmarshalInterface(com_stream_, __uuidof(IAudioRenderClient),
|
| +                            reinterpret_cast<LPVOID*>(audio_render_client));
|
| +  if (FAILED(hr)) {
|
| +    DLOG(ERROR) << "Unmarshal failed IAudioRenderClient: " << std::hex << hr;
|
| +    (*audio_client)->Release();
|
| +    *audio_client = NULL;
|
| +    ReleaseComStream(com_stream_, 1, false);
|
| +    com_stream_ = NULL;
|
| +    return false;
|
| +  }
|
| +
|
| +  hr = CoUnmarshalInterface(com_stream_, __uuidof(IAudioClock),
|
| +                            reinterpret_cast<LPVOID*>(audio_clock));
|
| +  if (FAILED(hr)) {
|
| +    DLOG(ERROR) << "Unmarshal failed IAudioClock: " << std::hex << hr;
|
| +    (*audio_client)->Release();
|
| +    *audio_client = NULL;
|
| +    (*audio_render_client)->Release();
|
| +    *audio_render_client = NULL;
|
| +  }
|
| +
|
| +  // On success all marshal data has been consumed; only the stream remains.
|
| +  ReleaseComStream(com_stream_, 0, false);
|
| +  com_stream_ = NULL;
|
| +  return SUCCEEDED(hr);
|
| +}
|
| +
|
| }  // namespace media
|
|
|