Index: media/audio/win/audio_low_latency_output_win.cc
diff --git a/media/audio/win/audio_low_latency_output_win.cc b/media/audio/win/audio_low_latency_output_win.cc
index f7b31a3c00a09d75376a190fa9df926f9b32b6db..8b2b8e343bb9e8c4d3cb2c95580b9c570d1c69d0 100644
--- a/media/audio/win/audio_low_latency_output_win.cc
+++ b/media/audio/win/audio_low_latency_output_win.cc
@@ -249,6 +249,11 @@ void WASAPIAudioOutputStream::Start(AudioSourceCallback* callback) {
   }
   num_written_frames_ = endpoint_buffer_size_frames_;
+  if (!MarshalComPointers()) {
+    callback->OnError(this);
+    return;
+  }
+
   // Create and start the thread that will drive the rendering by waiting for
   // render events.
   render_thread_.reset(
@@ -262,6 +267,7 @@ void WASAPIAudioOutputStream::Start(AudioSourceCallback* callback) {
   }
   // Start streaming data between the endpoint buffer and the audio engine.
+  // TODO(dalecurtis): Do we need a lock on this with STA mode?
DaleCurtis, 2015/04/22 17:48:54: Tentatively removed this assuming your POV is correct.
   HRESULT hr = audio_client_->Start();
   if (FAILED(hr)) {
     PLOG(ERROR) << "Failed to start output streaming: " << std::hex << hr;
@@ -277,6 +283,7 @@ void WASAPIAudioOutputStream::Stop() {
     return;
   // Stop output audio streaming.
+  // TODO(dalecurtis): Do we need a lock on this with STA mode?
   HRESULT hr = audio_client_->Stop();
   if (FAILED(hr)) {
     PLOG(ERROR) << "Failed to stop output streaming: " << std::hex << hr;
@@ -333,7 +340,7 @@ void WASAPIAudioOutputStream::GetVolume(double* volume) {
 }
 void WASAPIAudioOutputStream::Run() {
-  ScopedCOMInitializer com_init(ScopedCOMInitializer::kMTA);
+  ScopedCOMInitializer com_init;
   // Increase the thread priority.
   render_thread_->SetThreadPriority(base::ThreadPriority::REALTIME_AUDIO);
@@ -352,20 +359,30 @@ void WASAPIAudioOutputStream::Run() {
     LOG(WARNING) << "Failed to enable MMCSS (error code=" << err << ").";
   }
+  // Retrieve COM pointers from the main thread.
+  ScopedComPtr<IAudioClient> thread_audio_client;
+  ScopedComPtr<IAudioRenderClient> thread_audio_render_client;
+  ScopedComPtr<IAudioClock> thread_audio_clock;
+
   HRESULT hr = S_FALSE;
   bool playing = true;
-  bool error = false;
+  bool error =
+      !UnmarshalComPointers(&thread_audio_client, &thread_audio_render_client,
+                            &thread_audio_clock);
+
   HANDLE wait_array[] = { stop_render_event_.Get(),
                           audio_samples_render_event_.Get() };
   UINT64 device_frequency = 0;
-  // The device frequency is the frequency generated by the hardware clock in
-  // the audio device. The GetFrequency() method reports a constant frequency.
-  hr = audio_clock_->GetFrequency(&device_frequency);
-  error = FAILED(hr);
-  PLOG_IF(ERROR, error) << "Failed to acquire IAudioClock interface: "
-                        << std::hex << hr;
+  if (!error) {
+    // The device frequency is the frequency generated by the hardware clock in
+    // the audio device. The GetFrequency() method reports a constant frequency.
+    hr = thread_audio_clock->GetFrequency(&device_frequency);
+    error = FAILED(hr);
+    PLOG_IF(ERROR, error) << "Failed to acquire IAudioClock interface: "
+                          << std::hex << hr;
+  }
   // Keep rendering audio until the stop event or the stream-switch event
   // is signaled. An error event can also break the main thread loop.
@@ -383,7 +400,9 @@ void WASAPIAudioOutputStream::Run() {
         break;
       case WAIT_OBJECT_0 + 1:
         // |audio_samples_render_event_| has been set.
-        error = !RenderAudioFromSource(device_frequency);
+        error = !RenderAudioFromSource(
+            device_frequency, thread_audio_client.get(),
+            thread_audio_render_client.get(), thread_audio_clock.get());
         break;
       default:
         error = true;
@@ -391,11 +410,11 @@ void WASAPIAudioOutputStream::Run() {
     }
   }
-  if (playing && error) {
+  if (playing && error && thread_audio_client) {
     // Stop audio rendering since something has gone wrong in our main thread
     // loop. Note that, we are still in a "started" state, hence a Stop() call
     // is required to join the thread properly.
-    audio_client_->Stop();
+    thread_audio_client->Stop();
     PLOG(ERROR) << "WASAPI rendering failed.";
   }
@@ -405,7 +424,11 @@ void WASAPIAudioOutputStream::Run() {
   }
 }
-bool WASAPIAudioOutputStream::RenderAudioFromSource(UINT64 device_frequency) {
+bool WASAPIAudioOutputStream::RenderAudioFromSource(
+    UINT64 device_frequency,
+    IAudioClient* thread_audio_client,
tommi (sloooow) - chröme, 2015/04/22 10:31:52: nit: Does the 'thread_' prefix add context? I thi…
DaleCurtis, 2015/04/22 17:48:54: Done.
+    IAudioRenderClient* thread_audio_render_client,
+    IAudioClock* thread_audio_clock) {
   TRACE_EVENT0("audio", "RenderAudioFromSource");
   HRESULT hr = S_FALSE;
@@ -420,7 +443,7 @@ bool WASAPIAudioOutputStream::RenderAudioFromSource(UINT64 device_frequency) {
   if (share_mode_ == AUDCLNT_SHAREMODE_SHARED) {
     // Get the padding value which represents the amount of rendering
     // data that is queued up to play in the endpoint buffer.
-    hr = audio_client_->GetCurrentPadding(&num_queued_frames);
+    hr = thread_audio_client->GetCurrentPadding(&num_queued_frames);
     num_available_frames =
         endpoint_buffer_size_frames_ - num_queued_frames;
     if (FAILED(hr)) {
@@ -462,8 +485,8 @@ bool WASAPIAudioOutputStream::RenderAudioFromSource(UINT64 device_frequency) {
   for (size_t n = 0; n < num_packets; ++n) {
     // Grab all available space in the rendering endpoint buffer
     // into which the client can write a data packet.
-    hr = audio_render_client_->GetBuffer(packet_size_frames_,
-                                         &audio_data);
+    hr =
+        thread_audio_render_client->GetBuffer(packet_size_frames_, &audio_data);
     if (FAILED(hr)) {
       DLOG(ERROR) << "Failed to use rendering audio buffer: "
                   << std::hex << hr;
@@ -477,7 +500,7 @@ bool WASAPIAudioOutputStream::RenderAudioFromSource(UINT64 device_frequency) {
     // unit at the render side.
     UINT64 position = 0;
     uint32 audio_delay_bytes = 0;
-    hr = audio_clock_->GetPosition(&position, NULL);
+    hr = thread_audio_clock->GetPosition(&position, NULL);
     if (SUCCEEDED(hr)) {
       // Stream position of the sample that is currently playing
       // through the speaker.
@@ -517,7 +540,7 @@ bool WASAPIAudioOutputStream::RenderAudioFromSource(UINT64 device_frequency) {
     // Render silence if we were not able to fill up the buffer totally.
     DWORD flags = (num_filled_bytes < packet_size_bytes_) ?
         AUDCLNT_BUFFERFLAGS_SILENT : 0;
-    audio_render_client_->ReleaseBuffer(packet_size_frames_, flags);
+    thread_audio_render_client->ReleaseBuffer(packet_size_frames_, flags);
     num_written_frames_ += packet_size_frames_;
   }
@@ -622,4 +645,77 @@ void WASAPIAudioOutputStream::StopThread() {
   source_ = NULL;
 }
+bool WASAPIAudioOutputStream::MarshalComPointers() {
tommi (sloooow) - chröme, 2015/04/22 10:31:52: Can we add a thread checker for these methods? Th…
DaleCurtis, 2015/04/22 16:08:23: I'll see if I can. I forget if the unit tests try…
DaleCurtis, 2015/04/22 17:48:54: Done.
+  HRESULT hr = CreateStreamOnHGlobal(NULL, TRUE, com_stream_.Receive());
tommi (sloooow) - chröme, 2015/04/22 10:31:52: what about using a local variable for the stream h…
DaleCurtis, 2015/04/22 16:08:23: Good idea, I'll do this.
DaleCurtis, 2015/04/22 17:48:54: Done.
+  if (FAILED(hr)) {
+    DLOG(ERROR) << "Failed to create stream for marshaling COM pointers.";
+    return false;
+  }
+
+  hr = CoMarshalInterface(com_stream_.get(), __uuidof(IAudioClient),
tommi (sloooow) - chröme, 2015/04/22 10:31:52: was there a particular reason you decided to go wi…
DaleCurtis, 2015/04/22 16:08:23: Yes, but maybe not good ones, as I basically just…
+                          audio_client_.get(), MSHCTX_INPROC, NULL,
+                          MSHLFLAGS_NORMAL);
+  if (FAILED(hr)) {
+    DLOG(ERROR) << "Marshal failed for IAudioClient: " << std::hex << hr;
+    com_stream_.Release();
+    return false;
+  }
+
+  hr = CoMarshalInterface(com_stream_.get(), __uuidof(IAudioRenderClient),
+                          audio_render_client_.get(), MSHCTX_INPROC, NULL,
+                          MSHLFLAGS_NORMAL);
+  if (FAILED(hr)) {
+    DLOG(ERROR) << "Marshal failed for IAudioRenderClient: " << std::hex << hr;
+    com_stream_.Release();
+    return false;
+  }
+
+  hr = CoMarshalInterface(com_stream_.get(), __uuidof(IAudioClock),
+                          audio_clock_.get(), MSHCTX_INPROC, NULL,
+                          MSHLFLAGS_NORMAL);
+  if (FAILED(hr)) {
+    DLOG(ERROR) << "Marshal failed for IAudioClock: " << std::hex << hr;
+    com_stream_.Release();
+    return false;
+  }
+
+  LARGE_INTEGER pos = {0};
+  hr = com_stream_->Seek(pos, STREAM_SEEK_SET, NULL);
+  if (FAILED(hr)) {
+    DLOG(ERROR) << "Failed to seek IStream for marshaling: " << std::hex << hr;
+    com_stream_.Release();
+    return false;
+  }
+
+  return true;
+}
+
+bool WASAPIAudioOutputStream::UnmarshalComPointers(
+    ScopedComPtr<IAudioClient>* audio_client,
+    ScopedComPtr<IAudioRenderClient>* audio_render_client,
+    ScopedComPtr<IAudioClock>* audio_clock) {
+  HRESULT hr = CoUnmarshalInterface(com_stream_.get(), __uuidof(IAudioClient),
tommi (sloooow) - chröme, 2015/04/22 10:31:52: nit: would be nice to detach com_stream_ here to a…
DaleCurtis, 2015/04/22 17:48:54: Done.
+                                    audio_client->ReceiveVoid());
+  if (FAILED(hr)) {
+    DLOG(ERROR) << "Unmarshal failed IAudioClient: " << std::hex << hr;
+    com_stream_.Release();
+    return false;
+  }
+
+  hr = CoUnmarshalInterface(com_stream_.get(), __uuidof(IAudioRenderClient),
+                            audio_render_client->ReceiveVoid());
+  if (FAILED(hr)) {
+    DLOG(ERROR) << "Unmarshal failed IAudioRenderClient: " << std::hex << hr;
+    com_stream_.Release();
+    return false;
+  }
+
+  hr = CoUnmarshalInterface(com_stream_.get(), __uuidof(IAudioClock),
+                            audio_clock->ReceiveVoid());
+  if (FAILED(hr))
+    DLOG(ERROR) << "Unmarshal failed IAudioClock: " << std::hex << hr;
+  com_stream_.Release();
+  return SUCCEEDED(hr);
+}
+
 }  // namespace media
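
tommi's truncated question above about why CoMarshalInterface() was chosen hints at a simpler route. A minimal sketch of that alternative follows, assuming Chromium's base::win::ScopedComPtr and three hypothetical ScopedComPtr<IStream> members (client_stream_, render_client_stream_, clock_stream_) plus a thread_checker_ member; none of these names are part of the patch. CoMarshalInterThreadInterfaceInStream() allocates the stream and marshals one interface per call, and CoGetInterfaceAndReleaseStream() unmarshals and frees that stream, so the manual CreateStreamOnHGlobal()/Seek() bookkeeping and the "detach com_stream_" nit both go away.

// Sketch only: one stream per interface instead of one shared stream.
// Requires <objbase.h>; members client_stream_, render_client_stream_,
// clock_stream_ and thread_checker_ are hypothetical additions.
bool WASAPIAudioOutputStream::MarshalComPointers() {
  DCHECK(thread_checker_.CalledOnValidThread());
  HRESULT hr = CoMarshalInterThreadInterfaceInStream(
      __uuidof(IAudioClient), audio_client_.get(), client_stream_.Receive());
  if (SUCCEEDED(hr)) {
    hr = CoMarshalInterThreadInterfaceInStream(
        __uuidof(IAudioRenderClient), audio_render_client_.get(),
        render_client_stream_.Receive());
  }
  if (SUCCEEDED(hr)) {
    hr = CoMarshalInterThreadInterfaceInStream(
        __uuidof(IAudioClock), audio_clock_.get(), clock_stream_.Receive());
  }
  return SUCCEEDED(hr);
}

bool WASAPIAudioOutputStream::UnmarshalComPointers(
    ScopedComPtr<IAudioClient>* audio_client,
    ScopedComPtr<IAudioRenderClient>* audio_render_client,
    ScopedComPtr<IAudioClock>* audio_clock) {
  // CoGetInterfaceAndReleaseStream() consumes its stream argument even on
  // failure, so Detach() hands over ownership and nothing is left to release.
  HRESULT hr = CoGetInterfaceAndReleaseStream(client_stream_.Detach(),
                                              __uuidof(IAudioClient),
                                              audio_client->ReceiveVoid());
  if (SUCCEEDED(hr)) {
    hr = CoGetInterfaceAndReleaseStream(render_client_stream_.Detach(),
                                        __uuidof(IAudioRenderClient),
                                        audio_render_client->ReceiveVoid());
  }
  if (SUCCEEDED(hr)) {
    hr = CoGetInterfaceAndReleaseStream(clock_stream_.Detach(),
                                        __uuidof(IAudioClock),
                                        audio_clock->ReceiveVoid());
  }
  return SUCCEEDED(hr);
}

The trade-off is one HGLOBAL-backed stream per interface rather than one shared stream, which is likely negligible for three pointers marshaled once per Start().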