Index: media/audio/win/audio_low_latency_output_win.cc
diff --git a/media/audio/win/audio_low_latency_output_win.cc b/media/audio/win/audio_low_latency_output_win.cc
index f7b31a3c00a09d75376a190fa9df926f9b32b6db..128ab76f531549a80dc01e5ab234882d462a4030 100644
--- a/media/audio/win/audio_low_latency_output_win.cc
+++ b/media/audio/win/audio_low_latency_output_win.cc
@@ -70,6 +70,7 @@ WASAPIAudioOutputStream::WASAPIAudioOutputStream(AudioManagerWin* manager,
share_mode_(GetShareMode()),
num_written_frames_(0),
source_(NULL),
+ com_stream_(NULL),
tommi (sloooow) - chröme
2015/04/20 18:23:38
is this a COM pointer? If so, use ScopedComPtr
DaleCurtis
2015/04/20 18:54:07
I think so, but it's seemingly manually released v
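For reference, a minimal sketch of the ScopedComPtr suggestion above, assuming the base/win/scoped_comptr.h API of this era (Receive() for out parameters); the member would become base::win::ScopedComPtr<IStream> com_stream_, so the stream is released automatically and the manual NULL-ing in the patch goes away. The helper name is hypothetical, not part of the patch.

#include <objidl.h>
#include <ole2.h>

#include "base/logging.h"
#include "base/win/scoped_comptr.h"

// Creates the marshaling stream into a scoped pointer. A NULL HGLOBAL asks
// CreateStreamOnHGlobal() to allocate the backing memory itself; TRUE frees
// that memory when the last reference to the stream is released.
bool CreateMarshalingStream(base::win::ScopedComPtr<IStream>* stream) {
  HRESULT hr = CreateStreamOnHGlobal(NULL, TRUE, stream->Receive());
  if (FAILED(hr)) {
    DLOG(ERROR) << "Failed to create marshaling stream: " << std::hex << hr;
    return false;
  }
  return true;
}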
audio_bus_(AudioBus::Create(params)) {
DCHECK(manager_);
@@ -249,6 +250,11 @@ void WASAPIAudioOutputStream::Start(AudioSourceCallback* callback) {
}
num_written_frames_ = endpoint_buffer_size_frames_;
+ if (!MarshalComPointers()) {
+ callback->OnError(this);
+ return;
+ }
+
// Create and start the thread that will drive the rendering by waiting for
// render events.
render_thread_.reset(
@@ -262,6 +268,7 @@ void WASAPIAudioOutputStream::Start(AudioSourceCallback* callback) {
}
// Start streaming data between the endpoint buffer and the audio engine.
+ // TODO(dalecurtis): Do we need a lock on this with STA mode?
tommi (sloooow) - chröme
2015/04/20 18:23:38
One thing to be aware of with STA is reentrancy.
HRESULT hr = audio_client_->Start();
if (FAILED(hr)) {
PLOG(ERROR) << "Failed to start output streaming: " << std::hex << hr;
@@ -277,6 +284,7 @@ void WASAPIAudioOutputStream::Stop() {
return;
// Stop output audio streaming.
+ // TODO(dalecurtis): Do we need a lock on this with STA mode?
tommi (sloooow) - chröme
2015/04/20 18:23:38
if a lock wasn't needed before, I don't think it's
DaleCurtis
2015/04/20 18:54:07
Hmm, previously we were talking to the same instan
DaleCurtis
2015/04/22 16:08:23
Can you add some more details here?
HRESULT hr = audio_client_->Stop();
if (FAILED(hr)) {
PLOG(ERROR) << "Failed to stop output streaming: " << std::hex << hr;
@@ -333,7 +341,7 @@ void WASAPIAudioOutputStream::GetVolume(double* volume) {
}
void WASAPIAudioOutputStream::Run() {
- ScopedCOMInitializer com_init(ScopedCOMInitializer::kMTA);
+ ScopedCOMInitializer com_init;
// Increase the thread priority.
render_thread_->SetThreadPriority(base::ThreadPriority::REALTIME_AUDIO);
@@ -352,20 +360,30 @@ void WASAPIAudioOutputStream::Run() {
LOG(WARNING) << "Failed to enable MMCSS (error code=" << err << ").";
}
+ // Retrieve COM pointers from the main thread.
+ IAudioClient* thread_audio_client = NULL;
tommi (sloooow) - chröme
2015/04/20 18:23:38
use ScopedComPtr instead of raw pointers? actuall
DaleCurtis
2015/04/20 18:54:07
I have no idea what I'm doing here :) Marshal tuto
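Regarding the marshal tutorial question: COM ships a helper pair for exactly this one-shot inter-thread hand-off, which hides the CoMarshalInterface/seek/CoUnmarshalInterface dance. A minimal sketch, assuming one stream per interface; the function names are hypothetical, not part of the patch.

#include <audioclient.h>
#include <objbase.h>

// Runs on the thread that owns |audio_client|, e.g. inside Start(). The
// returned stream holds the marshaled interface for one other thread.
HRESULT MarshalAudioClient(IAudioClient* audio_client, IStream** stream) {
  return CoMarshalInterThreadInterfaceInStream(__uuidof(IAudioClient),
                                               audio_client, stream);
}

// Runs on the render thread, e.g. at the top of Run(). Releases |stream|
// itself, so no rewinding or manual cleanup is needed.
HRESULT UnmarshalAudioClient(IStream* stream, IAudioClient** audio_client) {
  return CoGetInterfaceAndReleaseStream(
      stream, __uuidof(IAudioClient),
      reinterpret_cast<void**>(audio_client));
}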
+ IAudioRenderClient* thread_audio_render_client = NULL;
+ IAudioClock* thread_audio_clock = NULL;
+
HRESULT hr = S_FALSE;
bool playing = true;
- bool error = false;
+ bool error =
+ !UnmarshalComPointers(&thread_audio_client, &thread_audio_render_client,
+ &thread_audio_clock);
+
HANDLE wait_array[] = { stop_render_event_.Get(),
audio_samples_render_event_.Get() };
UINT64 device_frequency = 0;
- // The device frequency is the frequency generated by the hardware clock in
- // the audio device. The GetFrequency() method reports a constant frequency.
- hr = audio_clock_->GetFrequency(&device_frequency);
- error = FAILED(hr);
- PLOG_IF(ERROR, error) << "Failed to acquire IAudioClock interface: "
- << std::hex << hr;
+ if (!error) {
+ // The device frequency is the frequency generated by the hardware clock in
+ // the audio device. The GetFrequency() method reports a constant frequency.
+ hr = thread_audio_clock->GetFrequency(&device_frequency);
+ error = FAILED(hr);
+ PLOG_IF(ERROR, error) << "Failed to acquire IAudioClock interface: "
+ << std::hex << hr;
+ }
// Keep rendering audio until the stop event or the stream-switch event
// is signaled. An error event can also break the main thread loop.
@@ -383,7 +401,9 @@ void WASAPIAudioOutputStream::Run() {
break;
case WAIT_OBJECT_0 + 1:
// |audio_samples_render_event_| has been set.
- error = !RenderAudioFromSource(device_frequency);
+ error = !RenderAudioFromSource(device_frequency, thread_audio_client,
+ thread_audio_render_client,
+ thread_audio_clock);
break;
default:
error = true;
@@ -391,11 +411,11 @@ void WASAPIAudioOutputStream::Run() {
}
}
- if (playing && error) {
+ if (playing && error && thread_audio_client) {
// Stop audio rendering since something has gone wrong in our main thread
// loop. Note that, we are still in a "started" state, hence a Stop() call
// is required to join the thread properly.
- audio_client_->Stop();
+ thread_audio_client->Stop();
PLOG(ERROR) << "WASAPI rendering failed.";
}
@@ -405,7 +425,11 @@ void WASAPIAudioOutputStream::Run() {
}
}
-bool WASAPIAudioOutputStream::RenderAudioFromSource(UINT64 device_frequency) {
+bool WASAPIAudioOutputStream::RenderAudioFromSource(
+ UINT64 device_frequency,
+ IAudioClient* thread_audio_client,
+ IAudioRenderClient* thread_audio_render_client,
+ IAudioClock* thread_audio_clock) {
TRACE_EVENT0("audio", "RenderAudioFromSource");
HRESULT hr = S_FALSE;
@@ -420,7 +444,7 @@ bool WASAPIAudioOutputStream::RenderAudioFromSource(UINT64 device_frequency) {
if (share_mode_ == AUDCLNT_SHAREMODE_SHARED) {
// Get the padding value which represents the amount of rendering
// data that is queued up to play in the endpoint buffer.
- hr = audio_client_->GetCurrentPadding(&num_queued_frames);
+ hr = thread_audio_client->GetCurrentPadding(&num_queued_frames);
num_available_frames =
endpoint_buffer_size_frames_ - num_queued_frames;
if (FAILED(hr)) {
@@ -462,8 +486,8 @@ bool WASAPIAudioOutputStream::RenderAudioFromSource(UINT64 device_frequency) {
for (size_t n = 0; n < num_packets; ++n) {
// Grab all available space in the rendering endpoint buffer
// into which the client can write a data packet.
- hr = audio_render_client_->GetBuffer(packet_size_frames_,
- &audio_data);
+ hr =
+ thread_audio_render_client->GetBuffer(packet_size_frames_, &audio_data);
if (FAILED(hr)) {
DLOG(ERROR) << "Failed to use rendering audio buffer: "
<< std::hex << hr;
@@ -477,7 +501,7 @@ bool WASAPIAudioOutputStream::RenderAudioFromSource(UINT64 device_frequency) {
// unit at the render side.
UINT64 position = 0;
uint32 audio_delay_bytes = 0;
- hr = audio_clock_->GetPosition(&position, NULL);
+ hr = thread_audio_clock->GetPosition(&position, NULL);
if (SUCCEEDED(hr)) {
// Stream position of the sample that is currently playing
// through the speaker.
@@ -517,7 +541,7 @@ bool WASAPIAudioOutputStream::RenderAudioFromSource(UINT64 device_frequency) {
// Render silence if we were not able to fill up the buffer totally.
DWORD flags = (num_filled_bytes < packet_size_bytes_) ?
AUDCLNT_BUFFERFLAGS_SILENT : 0;
- audio_render_client_->ReleaseBuffer(packet_size_frames_, flags);
+ thread_audio_render_client->ReleaseBuffer(packet_size_frames_, flags);
num_written_frames_ += packet_size_frames_;
}
@@ -622,4 +646,87 @@ void WASAPIAudioOutputStream::StopThread() {
source_ = NULL;
}
+bool WASAPIAudioOutputStream::MarshalComPointers() {
+ HRESULT hr = CreateStreamOnHGlobal(0, TRUE, &com_stream_);
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "Failed to create stream for marshaling COM pointers.";
+ com_stream_ = NULL;
+ return false;
+ }
+
+ hr = CoMarshalInterface(com_stream_, __uuidof(IAudioClient),
+ audio_client_.get(), MSHCTX_INPROC, NULL,
+ MSHLFLAGS_NORMAL);
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "Marshal failed for IAudioClient: " << std::hex << hr;
+ if (com_stream_) {
+ CoReleaseMarshalData(com_stream_);
+ com_stream_ = NULL;
+ }
+ return false;
+ }
+
+ hr = CoMarshalInterface(com_stream_, __uuidof(IAudioRenderClient),
+ audio_render_client_.get(), MSHCTX_INPROC, NULL,
+ MSHLFLAGS_NORMAL);
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "Marshal failed for IAudioRenderClient: " << std::hex << hr;
+ CoReleaseMarshalData(com_stream_);
+ com_stream_ = NULL;
+ return false;
+ }
+
+ hr =
+ CoMarshalInterface(com_stream_, __uuidof(IAudioClock), audio_clock_.get(),
+ MSHCTX_INPROC, NULL, MSHLFLAGS_NORMAL);
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "Marshal failed for IAudioClock: " << std::hex << hr;
+ CoReleaseMarshalData(com_stream_);
+ com_stream_ = NULL;
+ return false;
+ }
+
+ LARGE_INTEGER pos = {0};
+ hr = com_stream_->Seek(pos, STREAM_SEEK_SET, NULL);
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "Failed to seek IStream for marshaling: " << std::hex << hr;
+ CoReleaseMarshalData(com_stream_);
+ com_stream_ = NULL;
+ return false;
+ }
+
+ return true;
+}
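The error paths above repeat the same cleanup; a hypothetical free-function sketch of it (not part of the patch), which assumes the stream's seek pointer sits at the start of a marshaled data packet, as CoReleaseMarshalData() requires, and which also drops the stream's own reference, something the paths shown here leave out.

#include <objbase.h>
#include <objidl.h>

void ReleaseMarshalingStream(IStream** stream) {
  if (!*stream)
    return;
  // Destroy the pending marshal data packet at the current seek position.
  CoReleaseMarshalData(*stream);
  // Drop the reference handed out by CreateStreamOnHGlobal().
  (*stream)->Release();
  *stream = NULL;
}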
+
+bool WASAPIAudioOutputStream::UnmarshalComPointers(
+ IAudioClient** audio_client,
+ IAudioRenderClient** audio_render_client,
+ IAudioClock** audio_clock) {
+ HRESULT hr = CoUnmarshalInterface(com_stream_, __uuidof(IAudioClient),
+ reinterpret_cast<LPVOID*>(audio_client));
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "Unmarshal failed IAudioClient: " << std::hex << hr;
+ CoReleaseMarshalData(com_stream_);
+ com_stream_ = NULL;
+ return false;
+ }
+
+ hr = CoUnmarshalInterface(com_stream_, __uuidof(IAudioRenderClient),
+ reinterpret_cast<LPVOID*>(audio_render_client));
+ if (FAILED(hr)) {
+ DLOG(ERROR) << "Unmarshal failed IAudioRenderClient: " << std::hex << hr;
+ CoReleaseMarshalData(com_stream_);
+ com_stream_ = NULL;
+ return false;
+ }
+
+ hr = CoUnmarshalInterface(com_stream_, __uuidof(IAudioClock),
+ reinterpret_cast<LPVOID*>(audio_clock));
+ if (FAILED(hr))
+ DLOG(ERROR) << "Unmarshal failed IAudioClock: " << std::hex << hr;
+ CoReleaseMarshalData(com_stream_);
+ com_stream_ = NULL;
+ return SUCCEEDED(hr);
+}
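One more sketch, not shown in this patch: the raw pointers handed back by UnmarshalComPointers() each carry a reference that has to be released on the render thread that unmarshaled them, so Run() would need something like the hypothetical helper below before it returns.

#include <unknwn.h>

// Releases an unmarshaled COM pointer on the thread that unmarshaled it and
// clears the caller's copy.
template <class T>
void ReleaseAndClear(T** ptr) {
  if (*ptr) {
    (*ptr)->Release();
    *ptr = NULL;
  }
}

// At the end of Run(), before the render thread exits:
//   ReleaseAndClear(&thread_audio_clock);
//   ReleaseAndClear(&thread_audio_render_client);
//   ReleaseAndClear(&thread_audio_client);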
+
} // namespace media