| OLD | NEW |
| 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "media/audio/win/audio_low_latency_output_win.h" | 5 #include "media/audio/win/audio_low_latency_output_win.h" |
| 6 | 6 |
| 7 #include <Functiondiscoverykeys_devpkey.h> | 7 #include <Functiondiscoverykeys_devpkey.h> |
| 8 | 8 |
| 9 #include <climits> | 9 #include <climits> |
| 10 | 10 |
| (...skipping 113 matching lines...) |
| 124 WASAPIAudioOutputStream::~WASAPIAudioOutputStream() { | 124 WASAPIAudioOutputStream::~WASAPIAudioOutputStream() { |
| 125 DCHECK_EQ(GetCurrentThreadId(), creating_thread_id_); | 125 DCHECK_EQ(GetCurrentThreadId(), creating_thread_id_); |
| 126 } | 126 } |
| 127 | 127 |
| 128 bool WASAPIAudioOutputStream::Open() { | 128 bool WASAPIAudioOutputStream::Open() { |
| 129 DVLOG(1) << "WASAPIAudioOutputStream::Open()"; | 129 DVLOG(1) << "WASAPIAudioOutputStream::Open()"; |
| 130 DCHECK_EQ(GetCurrentThreadId(), creating_thread_id_); | 130 DCHECK_EQ(GetCurrentThreadId(), creating_thread_id_); |
| 131 if (opened_) | 131 if (opened_) |
| 132 return true; | 132 return true; |
| 133 | 133 |
| 134 DCHECK(!audio_client_.get()); | 134 DCHECK(!audio_client_.Get()); |
| 135 DCHECK(!audio_render_client_.get()); | 135 DCHECK(!audio_render_client_.Get()); |
| 136 | 136 |
| 137 // Will be set to true if we ended up opening the default communications | 137 // Will be set to true if we ended up opening the default communications |
| 138 // device. | 138 // device. |
| 139 bool communications_device = false; | 139 bool communications_device = false; |
| 140 | 140 |
| 141 // Create an IAudioClient interface for the default rendering IMMDevice. | 141 // Create an IAudioClient interface for the default rendering IMMDevice. |
| 142 ScopedComPtr<IAudioClient> audio_client; | 142 ScopedComPtr<IAudioClient> audio_client; |
| 143 if (device_id_.empty()) { | 143 if (device_id_.empty()) { |
| 144 audio_client = CoreAudioUtil::CreateDefaultClient(eRender, device_role_); | 144 audio_client = CoreAudioUtil::CreateDefaultClient(eRender, device_role_); |
| 145 communications_device = (device_role_ == eCommunications); | 145 communications_device = (device_role_ == eCommunications); |
| 146 } else { | 146 } else { |
| 147 ScopedComPtr<IMMDevice> device(CoreAudioUtil::CreateDevice(device_id_)); | 147 ScopedComPtr<IMMDevice> device(CoreAudioUtil::CreateDevice(device_id_)); |
| 148 DLOG_IF(ERROR, !device.get()) << "Failed to open device: " << device_id_; | 148 DLOG_IF(ERROR, !device.Get()) << "Failed to open device: " << device_id_; |
| 149 if (device.get()) | 149 if (device.Get()) |
| 150 audio_client = CoreAudioUtil::CreateClient(device.get()); | 150 audio_client = CoreAudioUtil::CreateClient(device.Get()); |
| 151 } | 151 } |
| 152 | 152 |
| 153 if (!audio_client.get()) | 153 if (!audio_client.Get()) |
| 154 return false; | 154 return false; |
| 155 | 155 |
| 156 // Extra sanity to ensure that the provided device format is still valid. | 156 // Extra sanity to ensure that the provided device format is still valid. |
| 157 if (!CoreAudioUtil::IsFormatSupported(audio_client.get(), share_mode_, | 157 if (!CoreAudioUtil::IsFormatSupported(audio_client.Get(), share_mode_, |
| 158 &format_)) { | 158 &format_)) { |
| 159 LOG(ERROR) << "Audio parameters are not supported."; | 159 LOG(ERROR) << "Audio parameters are not supported."; |
| 160 return false; | 160 return false; |
| 161 } | 161 } |
| 162 | 162 |
| 163 HRESULT hr = S_FALSE; | 163 HRESULT hr = S_FALSE; |
| 164 if (share_mode_ == AUDCLNT_SHAREMODE_SHARED) { | 164 if (share_mode_ == AUDCLNT_SHAREMODE_SHARED) { |
| 165 // Initialize the audio stream between the client and the device in shared | 165 // Initialize the audio stream between the client and the device in shared |
| 166 // mode and using event-driven buffer handling. | 166 // mode and using event-driven buffer handling. |
| 167 hr = CoreAudioUtil::SharedModeInitialize( | 167 hr = CoreAudioUtil::SharedModeInitialize( |
| 168 audio_client.get(), &format_, audio_samples_render_event_.Get(), | 168 audio_client.Get(), &format_, audio_samples_render_event_.Get(), |
| 169 &endpoint_buffer_size_frames_, | 169 &endpoint_buffer_size_frames_, |
| 170 communications_device ? &kCommunicationsSessionId : NULL); | 170 communications_device ? &kCommunicationsSessionId : NULL); |
| 171 if (FAILED(hr)) | 171 if (FAILED(hr)) |
| 172 return false; | 172 return false; |
| 173 | 173 |
| 174 REFERENCE_TIME device_period = 0; | 174 REFERENCE_TIME device_period = 0; |
| 175 if (FAILED(CoreAudioUtil::GetDevicePeriod( | 175 if (FAILED(CoreAudioUtil::GetDevicePeriod( |
| 176 audio_client.get(), AUDCLNT_SHAREMODE_SHARED, &device_period))) { | 176 audio_client.Get(), AUDCLNT_SHAREMODE_SHARED, &device_period))) { |
| 177 return false; | 177 return false; |
| 178 } | 178 } |
| 179 | 179 |
| 180 const int preferred_frames_per_buffer = static_cast<int>( | 180 const int preferred_frames_per_buffer = static_cast<int>( |
| 181 format_.Format.nSamplesPerSec * | 181 format_.Format.nSamplesPerSec * |
| 182 CoreAudioUtil::RefererenceTimeToTimeDelta(device_period) | 182 CoreAudioUtil::RefererenceTimeToTimeDelta(device_period) |
| 183 .InSecondsF() + | 183 .InSecondsF() + |
| 184 0.5); | 184 0.5); |
| 185 | 185 |
| 186 // Packet size should always be an even divisor of the device period for | 186 // Packet size should always be an even divisor of the device period for |
| (...skipping 19 matching lines...) |
| 206 LOG(WARNING) | 206 LOG(WARNING) |
| 207 << "Using WASAPI output with a non-optimal buffer size, glitches from" | 207 << "Using WASAPI output with a non-optimal buffer size, glitches from" |
| 208 << " back to back shared memory reads and partial fills of WASAPI" | 208 << " back to back shared memory reads and partial fills of WASAPI" |
| 209 << " output buffers may occur. Buffer size of " | 209 << " output buffers may occur. Buffer size of " |
| 210 << packet_size_frames_ << " is not an even divisor of " | 210 << packet_size_frames_ << " is not an even divisor of " |
| 211 << preferred_frames_per_buffer; | 211 << preferred_frames_per_buffer; |
| 212 } | 212 } |
| 213 } else { | 213 } else { |
| 214 // TODO(henrika): break out to CoreAudioUtil::ExclusiveModeInitialize() | 214 // TODO(henrika): break out to CoreAudioUtil::ExclusiveModeInitialize() |
| 215 // when removing the enable-exclusive-audio flag. | 215 // when removing the enable-exclusive-audio flag. |
| 216 hr = ExclusiveModeInitialization(audio_client.get(), | 216 hr = ExclusiveModeInitialization(audio_client.Get(), |
| 217 audio_samples_render_event_.Get(), | 217 audio_samples_render_event_.Get(), |
| 218 &endpoint_buffer_size_frames_); | 218 &endpoint_buffer_size_frames_); |
| 219 if (FAILED(hr)) | 219 if (FAILED(hr)) |
| 220 return false; | 220 return false; |
| 221 | 221 |
| 222 // The buffer scheme for exclusive mode streams is not designed for max | 222 // The buffer scheme for exclusive mode streams is not designed for max |
| 223 // flexibility. We only allow a "perfect match" between the packet size set | 223 // flexibility. We only allow a "perfect match" between the packet size set |
| 224 // by the user and the actual endpoint buffer size. | 224 // by the user and the actual endpoint buffer size. |
| 225 if (endpoint_buffer_size_frames_ != packet_size_frames_) { | 225 if (endpoint_buffer_size_frames_ != packet_size_frames_) { |
| 226 LOG(ERROR) << "Bailing out due to non-perfect timing."; | 226 LOG(ERROR) << "Bailing out due to non-perfect timing."; |
| 227 return false; | 227 return false; |
| 228 } | 228 } |
| 229 } | 229 } |
| 230 | 230 |
| 231 // Create an IAudioRenderClient client for an initialized IAudioClient. | 231 // Create an IAudioRenderClient client for an initialized IAudioClient. |
| 232 // The IAudioRenderClient interface enables us to write output data to | 232 // The IAudioRenderClient interface enables us to write output data to |
| 233 // a rendering endpoint buffer. | 233 // a rendering endpoint buffer. |
| 234 ScopedComPtr<IAudioRenderClient> audio_render_client = | 234 ScopedComPtr<IAudioRenderClient> audio_render_client = |
| 235 CoreAudioUtil::CreateRenderClient(audio_client.get()); | 235 CoreAudioUtil::CreateRenderClient(audio_client.Get()); |
| 236 if (!audio_render_client.get()) | 236 if (!audio_render_client.Get()) |
| 237 return false; | 237 return false; |
| 238 | 238 |
| 239 // Store valid COM interfaces. | 239 // Store valid COM interfaces. |
| 240 audio_client_ = audio_client; | 240 audio_client_ = audio_client; |
| 241 audio_render_client_ = audio_render_client; | 241 audio_render_client_ = audio_render_client; |
| 242 | 242 |
| 243 hr = audio_client_->GetService(__uuidof(IAudioClock), | 243 hr = audio_client_->GetService(__uuidof(IAudioClock), |
| 244 audio_clock_.ReceiveVoid()); | 244 audio_clock_.ReceiveVoid()); |
| 245 if (FAILED(hr)) { | 245 if (FAILED(hr)) { |
| 246 LOG(ERROR) << "Failed to get IAudioClock service."; | 246 LOG(ERROR) << "Failed to get IAudioClock service."; |
| (...skipping 13 matching lines...) |
| 260 if (render_thread_) { | 260 if (render_thread_) { |
| 261 CHECK_EQ(callback, source_); | 261 CHECK_EQ(callback, source_); |
| 262 return; | 262 return; |
| 263 } | 263 } |
| 264 | 264 |
| 265 source_ = callback; | 265 source_ = callback; |
| 266 | 266 |
| 267 // Ensure that the endpoint buffer is prepared with silence. | 267 // Ensure that the endpoint buffer is prepared with silence. |
| 268 if (share_mode_ == AUDCLNT_SHAREMODE_SHARED) { | 268 if (share_mode_ == AUDCLNT_SHAREMODE_SHARED) { |
| 269 if (!CoreAudioUtil::FillRenderEndpointBufferWithSilence( | 269 if (!CoreAudioUtil::FillRenderEndpointBufferWithSilence( |
| 270 audio_client_.get(), audio_render_client_.get())) { | 270 audio_client_.Get(), audio_render_client_.Get())) { |
| 271 LOG(ERROR) << "Failed to prepare endpoint buffers with silence."; | 271 LOG(ERROR) << "Failed to prepare endpoint buffers with silence."; |
| 272 callback->OnError(this); | 272 callback->OnError(this); |
| 273 return; | 273 return; |
| 274 } | 274 } |
| 275 } | 275 } |
| 276 num_written_frames_ = endpoint_buffer_size_frames_; | 276 num_written_frames_ = endpoint_buffer_size_frames_; |
| 277 | 277 |
| 278 // Create and start the thread that will drive the rendering by waiting for | 278 // Create and start the thread that will drive the rendering by waiting for |
| 279 // render events. | 279 // render events. |
| 280 render_thread_.reset(new base::DelegateSimpleThread( | 280 render_thread_.reset(new base::DelegateSimpleThread( |
| (...skipping 368 matching lines...) |
| 649 | 649 |
| 650 // Ensure that we don't quit the main thread loop immediately next | 650 // Ensure that we don't quit the main thread loop immediately next |
| 651 // time Start() is called. | 651 // time Start() is called. |
| 652 ResetEvent(stop_render_event_.Get()); | 652 ResetEvent(stop_render_event_.Get()); |
| 653 } | 653 } |
| 654 | 654 |
| 655 source_ = NULL; | 655 source_ = NULL; |
| 656 } | 656 } |
| 657 | 657 |
| 658 } // namespace media | 658 } // namespace media |
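
For context on the shared-mode path in this change: preferred_frames_per_buffer is derived by rounding the device period (a REFERENCE_TIME, i.e. 100-nanosecond units) to a whole number of frames at the mix format's sample rate. A minimal standalone sketch of that arithmetic, using hypothetical values and no CoreAudioUtil calls:

```cpp
#include <cstdint>
#include <cstdio>

// REFERENCE_TIME is expressed in 100-nanosecond units (as in audioclient.h).
using ReferenceTime = int64_t;

int PreferredFramesPerBuffer(ReferenceTime device_period, int sample_rate) {
  // Convert the period to seconds and round to the nearest whole frame count,
  // mirroring the "+ 0.5" rounding in the diff above.
  const double period_seconds = device_period / 1e7;
  return static_cast<int>(sample_rate * period_seconds + 0.5);
}

int main() {
  // Hypothetical example: a 10 ms shared-mode period at 48000 Hz
  // yields 480 frames per device buffer.
  const ReferenceTime kTenMs = 100000;  // 10 ms in 100-ns units.
  std::printf("%d\n", PreferredFramesPerBuffer(kTenMs, 48000));  // prints 480
  return 0;
}
```

With the common 10 ms shared-mode period, a 48 kHz stream prefers 480-frame device buffers, which is why the code warns when packet_size_frames_ is not an even divisor of that number.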
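The IAudioClock service requested via GetService() in Open() lets the stream read how much audio the device has actually played, which feeds delay estimation. A hedged sketch of that pattern using only documented IAudioClock methods (this is not the delay code from this file):

```cpp
#include <windows.h>
#include <audioclient.h>

// Returns the stream position in seconds, or a negative value on failure.
// Sketch only; the real delay estimation also accounts for frames still
// queued in the endpoint buffer.
double GetPlayedSeconds(IAudioClock* audio_clock) {
  UINT64 device_frequency = 0;
  UINT64 position = 0;
  if (FAILED(audio_clock->GetFrequency(&device_frequency)) ||
      FAILED(audio_clock->GetPosition(&position, nullptr)) ||
      device_frequency == 0) {
    return -1.0;
  }
  // GetPosition() reports in units of the frequency returned by
  // GetFrequency(), so the ratio is elapsed playback time in seconds.
  return static_cast<double>(position) / device_frequency;
}
```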
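Finally, CoreAudioUtil::FillRenderEndpointBufferWithSilence() is called in Start() so the endpoint has valid (silent) data before the first render event fires. As an assumption about what such a helper does, the standard WASAPI idiom looks roughly like the hypothetical FillWithSilence() below, not Chromium's actual implementation:

```cpp
#include <windows.h>
#include <audioclient.h>

// Pre-fill the unused portion of the endpoint buffer with silence.
// Sketch of the standard WASAPI idiom; error handling kept minimal.
bool FillWithSilence(IAudioClient* client, IAudioRenderClient* render_client) {
  UINT32 buffer_size = 0;
  UINT32 padding = 0;
  if (FAILED(client->GetBufferSize(&buffer_size)) ||
      FAILED(client->GetCurrentPadding(&padding))) {
    return false;
  }
  const UINT32 frames_to_fill = buffer_size - padding;
  BYTE* data = nullptr;
  if (FAILED(render_client->GetBuffer(frames_to_fill, &data)))
    return false;
  // AUDCLNT_BUFFERFLAGS_SILENT tells the audio engine to treat the buffer
  // as silence regardless of its contents, so no memset is needed.
  return SUCCEEDED(render_client->ReleaseBuffer(frames_to_fill,
                                                AUDCLNT_BUFFERFLAGS_SILENT));
}
```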