OLD | NEW |
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "media/audio/win/audio_low_latency_output_win.h" | 5 #include "media/audio/win/audio_low_latency_output_win.h" |
6 | 6 |
7 #include <Functiondiscoverykeys_devpkey.h> | 7 #include <Functiondiscoverykeys_devpkey.h> |
8 | 8 |
9 #include "base/command_line.h" | 9 #include "base/command_line.h" |
10 #include "base/debug/trace_event.h" | 10 #include "base/debug/trace_event.h" |
(...skipping 53 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
64 packet_size_frames_(0), | 64 packet_size_frames_(0), |
65 packet_size_bytes_(0), | 65 packet_size_bytes_(0), |
66 endpoint_buffer_size_frames_(0), | 66 endpoint_buffer_size_frames_(0), |
67 device_id_(device_id), | 67 device_id_(device_id), |
68 device_role_(device_role), | 68 device_role_(device_role), |
69 share_mode_(GetShareMode()), | 69 share_mode_(GetShareMode()), |
70 num_written_frames_(0), | 70 num_written_frames_(0), |
71 source_(NULL), | 71 source_(NULL), |
72 audio_bus_(AudioBus::Create(params)) { | 72 audio_bus_(AudioBus::Create(params)) { |
73 DCHECK(manager_); | 73 DCHECK(manager_); |
| 74 |
74 VLOG(1) << "WASAPIAudioOutputStream::WASAPIAudioOutputStream()"; | 75 VLOG(1) << "WASAPIAudioOutputStream::WASAPIAudioOutputStream()"; |
75 VLOG_IF(1, share_mode_ == AUDCLNT_SHAREMODE_EXCLUSIVE) | 76 VLOG_IF(1, share_mode_ == AUDCLNT_SHAREMODE_EXCLUSIVE) |
76 << "Core Audio (WASAPI) EXCLUSIVE MODE is enabled."; | 77 << "Core Audio (WASAPI) EXCLUSIVE MODE is enabled."; |
77 | 78 |
78 // Load the Avrt DLL if not already loaded. Required to support MMCSS. | 79 // Load the Avrt DLL if not already loaded. Required to support MMCSS. |
79 bool avrt_init = avrt::Initialize(); | 80 bool avrt_init = avrt::Initialize(); |
80 DCHECK(avrt_init) << "Failed to load the avrt.dll"; | 81 DCHECK(avrt_init) << "Failed to load the avrt.dll"; |
81 | 82 |
82 // Set up the desired render format specified by the client. We use the | 83 // Set up the desired render format specified by the client. We use the |
83 // WAVE_FORMAT_EXTENSIBLE structure to ensure that multiple channel ordering | 84 // WAVE_FORMAT_EXTENSIBLE structure to ensure that multiple channel ordering |
(...skipping 29 matching lines...) Expand all Loading... |
113 // Create the event which the audio engine will signal each time | 114 // Create the event which the audio engine will signal each time |
114 // a buffer becomes ready to be processed by the client. | 115 // a buffer becomes ready to be processed by the client. |
115 audio_samples_render_event_.Set(CreateEvent(NULL, FALSE, FALSE, NULL)); | 116 audio_samples_render_event_.Set(CreateEvent(NULL, FALSE, FALSE, NULL)); |
116 DCHECK(audio_samples_render_event_.IsValid()); | 117 DCHECK(audio_samples_render_event_.IsValid()); |
117 | 118 |
118 // Create the event which will be set in Stop() when rendering shall stop. | 119 // Create the event which will be set in Stop() when rendering shall stop. |
119 stop_render_event_.Set(CreateEvent(NULL, FALSE, FALSE, NULL)); | 120 stop_render_event_.Set(CreateEvent(NULL, FALSE, FALSE, NULL)); |
120 DCHECK(stop_render_event_.IsValid()); | 121 DCHECK(stop_render_event_.IsValid()); |
121 } | 122 } |
122 | 123 |
123 WASAPIAudioOutputStream::~WASAPIAudioOutputStream() {} | 124 WASAPIAudioOutputStream::~WASAPIAudioOutputStream() { |
| 125 DCHECK_EQ(GetCurrentThreadId(), creating_thread_id_); |
| 126 } |
124 | 127 |
125 bool WASAPIAudioOutputStream::Open() { | 128 bool WASAPIAudioOutputStream::Open() { |
126 VLOG(1) << "WASAPIAudioOutputStream::Open()"; | 129 VLOG(1) << "WASAPIAudioOutputStream::Open()"; |
127 DCHECK_EQ(GetCurrentThreadId(), creating_thread_id_); | 130 DCHECK_EQ(GetCurrentThreadId(), creating_thread_id_); |
128 if (opened_) | 131 if (opened_) |
129 return true; | 132 return true; |
130 | 133 |
| 134 DCHECK(!audio_client_); |
| 135 DCHECK(!audio_render_client_); |
| 136 |
| 137 // Will be set to true if we ended up opening the default communications |
| 138 // device. |
| 139 bool communications_device = false; |
| 140 |
131 // Create an IAudioClient interface for the default rendering IMMDevice. | 141 // Create an IAudioClient interface for the default rendering IMMDevice. |
132 ScopedComPtr<IAudioClient> audio_client; | 142 ScopedComPtr<IAudioClient> audio_client; |
133 if (device_id_.empty() || | 143 if (device_id_.empty() || |
134 CoreAudioUtil::DeviceIsDefault(eRender, device_role_, device_id_)) { | 144 CoreAudioUtil::DeviceIsDefault(eRender, device_role_, device_id_)) { |
135 audio_client = CoreAudioUtil::CreateDefaultClient(eRender, device_role_); | 145 audio_client = CoreAudioUtil::CreateDefaultClient(eRender, device_role_); |
| 146 communications_device = (device_role_ == eCommunications); |
136 } else { | 147 } else { |
137 ScopedComPtr<IMMDevice> device(CoreAudioUtil::CreateDevice(device_id_)); | 148 ScopedComPtr<IMMDevice> device(CoreAudioUtil::CreateDevice(device_id_)); |
138 DLOG_IF(ERROR, !device) << "Failed to open device: " << device_id_; | 149 DLOG_IF(ERROR, !device) << "Failed to open device: " << device_id_; |
139 if (device) | 150 if (device) |
140 audio_client = CoreAudioUtil::CreateClient(device); | 151 audio_client = CoreAudioUtil::CreateClient(device); |
141 } | 152 } |
142 | 153 |
143 if (!audio_client) | 154 if (!audio_client) |
144 return false; | 155 return false; |
145 | 156 |
146 // Extra sanity to ensure that the provided device format is still valid. | 157 // Extra sanity to ensure that the provided device format is still valid. |
147 if (!CoreAudioUtil::IsFormatSupported(audio_client, | 158 if (!CoreAudioUtil::IsFormatSupported(audio_client, |
148 share_mode_, | 159 share_mode_, |
149 &format_)) { | 160 &format_)) { |
150 LOG(ERROR) << "Audio parameters are not supported."; | 161 LOG(ERROR) << "Audio parameters are not supported."; |
151 return false; | 162 return false; |
152 } | 163 } |
153 | 164 |
154 HRESULT hr = S_FALSE; | 165 HRESULT hr = S_FALSE; |
155 if (share_mode_ == AUDCLNT_SHAREMODE_SHARED) { | 166 if (share_mode_ == AUDCLNT_SHAREMODE_SHARED) { |
156 // Initialize the audio stream between the client and the device in shared | 167 // Initialize the audio stream between the client and the device in shared |
157 // mode and using event-driven buffer handling. | 168 // mode and using event-driven buffer handling. |
158 hr = CoreAudioUtil::SharedModeInitialize( | 169 hr = CoreAudioUtil::SharedModeInitialize( |
159 audio_client, &format_, audio_samples_render_event_.Get(), | 170 audio_client, &format_, audio_samples_render_event_.Get(), |
160 &endpoint_buffer_size_frames_); | 171 &endpoint_buffer_size_frames_, |
| 172 communications_device ? &kCommunicationsSessionId : NULL); |
161 if (FAILED(hr)) | 173 if (FAILED(hr)) |
162 return false; | 174 return false; |
163 | 175 |
164 // We know from experience that the best possible callback sequence is | 176 // We know from experience that the best possible callback sequence is |
165 // achieved when the packet size (given by the native device period) | 177 // achieved when the packet size (given by the native device period) |
166 // is an even divisor of the endpoint buffer size. | 178 // is an even divisor of the endpoint buffer size. |
167 // Examples: 48kHz => 960 % 480, 44.1kHz => 896 % 448 or 882 % 441. | 179 // Examples: 48kHz => 960 % 480, 44.1kHz => 896 % 448 or 882 % 441. |
168 if (endpoint_buffer_size_frames_ % packet_size_frames_ != 0) { | 180 if (endpoint_buffer_size_frames_ % packet_size_frames_ != 0) { |
169 LOG(ERROR) | 181 LOG(ERROR) |
170 << "Bailing out due to non-perfect timing. Buffer size of " | 182 << "Bailing out due to non-perfect timing. Buffer size of " |
(...skipping 20 matching lines...) Expand all Loading... |
191 } | 203 } |
192 | 204 |
193 // Create an IAudioRenderClient client for an initialized IAudioClient. | 205 // Create an IAudioRenderClient client for an initialized IAudioClient. |
194 // The IAudioRenderClient interface enables us to write output data to | 206 // The IAudioRenderClient interface enables us to write output data to |
195 // a rendering endpoint buffer. | 207 // a rendering endpoint buffer. |
196 ScopedComPtr<IAudioRenderClient> audio_render_client = | 208 ScopedComPtr<IAudioRenderClient> audio_render_client = |
197 CoreAudioUtil::CreateRenderClient(audio_client); | 209 CoreAudioUtil::CreateRenderClient(audio_client); |
198 if (!audio_render_client) | 210 if (!audio_render_client) |
199 return false; | 211 return false; |
200 | 212 |
201 // Store valid COM interfaces. | 213 // Store valid COM interfaces. |
202 audio_client_ = audio_client; | 214 audio_client_ = audio_client; |
203 audio_render_client_ = audio_render_client; | 215 audio_render_client_ = audio_render_client; |
204 | 216 |
205 hr = audio_client_->GetService(__uuidof(IAudioClock), | 217 hr = audio_client_->GetService(__uuidof(IAudioClock), |
206 audio_clock_.ReceiveVoid()); | 218 audio_clock_.ReceiveVoid()); |
207 if (FAILED(hr)) { | 219 if (FAILED(hr)) { |
208 LOG(ERROR) << "Failed to get IAudioClock service."; | 220 LOG(ERROR) << "Failed to get IAudioClock service."; |
209 return false; | 221 return false; |
210 } | 222 } |
211 | 223 |
(...skipping 396 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
608 | 620 |
609 // Ensure that we don't quit the main thread loop immediately next | 621 // Ensure that we don't quit the main thread loop immediately next |
610 // time Start() is called. | 622 // time Start() is called. |
611 ResetEvent(stop_render_event_.Get()); | 623 ResetEvent(stop_render_event_.Get()); |
612 } | 624 } |
613 | 625 |
614 source_ = NULL; | 626 source_ = NULL; |
615 } | 627 } |
616 | 628 |
617 } // namespace media | 629 } // namespace media |
OLD | NEW |