OLD | NEW |
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "media/audio/win/audio_unified_win.h" | 5 #include "media/audio/win/audio_unified_win.h" |
6 | 6 |
7 #include <Functiondiscoverykeys_devpkey.h> | 7 #include <Functiondiscoverykeys_devpkey.h> |
8 | 8 |
9 #include "base/debug/trace_event.h" | 9 #include "base/debug/trace_event.h" |
10 #include "base/time.h" | 10 #include "base/time.h" |
11 #include "base/win/scoped_com_initializer.h" | 11 #include "base/win/scoped_com_initializer.h" |
| 12 #include "media/audio/audio_util.h" |
12 #include "media/audio/win/audio_manager_win.h" | 13 #include "media/audio/win/audio_manager_win.h" |
13 #include "media/audio/win/avrt_wrapper_win.h" | 14 #include "media/audio/win/avrt_wrapper_win.h" |
14 #include "media/audio/win/core_audio_util_win.h" | 15 #include "media/audio/win/core_audio_util_win.h" |
15 | 16 |
16 using base::win::ScopedComPtr; | 17 using base::win::ScopedComPtr; |
17 using base::win::ScopedCOMInitializer; | 18 using base::win::ScopedCOMInitializer; |
18 using base::win::ScopedCoMem; | 19 using base::win::ScopedCoMem; |
19 | 20 |
20 // Time in milliseconds between two successive delay measurements. | 21 // Time in milliseconds between two successive delay measurements. |
21 // We save resources by not updating the delay estimates for each capture | 22 // We save resources by not updating the delay estimates for each capture |
(...skipping 47 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
69 | 70 |
70 namespace media { | 71 namespace media { |
71 | 72 |
72 WASAPIUnifiedStream::WASAPIUnifiedStream(AudioManagerWin* manager, | 73 WASAPIUnifiedStream::WASAPIUnifiedStream(AudioManagerWin* manager, |
73 const AudioParameters& params) | 74 const AudioParameters& params) |
74 : creating_thread_id_(base::PlatformThread::CurrentId()), | 75 : creating_thread_id_(base::PlatformThread::CurrentId()), |
75 manager_(manager), | 76 manager_(manager), |
76 share_mode_(CoreAudioUtil::GetShareMode()), | 77 share_mode_(CoreAudioUtil::GetShareMode()), |
77 audio_io_thread_(NULL), | 78 audio_io_thread_(NULL), |
78 opened_(false), | 79 opened_(false), |
| 80 volume_(1.0), |
79 endpoint_render_buffer_size_frames_(0), | 81 endpoint_render_buffer_size_frames_(0), |
80 endpoint_capture_buffer_size_frames_(0), | 82 endpoint_capture_buffer_size_frames_(0), |
81 num_written_frames_(0), | 83 num_written_frames_(0), |
82 total_delay_ms_(0.0), | 84 total_delay_ms_(0.0), |
83 source_(NULL), | 85 source_(NULL), |
84 capture_bus_(AudioBus::Create(params)), | 86 capture_bus_(AudioBus::Create(params)), |
85 render_bus_(AudioBus::Create(params)) { | 87 render_bus_(AudioBus::Create(params)) { |
86 DCHECK(manager_); | 88 DCHECK(manager_); |
87 | 89 |
88 DVLOG_IF(1, !HasUnifiedDefaultIO()) << "Unified audio I/O is not supported."; | 90 DVLOG_IF(1, !HasUnifiedDefaultIO()) << "Unified audio I/O is not supported."; |
(...skipping 150 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
239 } | 241 } |
240 | 242 |
241 // Start input streaming data between the endpoint buffer and the audio | 243 // Start input streaming data between the endpoint buffer and the audio |
242 // engine. | 244 // engine. |
243 HRESULT hr = audio_input_client_->Start(); | 245 HRESULT hr = audio_input_client_->Start(); |
244 if (FAILED(hr)) { | 246 if (FAILED(hr)) { |
245 StopAndJoinThread(hr); | 247 StopAndJoinThread(hr); |
246 return; | 248 return; |
247 } | 249 } |
248 | 250 |
249 // Reset the counter for number of rendered frames taking into account the | 251 // Ensure that the endpoint buffer is prepared with silence. |
250 // fact that we always initialize the render side with silence. | 252 if (share_mode_ == AUDCLNT_SHAREMODE_SHARED) { |
251 UINT32 num_queued_frames = 0; | 253 if (!CoreAudioUtil::FillRenderEndpointBufferWithSilence( |
252 audio_output_client_->GetCurrentPadding(&num_queued_frames); | 254 audio_output_client_, audio_render_client_)) { |
253 DCHECK_EQ(num_queued_frames, endpoint_render_buffer_size_frames_); | 255 DLOG(WARNING) << "Failed to prepare endpoint buffers with silence."; |
254 num_written_frames_ = num_queued_frames; | 256 return; |
| 257 } |
| 258 } |
| 259 num_written_frames_ = endpoint_render_buffer_size_frames_; |
255 | 260 |
256 // Start output streaming data between the endpoint buffer and the audio | 261 // Start output streaming data between the endpoint buffer and the audio |
257 // engine. | 262 // engine. |
258 hr = audio_output_client_->Start(); | 263 hr = audio_output_client_->Start(); |
259 if (FAILED(hr)) { | 264 if (FAILED(hr)) { |
260 StopAndJoinThread(hr); | 265 StopAndJoinThread(hr); |
261 return; | 266 return; |
262 } | 267 } |
263 } | 268 } |
264 | 269 |
(...skipping 56 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
321 // It is valid to call Close() before calling open or Start(). | 326 // It is valid to call Close() before calling open or Start(). |
322 // It is also valid to call Close() after Start() has been called. | 327 // It is also valid to call Close() after Start() has been called. |
323 Stop(); | 328 Stop(); |
324 | 329 |
325 // Inform the audio manager that we have been closed. This will cause our | 330 // Inform the audio manager that we have been closed. This will cause our |
326 // destruction. | 331 // destruction. |
327 manager_->ReleaseOutputStream(this); | 332 manager_->ReleaseOutputStream(this); |
328 } | 333 } |
329 | 334 |
void WASAPIUnifiedStream::SetVolume(double volume) {
  DVLOG(1) << "SetVolume(volume=" << volume << ")";
  // Valid software volume is [0.0, 1.0]; out-of-range requests are
  // silently ignored rather than clamped.
  // NOTE(review): confirm callers never depend on clamping behavior.
  if (volume < 0 || volume > 1)
    return;
  // Cache the level; it is applied as an in-place scaling of the render
  // buffer (media::AdjustVolume) on the audio I/O thread.
  volume_ = volume;
}
333 | 341 |
334 void WASAPIUnifiedStream::GetVolume(double* volume) { | 342 void WASAPIUnifiedStream::GetVolume(double* volume) { |
335 NOTIMPLEMENTED(); | 343 DVLOG(1) << "GetVolume()"; |
| 344 *volume = static_cast<double>(volume_); |
336 } | 345 } |
337 | 346 |
338 // static | 347 // static |
339 bool WASAPIUnifiedStream::HasUnifiedDefaultIO() { | 348 bool WASAPIUnifiedStream::HasUnifiedDefaultIO() { |
340 AudioParameters in_params; | 349 AudioParameters in_params; |
341 HRESULT hr = CoreAudioUtil::GetPreferredAudioParameters(eCapture, eConsole, | 350 HRESULT hr = CoreAudioUtil::GetPreferredAudioParameters(eCapture, eConsole, |
342 &in_params); | 351 &in_params); |
343 if (FAILED(hr)) | 352 if (FAILED(hr)) |
344 return false; | 353 return false; |
345 | 354 |
(...skipping 170 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
516 if (FAILED(hr)) { | 525 if (FAILED(hr)) { |
517 DLOG(ERROR) << "Failed to access render buffer"; | 526 DLOG(ERROR) << "Failed to access render buffer"; |
518 continue; | 527 continue; |
519 } | 528 } |
520 | 529 |
521 // Convert the audio bus content to interleaved integer data using | 530 // Convert the audio bus content to interleaved integer data using |
522 // |audio_data| as destination. | 531 // |audio_data| as destination. |
523 render_bus_->ToInterleaved( | 532 render_bus_->ToInterleaved( |
524 packet_size_frames_, bytes_per_sample, audio_data); | 533 packet_size_frames_, bytes_per_sample, audio_data); |
525 | 534 |
| 535 // Perform in-place, software-volume adjustments. |
| 536 media::AdjustVolume(audio_data, |
| 537 frames_filled * format_.Format.nBlockAlign, |
| 538 render_bus_->channels(), |
| 539 bytes_per_sample, |
| 540 volume_); |
| 541 |
526 // Release the buffer space acquired in the GetBuffer() call. | 542 // Release the buffer space acquired in the GetBuffer() call. |
527 audio_render_client_->ReleaseBuffer(packet_size_frames_, 0); | 543 audio_render_client_->ReleaseBuffer(packet_size_frames_, 0); |
528 DLOG_IF(ERROR, FAILED(hr)) << "Failed to release render buffer"; | 544 DLOG_IF(ERROR, FAILED(hr)) << "Failed to release render buffer"; |
529 } | 545 } |
530 break; | 546 break; |
531 default: | 547 default: |
532 error = true; | 548 error = true; |
533 break; | 549 break; |
534 } | 550 } |
535 } | 551 } |
(...skipping 24 matching lines...) Expand all Loading... |
// Stops the audio I/O thread, waits for it to terminate, and then reports
// |err| through the stream's error path. Must be called on the thread that
// created the stream.
void WASAPIUnifiedStream::StopAndJoinThread(HRESULT err) {
  CHECK(GetCurrentThreadId() == creating_thread_id_);
  DCHECK(audio_io_thread_.get());
  // Signal the event the I/O thread waits on so its run loop exits, then
  // block until the thread has fully terminated before releasing it.
  SetEvent(stop_streaming_event_.Get());
  audio_io_thread_->Join();
  audio_io_thread_.reset();
  // Report the failure only after the I/O thread is gone.
  // NOTE(review): HandleError's exact reporting path is outside this view.
  HandleError(err);
}
568 | 584 |
569 } // namespace media | 585 } // namespace media |
OLD | NEW |