Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "media/audio/win/audio_unified_win.h" | 5 #include "media/audio/win/audio_unified_win.h" |
| 6 | 6 |
| 7 #include <Functiondiscoverykeys_devpkey.h> | 7 #include <Functiondiscoverykeys_devpkey.h> |
| 8 | 8 |
| 9 #include "base/debug/trace_event.h" | 9 #include "base/debug/trace_event.h" |
| 10 #include "base/time.h" | 10 #include "base/time.h" |
| 11 #include "base/win/scoped_com_initializer.h" | 11 #include "base/win/scoped_com_initializer.h" |
| 12 #include "media/audio/audio_util.h" | |
| 12 #include "media/audio/win/audio_manager_win.h" | 13 #include "media/audio/win/audio_manager_win.h" |
| 13 #include "media/audio/win/avrt_wrapper_win.h" | 14 #include "media/audio/win/avrt_wrapper_win.h" |
| 14 #include "media/audio/win/core_audio_util_win.h" | 15 #include "media/audio/win/core_audio_util_win.h" |
| 15 | 16 |
| 16 using base::win::ScopedComPtr; | 17 using base::win::ScopedComPtr; |
| 17 using base::win::ScopedCOMInitializer; | 18 using base::win::ScopedCOMInitializer; |
| 18 using base::win::ScopedCoMem; | 19 using base::win::ScopedCoMem; |
| 19 | 20 |
| 20 // Time in milliseconds between two successive delay measurements. | 21 // Time in milliseconds between two successive delay measurements. |
| 21 // We save resources by not updating the delay estimates for each capture | 22 // We save resources by not updating the delay estimates for each capture |
| (...skipping 47 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 69 | 70 |
| 70 namespace media { | 71 namespace media { |
| 71 | 72 |
| 72 WASAPIUnifiedStream::WASAPIUnifiedStream(AudioManagerWin* manager, | 73 WASAPIUnifiedStream::WASAPIUnifiedStream(AudioManagerWin* manager, |
| 73 const AudioParameters& params) | 74 const AudioParameters& params) |
| 74 : creating_thread_id_(base::PlatformThread::CurrentId()), | 75 : creating_thread_id_(base::PlatformThread::CurrentId()), |
| 75 manager_(manager), | 76 manager_(manager), |
| 76 share_mode_(CoreAudioUtil::GetShareMode()), | 77 share_mode_(CoreAudioUtil::GetShareMode()), |
| 77 audio_io_thread_(NULL), | 78 audio_io_thread_(NULL), |
| 78 opened_(false), | 79 opened_(false), |
| 80 volume_(1.0), | |
| 79 endpoint_render_buffer_size_frames_(0), | 81 endpoint_render_buffer_size_frames_(0), |
| 80 endpoint_capture_buffer_size_frames_(0), | 82 endpoint_capture_buffer_size_frames_(0), |
| 81 num_written_frames_(0), | 83 num_written_frames_(0), |
| 82 total_delay_ms_(0.0), | 84 total_delay_ms_(0.0), |
| 83 source_(NULL), | 85 source_(NULL), |
| 84 capture_bus_(AudioBus::Create(params)), | 86 capture_bus_(AudioBus::Create(params)), |
| 85 render_bus_(AudioBus::Create(params)) { | 87 render_bus_(AudioBus::Create(params)) { |
| 86 DCHECK(manager_); | 88 DCHECK(manager_); |
| 87 | 89 |
| 88 DVLOG_IF(1, !HasUnifiedDefaultIO()) << "Unified audio I/O is not supported."; | 90 DVLOG_IF(1, !HasUnifiedDefaultIO()) << "Unified audio I/O is not supported."; |
| (...skipping 150 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 239 } | 241 } |
| 240 | 242 |
| 241 // Start input streaming data between the endpoint buffer and the audio | 243 // Start input streaming data between the endpoint buffer and the audio |
| 242 // engine. | 244 // engine. |
| 243 HRESULT hr = audio_input_client_->Start(); | 245 HRESULT hr = audio_input_client_->Start(); |
| 244 if (FAILED(hr)) { | 246 if (FAILED(hr)) { |
| 245 StopAndJoinThread(hr); | 247 StopAndJoinThread(hr); |
| 246 return; | 248 return; |
| 247 } | 249 } |
| 248 | 250 |
| 249 // Reset the counter for number of rendered frames taking into account the | 251 // Ensure that the endpoint buffer is prepared with silence. |
| 250 // fact that we always initialize the render side with silence. | 252 if (share_mode_ == AUDCLNT_SHAREMODE_SHARED) { |
| 251 UINT32 num_queued_frames = 0; | 253 if (!CoreAudioUtil::FillRenderEndpointBufferWithSilence( |
| 252 audio_output_client_->GetCurrentPadding(&num_queued_frames); | 254 audio_output_client_, audio_render_client_)) { |
| 253 DCHECK_EQ(num_queued_frames, endpoint_render_buffer_size_frames_); | 255 DLOG(WARNING) << "Failed to prepare endpoint buffers with silence."; |
| 254 num_written_frames_ = num_queued_frames; | 256 return; |
| 257 } | |
| 258 } | |
| 259 num_written_frames_ = endpoint_render_buffer_size_frames_; | |
| 255 | 260 |
| 256 // Start output streaming data between the endpoint buffer and the audio | 261 // Start output streaming data between the endpoint buffer and the audio |
| 257 // engine. | 262 // engine. |
| 258 hr = audio_output_client_->Start(); | 263 hr = audio_output_client_->Start(); |
| 259 if (FAILED(hr)) { | 264 if (FAILED(hr)) { |
| 260 StopAndJoinThread(hr); | 265 StopAndJoinThread(hr); |
| 261 return; | 266 return; |
| 262 } | 267 } |
| 263 } | 268 } |
| 264 | 269 |
| (...skipping 56 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 321 // It is valid to call Close() before calling Open() or Start(). | 326 // It is valid to call Close() before calling Open() or Start(). |
| 322 // It is also valid to call Close() after Start() has been called. | 327 // It is also valid to call Close() after Start() has been called. |
| 323 Stop(); | 328 Stop(); |
| 324 | 329 |
| 325 // Inform the audio manager that we have been closed. This will cause our | 330 // Inform the audio manager that we have been closed. This will cause our |
| 326 // destruction. | 331 // destruction. |
| 327 manager_->ReleaseOutputStream(this); | 332 manager_->ReleaseOutputStream(this); |
| 328 } | 333 } |
| 329 | 334 |
| 330 void WASAPIUnifiedStream::SetVolume(double volume) { | 335 void WASAPIUnifiedStream::SetVolume(double volume) { |
| 331 NOTIMPLEMENTED(); | 336 DVLOG(1) << "SetVolume(volume=" << volume << ")"; |
| 337 float volume_float = static_cast<float>(volume); | |
|
tommi (sloooow) - chröme
2013/02/01 11:57:53
prefer to consistently use double for volume.
henrika (OOO until Aug 14)
2013/02/01 12:23:52
Done.
| |
| 338 if (volume_float < 0.0f || volume_float > 1.0f) { | |
|
tommi (sloooow) - chröme
2013/02/01 11:57:53
no {}
henrika (OOO until Aug 14)
2013/02/01 12:23:52
Done.
| |
| 339 return; | |
| 340 } | |
| 341 volume_ = volume_float; | |
| 332 } | 342 } |
| 333 | 343 |
| 334 void WASAPIUnifiedStream::GetVolume(double* volume) { | 344 void WASAPIUnifiedStream::GetVolume(double* volume) { |
| 335 NOTIMPLEMENTED(); | 345 DVLOG(1) << "GetVolume()"; |
| 346 *volume = static_cast<double>(volume_); | |
| 336 } | 347 } |
| 337 | 348 |
| 338 // static | 349 // static |
| 339 bool WASAPIUnifiedStream::HasUnifiedDefaultIO() { | 350 bool WASAPIUnifiedStream::HasUnifiedDefaultIO() { |
| 340 AudioParameters in_params; | 351 AudioParameters in_params; |
| 341 HRESULT hr = CoreAudioUtil::GetPreferredAudioParameters(eCapture, eConsole, | 352 HRESULT hr = CoreAudioUtil::GetPreferredAudioParameters(eCapture, eConsole, |
| 342 &in_params); | 353 &in_params); |
| 343 if (FAILED(hr)) | 354 if (FAILED(hr)) |
| 344 return false; | 355 return false; |
| 345 | 356 |
| (...skipping 170 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 516 if (FAILED(hr)) { | 527 if (FAILED(hr)) { |
| 517 DLOG(ERROR) << "Failed to access render buffer"; | 528 DLOG(ERROR) << "Failed to access render buffer"; |
| 518 continue; | 529 continue; |
| 519 } | 530 } |
| 520 | 531 |
| 521 // Convert the audio bus content to interleaved integer data using | 532 // Convert the audio bus content to interleaved integer data using |
| 522 // |audio_data| as destination. | 533 // |audio_data| as destination. |
| 523 render_bus_->ToInterleaved( | 534 render_bus_->ToInterleaved( |
| 524 packet_size_frames_, bytes_per_sample, audio_data); | 535 packet_size_frames_, bytes_per_sample, audio_data); |
| 525 | 536 |
| 537 // Perform in-place, software-volume adjustments. | |
| 538 media::AdjustVolume(audio_data, | |
| 539 frames_filled * format_.Format.nBlockAlign, | |
| 540 render_bus_->channels(), | |
| 541 bytes_per_sample, | |
| 542 volume_); | |
| 543 | |
| 526 // Release the buffer space acquired in the GetBuffer() call. | 544 // Release the buffer space acquired in the GetBuffer() call. |
| 527 audio_render_client_->ReleaseBuffer(packet_size_frames_, 0); | 545 audio_render_client_->ReleaseBuffer(packet_size_frames_, 0); |
| 528 DLOG_IF(ERROR, FAILED(hr)) << "Failed to release render buffer"; | 546 DLOG_IF(ERROR, FAILED(hr)) << "Failed to release render buffer"; |
| 529 } | 547 } |
| 530 break; | 548 break; |
| 531 default: | 549 default: |
| 532 error = true; | 550 error = true; |
| 533 break; | 551 break; |
| 534 } | 552 } |
| 535 } | 553 } |
| (...skipping 24 matching lines...) Expand all Loading... | |
| 560 void WASAPIUnifiedStream::StopAndJoinThread(HRESULT err) { | 578 void WASAPIUnifiedStream::StopAndJoinThread(HRESULT err) { |
| 561 CHECK(GetCurrentThreadId() == creating_thread_id_); | 579 CHECK(GetCurrentThreadId() == creating_thread_id_); |
| 562 DCHECK(audio_io_thread_.get()); | 580 DCHECK(audio_io_thread_.get()); |
| 563 SetEvent(stop_streaming_event_.Get()); | 581 SetEvent(stop_streaming_event_.Get()); |
| 564 audio_io_thread_->Join(); | 582 audio_io_thread_->Join(); |
| 565 audio_io_thread_.reset(); | 583 audio_io_thread_.reset(); |
| 566 HandleError(err); | 584 HandleError(err); |
| 567 } | 585 } |
| 568 | 586 |
| 569 } // namespace media | 587 } // namespace media |
| OLD | NEW |