OLD | NEW |
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/renderer/media/webrtc_audio_renderer.h" | 5 #include "content/renderer/media/webrtc_audio_renderer.h" |
6 | 6 |
7 #include "base/logging.h" | 7 #include "base/logging.h" |
8 #include "base/metrics/histogram.h" | 8 #include "base/metrics/histogram.h" |
9 #include "base/strings/string_util.h" | 9 #include "base/strings/string_util.h" |
10 #include "base/strings/stringprintf.h" | 10 #include "base/strings/stringprintf.h" |
(...skipping 11 matching lines...)
22 | 22 |
23 #if defined(OS_WIN) | 23 #if defined(OS_WIN) |
24 #include "base/win/windows_version.h" | 24 #include "base/win/windows_version.h" |
25 #include "media/audio/win/core_audio_util_win.h" | 25 #include "media/audio/win/core_audio_util_win.h" |
26 #endif | 26 #endif |
27 | 27 |
28 namespace content { | 28 namespace content { |
29 | 29 |
30 namespace { | 30 namespace { |
31 | 31 |
| 32 // We add a UMA histogram measuring the execution time of the Render() method |
| 33 // every |kNumCallbacksBetweenRenderTimeHistograms| callbacks. Assuming 10ms |
| 34 // between callbacks, this leads to one UMA update every 100ms. |
| 35 const int kNumCallbacksBetweenRenderTimeHistograms = 10; |
| 36 |
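The pattern the new comment describes, timing every callback but reporting to UMA only on every N-th one, can be shown in isolation. A minimal sketch, assuming only the UMA_HISTOGRAM_TIMES macro and base::TimeTicks already included in this file; the ThrottledRenderTimer class and its member names are hypothetical, not part of this patch:

#include "base/callback.h"
#include "base/metrics/histogram.h"
#include "base/time/time.h"

// Hypothetical helper illustrating the sampling cadence: every call is timed,
// but a histogram sample is emitted only once per kCallbacksPerSample calls.
class ThrottledRenderTimer {
 public:
  ThrottledRenderTimer() : callback_count_(0) {}

  void TimeCallback(const base::Closure& work) {
    base::TimeTicks start_time = base::TimeTicks::Now();
    work.Run();
    if (++callback_count_ == kCallbacksPerSample) {
      callback_count_ = 0;
      // With a 10ms callback cadence this reports roughly every 100ms.
      UMA_HISTOGRAM_TIMES("WebRTC.AudioRenderTimes",
                          base::TimeTicks::Now() - start_time);
    }
  }

 private:
  static const int kCallbacksPerSample = 10;
  int callback_count_;
};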
32 // This is a simple wrapper class that's handed out to users of a shared | 37 // This is a simple wrapper class that's handed out to users of a shared |
33 // WebRtcAudioRenderer instance. This class maintains the per-user 'playing' | 38 // WebRtcAudioRenderer instance. This class maintains the per-user 'playing' |
34 // and 'started' states to avoid problems related to incorrect usage which | 39 // and 'started' states to avoid problems related to incorrect usage which |
35 // might violate the implementation assumptions inside WebRtcAudioRenderer | 40 // might violate the implementation assumptions inside WebRtcAudioRenderer |
36 // (see the play reference count). | 41 // (see the play reference count). |
37 class SharedAudioRenderer : public MediaStreamAudioRenderer { | 42 class SharedAudioRenderer : public MediaStreamAudioRenderer { |
38 public: | 43 public: |
39 // Callback definition for a callback that is called when Play(), Pause() | 44 // Callback definition for a callback that is called when Play(), Pause() |
40 // or SetVolume are called (whenever the internal |playing_state_| changes). | 45 // or SetVolume are called (whenever the internal |playing_state_| changes). |
41 typedef base::Callback< | 46 typedef base::Callback< |
(...skipping 141 matching lines...)
183 session_id_(session_id), | 188 session_id_(session_id), |
184 media_stream_(media_stream), | 189 media_stream_(media_stream), |
185 source_(NULL), | 190 source_(NULL), |
186 play_ref_count_(0), | 191 play_ref_count_(0), |
187 start_ref_count_(0), | 192 start_ref_count_(0), |
188 audio_delay_milliseconds_(0), | 193 audio_delay_milliseconds_(0), |
189 fifo_delay_milliseconds_(0), | 194 fifo_delay_milliseconds_(0), |
190 sink_params_(media::AudioParameters::AUDIO_PCM_LOW_LATENCY, | 195 sink_params_(media::AudioParameters::AUDIO_PCM_LOW_LATENCY, |
191 media::CHANNEL_LAYOUT_STEREO, sample_rate, 16, | 196 media::CHANNEL_LAYOUT_STEREO, sample_rate, 16, |
192 frames_per_buffer, | 197 frames_per_buffer, |
193 GetCurrentDuckingFlag(source_render_frame_id)) { | 198 GetCurrentDuckingFlag(source_render_frame_id)), |
| 199 render_callback_count_(0) { |
194 WebRtcLogMessage(base::StringPrintf( | 200 WebRtcLogMessage(base::StringPrintf( |
195 "WAR::WAR. source_render_view_id=%d" | 201 "WAR::WAR. source_render_view_id=%d" |
196 ", session_id=%d, sample_rate=%d, frames_per_buffer=%d, effects=%i", | 202 ", session_id=%d, sample_rate=%d, frames_per_buffer=%d, effects=%i", |
197 source_render_view_id, | 203 source_render_view_id, |
198 session_id, | 204 session_id, |
199 sample_rate, | 205 sample_rate, |
200 frames_per_buffer, | 206 frames_per_buffer, |
201 sink_params_.effects())); | 207 sink_params_.effects())); |
202 } | 208 } |
203 | 209 |
(...skipping 110 matching lines...)
314 } | 320 } |
315 | 321 |
316 void WebRtcAudioRenderer::Play() { | 322 void WebRtcAudioRenderer::Play() { |
317 DVLOG(1) << "WebRtcAudioRenderer::Play()"; | 323 DVLOG(1) << "WebRtcAudioRenderer::Play()"; |
318 DCHECK(thread_checker_.CalledOnValidThread()); | 324 DCHECK(thread_checker_.CalledOnValidThread()); |
319 | 325 |
320 if (playing_state_.playing()) | 326 if (playing_state_.playing()) |
321 return; | 327 return; |
322 | 328 |
323 playing_state_.set_playing(true); | 329 playing_state_.set_playing(true); |
| 330 render_callback_count_ = 0; |
324 | 331 |
325 OnPlayStateChanged(media_stream_, &playing_state_); | 332 OnPlayStateChanged(media_stream_, &playing_state_); |
326 } | 333 } |
327 | 334 |
328 void WebRtcAudioRenderer::EnterPlayState() { | 335 void WebRtcAudioRenderer::EnterPlayState() { |
329 DVLOG(1) << "WebRtcAudioRenderer::EnterPlayState()"; | 336 DVLOG(1) << "WebRtcAudioRenderer::EnterPlayState()"; |
330 DCHECK(thread_checker_.CalledOnValidThread()); | 337 DCHECK(thread_checker_.CalledOnValidThread()); |
331 DCHECK_GT(start_ref_count_, 0) << "Did you forget to call Start()?"; | 338 DCHECK_GT(start_ref_count_, 0) << "Did you forget to call Start()?"; |
332 base::AutoLock auto_lock(lock_); | 339 base::AutoLock auto_lock(lock_); |
333 if (state_ == UNINITIALIZED) | 340 if (state_ == UNINITIALIZED) |
(...skipping 99 matching lines...)
433 } | 440 } |
434 | 441 |
435 void WebRtcAudioRenderer::OnRenderError() { | 442 void WebRtcAudioRenderer::OnRenderError() { |
436 NOTIMPLEMENTED(); | 443 NOTIMPLEMENTED(); |
437 LOG(ERROR) << "OnRenderError()"; | 444 LOG(ERROR) << "OnRenderError()"; |
438 } | 445 } |
439 | 446 |
440 // Called by AudioPullFifo when more data is necessary. | 447 // Called by AudioPullFifo when more data is necessary. |
441 void WebRtcAudioRenderer::SourceCallback( | 448 void WebRtcAudioRenderer::SourceCallback( |
442 int fifo_frame_delay, media::AudioBus* audio_bus) { | 449 int fifo_frame_delay, media::AudioBus* audio_bus) { |
| 450 base::TimeTicks start_time = base::TimeTicks::Now(); |
443 DVLOG(2) << "WebRtcAudioRenderer::SourceCallback(" | 451 DVLOG(2) << "WebRtcAudioRenderer::SourceCallback(" |
444 << fifo_frame_delay << ", " | 452 << fifo_frame_delay << ", " |
445 << audio_bus->frames() << ")"; | 453 << audio_bus->frames() << ")"; |
446 | 454 |
447 int output_delay_milliseconds = audio_delay_milliseconds_; | 455 int output_delay_milliseconds = audio_delay_milliseconds_; |
448 output_delay_milliseconds += fifo_delay_milliseconds_; | 456 output_delay_milliseconds += fifo_delay_milliseconds_; |
449 DVLOG(2) << "output_delay_milliseconds: " << output_delay_milliseconds; | 457 DVLOG(2) << "output_delay_milliseconds: " << output_delay_milliseconds; |
450 | 458 |
451 // We need to keep render data for the |source_| regardless of |state_|, | 459 // We need to keep render data for the |source_| regardless of |state_|, |
452 // otherwise the data will be buffered up inside |source_|. | 460 // otherwise the data will be buffered up inside |source_|. |
453 source_->RenderData(audio_bus, sink_params_.sample_rate(), | 461 source_->RenderData(audio_bus, sink_params_.sample_rate(), |
454 output_delay_milliseconds, | 462 output_delay_milliseconds, |
455 &current_time_); | 463 &current_time_); |
456 | 464 |
457 // Avoid filling up the audio bus if we are not playing; instead | 465 // Avoid filling up the audio bus if we are not playing; instead |
458 // return here and ensure that the returned value in Render() is 0. | 466 // return here and ensure that the returned value in Render() is 0. |
459 if (state_ != PLAYING) | 467 if (state_ != PLAYING) |
460 audio_bus->Zero(); | 468 audio_bus->Zero(); |
| 469 |
| 470 if (++render_callback_count_ == kNumCallbacksBetweenRenderTimeHistograms) { |
| 471 base::TimeDelta elapsed = base::TimeTicks::Now() - start_time; |
| 472 render_callback_count_ = 0; |
| 473 UMA_HISTOGRAM_TIMES("WebRTC.AudioRenderTimes", elapsed); |
| 474 } |
461 } | 475 } |
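For context on the comment above SourceCallback() ("Called by AudioPullFifo when more data is necessary"): the FIFO is built around a read callback with exactly this (frame delay, AudioBus*) shape and pulls on it whenever a Consume() request needs more frames. A minimal sketch of that wiring, assuming the media::AudioPullFifo interface from media/base/audio_pull_fifo.h of this era (a channels/frames/ReadCB constructor plus Consume()); ExampleSink and ProvideInput() are hypothetical names, not this class's real structure:

#include "base/bind.h"
#include "base/bind_helpers.h"
#include "media/base/audio_bus.h"
#include "media/base/audio_pull_fifo.h"

class ExampleSink {
 public:
  ExampleSink(int channels, int frames_per_buffer)
      : fifo_(channels, frames_per_buffer,
              base::Bind(&ExampleSink::ProvideInput, base::Unretained(this))) {}

  // The audio device asks for |dest->frames()| frames; the FIFO invokes
  // ProvideInput() as often as needed to satisfy the request.
  void Render(media::AudioBus* dest) {
    fifo_.Consume(dest, dest->frames());
  }

 private:
  // Matches AudioPullFifo::ReadCB: frames of extra delay plus a bus to fill.
  void ProvideInput(int frame_delay, media::AudioBus* audio_bus) {
    audio_bus->Zero();  // A real source would fill the bus here.
  }

  media::AudioPullFifo fifo_;
};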
462 | 476 |
463 void WebRtcAudioRenderer::UpdateSourceVolume( | 477 void WebRtcAudioRenderer::UpdateSourceVolume( |
464 webrtc::AudioSourceInterface* source) { | 478 webrtc::AudioSourceInterface* source) { |
465 DCHECK(thread_checker_.CalledOnValidThread()); | 479 DCHECK(thread_checker_.CalledOnValidThread()); |
466 | 480 |
467 // Note: If there are no playing audio renderers, then the volume will be | 481 // Note: If there are no playing audio renderers, then the volume will be |
468 // set to 0.0. | 482 // set to 0.0. |
469 float volume = 0.0f; | 483 float volume = 0.0f; |
470 | 484 |
(...skipping 67 matching lines...)
538 if (RemovePlayingState(source, state)) | 552 if (RemovePlayingState(source, state)) |
539 EnterPauseState(); | 553 EnterPauseState(); |
540 } else if (AddPlayingState(source, state)) { | 554 } else if (AddPlayingState(source, state)) { |
541 EnterPlayState(); | 555 EnterPlayState(); |
542 } | 556 } |
543 UpdateSourceVolume(source); | 557 UpdateSourceVolume(source); |
544 } | 558 } |
545 } | 559 } |
546 | 560 |
547 } // namespace content | 561 } // namespace content |