Chromium Code Reviews

Diff: content/renderer/media/webrtc_audio_renderer.cc

Issue 1666363005: Switching audio clients to using the RestartableAudioRendererSink interface as a sink. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: nit fixes Created 4 years, 10 months ago
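
In short, the renderer stops creating a concrete media::AudioOutputDevice through AudioDeviceFactory::NewOutputDevice() and instead asks AudioDeviceFactory::NewAudioRendererSink() for the generic sink interface, reaching device status and parameters through GetOutputDevice(). The before/after sketch below is condensed from the hunks in this diff; the old_sink/new_sink names are only there to show the two patterns next to each other and do not appear in the code itself.

  // Pattern before this patch (the removed lines): a concrete
  // AudioOutputDevice is created and queried directly.
  scoped_refptr<media::AudioOutputDevice> old_sink =
      AudioDeviceFactory::NewOutputDevice(source_render_frame_id_, session_id_,
                                          output_device_id_, security_origin_);
  if (old_sink->GetDeviceStatus() != media::OUTPUT_DEVICE_STATUS_OK)
    return false;

  // Pattern after this patch (the added lines): a generic AudioRendererSink is
  // created for the kSourceWebRtc source type, and device-level calls go
  // through GetOutputDevice().
  scoped_refptr<media::AudioRendererSink> new_sink =
      AudioDeviceFactory::NewAudioRendererSink(
          AudioDeviceFactory::kSourceWebRtc, source_render_frame_id_,
          session_id_, output_device_id_, security_origin_);
  if (new_sink->GetOutputDevice()->GetDeviceStatus() !=
      media::OUTPUT_DEVICE_STATUS_OK) {
    return false;
  }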
 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "content/renderer/media/webrtc_audio_renderer.h"

 #include <utility>

 #include "base/logging.h"
 #include "base/metrics/histogram.h"
 #include "base/strings/string_util.h"
 #include "base/strings/stringprintf.h"
 #include "build/build_config.h"
 #include "content/renderer/media/audio_device_factory.h"
 #include "content/renderer/media/media_stream_audio_track.h"
 #include "content/renderer/media/media_stream_dispatcher.h"
 #include "content/renderer/media/media_stream_track.h"
 #include "content/renderer/media/webrtc_audio_device_impl.h"
 #include "content/renderer/media/webrtc_logging.h"
 #include "content/renderer/render_frame_impl.h"
-#include "media/audio/audio_output_device.h"
 #include "media/audio/audio_parameters.h"
 #include "media/audio/sample_rates.h"
 #include "third_party/WebKit/public/platform/WebMediaStreamTrack.h"
 #include "third_party/webrtc/api/mediastreaminterface.h"
 #include "third_party/webrtc/media/base/audiorenderer.h"

 #if defined(OS_WIN)
 #include "base/win/windows_version.h"
 #include "media/audio/win/core_audio_util_win.h"
 #endif
(...skipping 182 matching lines...)
   DCHECK(thread_checker_.CalledOnValidThread());
   DCHECK(source);
   DCHECK(!sink_.get());
   DCHECK_GE(session_id_, 0);
   {
     base::AutoLock auto_lock(lock_);
     DCHECK_EQ(state_, UNINITIALIZED);
     DCHECK(!source_);
   }

-  sink_ =
-      AudioDeviceFactory::NewOutputDevice(source_render_frame_id_, session_id_,
-                                          output_device_id_, security_origin_);
-  if (sink_->GetDeviceStatus() != media::OUTPUT_DEVICE_STATUS_OK)
+  sink_ = AudioDeviceFactory::NewAudioRendererSink(
+      AudioDeviceFactory::kSourceWebRtc, source_render_frame_id_, session_id_,
+      output_device_id_, security_origin_);
+
+  if (sink_->GetOutputDevice()->GetDeviceStatus() !=
+      media::OUTPUT_DEVICE_STATUS_OK) {
     return false;
+  }

   PrepareSink();
   {
     // No need to reassert the preconditions because the other thread accessing
     // the fields (checked by |audio_renderer_thread_checker_|) only reads them.
     base::AutoLock auto_lock(lock_);
     source_ = source;

     // User must call Play() before any audio can be heard.
     state_ = PAUSED;
   }
   sink_->Start();
+  sink_->Play();  // Not all the sinks play on start.

   return true;
 }

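One behavioral detail in Initialize() worth noting: the new code follows sink_->Start() with an explicit sink_->Play() because, as the added comment says, not all sink implementations begin pulling audio on Start() alone, and audible output is still gated by the renderer's own state, which stays PAUSED until the client calls Play(). Below is a hypothetical, self-contained toy model of that lifecycle; ToySink is purely illustrative and is not Chromium's sink implementation.

#include <iostream>

// Hypothetical sink that, as some restartable sink implementations are assumed
// to behave here, does not pull audio until both Start() and Play() have been
// called.
class ToySink {
 public:
  void Start() { started_ = true; }  // prepare the output stream
  void Play() { playing_ = true; }   // actually begin pulling audio
  bool IsPulling() const { return started_ && playing_; }

 private:
  bool started_ = false;
  bool playing_ = false;
};

int main() {
  ToySink sink;
  sink.Start();
  std::cout << sink.IsPulling() << "\n";  // prints 0: Start() alone is not enough
  sink.Play();                            // mirrors the new sink_->Play() call
  std::cout << sink.IsPulling() << "\n";  // prints 1
  return 0;
}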
 scoped_refptr<MediaStreamAudioRenderer>
 WebRtcAudioRenderer::CreateSharedAudioRendererProxy(
     const blink::WebMediaStream& media_stream) {
   content::SharedAudioRenderer::OnPlayStateChanged on_play_state_changed =
       base::Bind(&WebRtcAudioRenderer::OnPlayStateChanged, this);
   return new SharedAudioRenderer(this, media_stream, on_play_state_changed);
(...skipping 122 matching lines...)
     const media::SwitchOutputDeviceCB& callback) {
   DVLOG(1) << "WebRtcAudioRenderer::SwitchOutputDevice()";
   DCHECK(thread_checker_.CalledOnValidThread());
   DCHECK_GE(session_id_, 0);
   {
     base::AutoLock auto_lock(lock_);
     DCHECK(source_);
     DCHECK_NE(state_, UNINITIALIZED);
   }

-  scoped_refptr<media::AudioOutputDevice> new_sink =
-      AudioDeviceFactory::NewOutputDevice(source_render_frame_id_, session_id_,
-                                          device_id, security_origin);
-  if (new_sink->GetDeviceStatus() != media::OUTPUT_DEVICE_STATUS_OK) {
-    callback.Run(new_sink->GetDeviceStatus());
+  scoped_refptr<media::AudioRendererSink> new_sink =
+      AudioDeviceFactory::NewAudioRendererSink(
+          AudioDeviceFactory::kSourceWebRtc, source_render_frame_id_,
+          session_id_, device_id, security_origin);
+  if (new_sink->GetOutputDevice()->GetDeviceStatus() !=
+      media::OUTPUT_DEVICE_STATUS_OK) {
+    callback.Run(new_sink->GetOutputDevice()->GetDeviceStatus());
     return;
   }

   // Make sure to stop the sink while _not_ holding the lock since the Render()
   // callback may currently be executing and trying to grab the lock while we're
   // stopping the thread on which it runs.
   sink_->Stop();
   audio_renderer_thread_checker_.DetachFromThread();
   sink_ = new_sink;
   output_device_id_ = device_id;
   security_origin_ = security_origin;
   {
     base::AutoLock auto_lock(lock_);
     source_->AudioRendererThreadStopped();
   }
   PrepareSink();
   sink_->Start();

   callback.Run(media::OUTPUT_DEVICE_STATUS_OK);
 }

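The comment about stopping the sink while _not_ holding the lock describes a classic deadlock shape: Stop() waits for the thread that runs Render() to finish, and Render() itself acquires |lock_|. A hypothetical, self-contained sketch of the hazard using std::mutex and std::thread follows; the names and structure are illustrative and are not Chromium's actual classes.

#include <mutex>
#include <thread>

std::mutex g_lock;  // stands in for |lock_|

void Render() {
  std::lock_guard<std::mutex> auto_lock(g_lock);  // Render() grabs the lock
  // ... read shared state, produce audio ...
}

// Deadlock-prone ordering: hold the lock while waiting for the render thread.
// If that thread is blocked inside Render() waiting for the same lock,
// neither side can make progress.
void StopWhileHoldingLock(std::thread& render_thread) {
  std::lock_guard<std::mutex> auto_lock(g_lock);
  render_thread.join();  // stands in for sink_->Stop()
}

// The ordering SwitchOutputDevice() uses: stop the sink first, then take the
// lock to update shared state (source_->AudioRendererThreadStopped(), etc.).
void StopThenLock(std::thread& render_thread) {
  render_thread.join();  // stands in for sink_->Stop()
  std::lock_guard<std::mutex> auto_lock(g_lock);
  // ... update shared state ...
}

int main() {
  std::thread render_thread(Render);
  StopThenLock(render_thread);  // the safe ordering
  return 0;
}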
 media::AudioParameters WebRtcAudioRenderer::GetOutputParameters() {
   DCHECK(thread_checker_.CalledOnValidThread());
   if (!sink_.get())
     return media::AudioParameters();

-  return sink_->GetOutputParameters();
+  return sink_->GetOutputDevice()->GetOutputParameters();
 }

 media::OutputDeviceStatus WebRtcAudioRenderer::GetDeviceStatus() {
   DCHECK(thread_checker_.CalledOnValidThread());
   if (!sink_.get())
     return media::OUTPUT_DEVICE_STATUS_ERROR_INTERNAL;

-  return sink_->GetDeviceStatus();
+  return sink_->GetOutputDevice()->GetDeviceStatus();
 }

 int WebRtcAudioRenderer::Render(media::AudioBus* audio_bus,
                                 uint32_t audio_delay_milliseconds,
                                 uint32_t frames_skipped) {
   DCHECK(audio_renderer_thread_checker_.CalledOnValidThread());
   base::AutoLock auto_lock(lock_);
   if (!source_)
     return 0;

(...skipping 178 matching lines...)
   {
     base::AutoLock lock(lock_);
     new_sink_params = sink_params_;
   }
   // WebRTC does not yet support higher rates than 96000 on the client side
   // and 48000 is the preferred sample rate. Therefore, if 192000 is detected,
   // we change the rate to 48000 instead. The consequence is that the native
   // layer will be opened up at 192kHz but WebRTC will provide data at 48kHz
   // which will then be resampled by the audio converted on the browser side
   // to match the native audio layer.
-  int sample_rate = sink_->GetOutputParameters().sample_rate();
+  int sample_rate =
+      sink_->GetOutputDevice()->GetOutputParameters().sample_rate();
   DVLOG(1) << "Audio output hardware sample rate: " << sample_rate;
   if (sample_rate >= 192000) {
     DVLOG(1) << "Resampling from 48000 to " << sample_rate << " is required";
     sample_rate = 48000;
   }
   media::AudioSampleRate asr;
   if (media::ToAudioSampleRate(sample_rate, &asr)) {
     UMA_HISTOGRAM_ENUMERATION("WebRTC.AudioOutputSampleRate", asr,
                               media::kAudioSampleRateMax + 1);
   } else {
     UMA_HISTOGRAM_COUNTS("WebRTC.AudioOutputSampleRateUnexpected", sample_rate);
   }

   // Calculate the frames per buffer for the source, i.e. the WebRTC client. We
   // use 10 ms of data since the WebRTC client only supports multiples of 10 ms
   // as buffer size where 10 ms is preferred for lowest possible delay.
   const int source_frames_per_buffer = (sample_rate / 100);
   DVLOG(1) << "Using WebRTC output buffer size: " << source_frames_per_buffer;

   // Setup sink parameters.
   const int sink_frames_per_buffer = GetOptimalBufferSize(
-      sample_rate, sink_->GetOutputParameters().frames_per_buffer());
+      sample_rate,
+      sink_->GetOutputDevice()->GetOutputParameters().frames_per_buffer());
   new_sink_params.set_sample_rate(sample_rate);
   new_sink_params.set_frames_per_buffer(sink_frames_per_buffer);

   // Create a FIFO if re-buffering is required to match the source input with
   // the sink request. The source acts as provider here and the sink as
   // consumer.
   const bool different_source_sink_frames =
       source_frames_per_buffer != new_sink_params.frames_per_buffer();
   if (different_source_sink_frames) {
     DVLOG(1) << "Rebuffering from " << source_frames_per_buffer << " to "
              << new_sink_params.frames_per_buffer();
   }
   {
     base::AutoLock lock(lock_);
     if ((!audio_fifo_ && different_source_sink_frames) ||
         (audio_fifo_ &&
          audio_fifo_->SizeInFrames() != source_frames_per_buffer)) {
       audio_fifo_.reset(new media::AudioPullFifo(
           kChannels, source_frames_per_buffer,
           base::Bind(&WebRtcAudioRenderer::SourceCallback,
                      base::Unretained(this))));
     }
     sink_params_ = new_sink_params;
   }

   sink_->Initialize(new_sink_params, this);
 }

 }  // namespace content
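
The buffer-size logic in PrepareSink() reduces to simple arithmetic: WebRTC consumes audio in 10 ms chunks, so the source buffer is sample_rate / 100 frames; a hardware rate of 192000 or above is treated as 48000; and a FIFO is only needed when the sink buffer size differs from that 10 ms source buffer. A minimal worked example of the computation follows; the sink buffer size of 440 is an arbitrary illustrative value, whereas the real code obtains it via GetOptimalBufferSize().

#include <iostream>

// Mirrors the arithmetic in WebRtcAudioRenderer::PrepareSink(): cap the rate
// at 48000 when the hardware reports 192000 or more, size the source buffer
// to 10 ms of audio, and decide whether rebuffering (a FIFO) is required.
int main() {
  int sample_rate = 192000;  // hardware rate reported by the sink

  if (sample_rate >= 192000)
    sample_rate = 48000;  // WebRTC prefers 48 kHz on the client side

  const int source_frames_per_buffer = sample_rate / 100;  // 10 ms of frames
  const int sink_frames_per_buffer = 440;  // illustrative; really from
                                           // GetOptimalBufferSize()

  std::cout << "source frames per buffer: " << source_frames_per_buffer
            << "\n";  // 480
  std::cout << "FIFO needed: "
            << (source_frames_per_buffer != sink_frames_per_buffer)
            << "\n";  // 1
  return 0;
}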
