Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "content/renderer/media/renderer_webaudiodevice_impl.h" | 5 #include "content/renderer/media/renderer_webaudiodevice_impl.h" |
| 6 | 6 |
| 7 #include <stddef.h> | 7 #include <stddef.h> |
| 8 | 8 |
| 9 #include <string> | 9 #include <string> |
| 10 | 10 |
| 11 #include "base/command_line.h" | 11 #include "base/command_line.h" |
| 12 #include "base/logging.h" | 12 #include "base/logging.h" |
| 13 #include "base/single_thread_task_runner.h" | 13 #include "base/single_thread_task_runner.h" |
| 14 #include "base/threading/thread_task_runner_handle.h" | 14 #include "base/threading/thread_task_runner_handle.h" |
| 15 #include "base/time/time.h" | 15 #include "base/time/time.h" |
| 16 #include "build/build_config.h" | 16 #include "build/build_config.h" |
| 17 #include "content/renderer/media/audio_device_factory.h" | 17 #include "content/renderer/media/audio_device_factory.h" |
| 18 #include "content/renderer/render_frame_impl.h" | 18 #include "content/renderer/render_frame_impl.h" |
| 19 #include "media/audio/null_audio_sink.h" | 19 #include "media/audio/null_audio_sink.h" |
| 20 #include "media/base/audio_pull_fifo.h" | |
| 20 #include "media/base/media_switches.h" | 21 #include "media/base/media_switches.h" |
| 21 #include "third_party/WebKit/public/web/WebLocalFrame.h" | 22 #include "third_party/WebKit/public/web/WebLocalFrame.h" |
| 22 #include "third_party/WebKit/public/web/WebView.h" | 23 #include "third_party/WebKit/public/web/WebView.h" |
| 23 | 24 |
| 24 using blink::WebAudioDevice; | 25 using blink::WebAudioDevice; |
| 25 using blink::WebLocalFrame; | 26 using blink::WebLocalFrame; |
| 26 using blink::WebVector; | 27 using blink::WebVector; |
| 27 using blink::WebView; | 28 using blink::WebView; |
| 28 | 29 |
| 30 namespace { | |
| 31 | |
| 32 int CalculateRenderBufferSizer(int default_sink_frames_per_buffer) { | |
| 33 #if defined(OS_ANDROID) | |
| 34 // The optimum low-latency hardware buffer size is usually too small on | |
| 35 // Android for WebAudio to render without glitching. So, if it is small, use | |
| 36 // a larger size. | |
| 37 // | |
| 38 // Since WebAudio renders in 128-frame blocks, the small buffer sizes (144 for | |
| 39 // a Galaxy Nexus), cause significant processing jitter. Sometimes multiple | |
| 40 blocks will be processed, but other times will not be since the WebAudio can't | |
| 41 // satisfy the request. By using a larger render buffer size, we smooth out | |
| 42 // the jitter. | |
| 43 const int kSmallBufferSize = 1024; | |
| 44 const int kDefaultCallbackBufferSize = 2048; | |
| 45 if (default_sink_frames_per_buffer <= kSmallBufferSize) | |
| 46 return kDefaultCallbackBufferSize; | |
| 47 #endif | |
| 48 | |
| 49 return default_sink_frames_per_buffer; | |
| 50 } | |
| 51 | |
| 52 } // namespace | |
| 53 | |
| 29 namespace content { | 54 namespace content { |
| 30 | 55 |
| 31 #if defined(OS_ANDROID) | 56 #if defined(OS_ANDROID) |
| 32 static const int kSilenceInSecondsToEnterIdleMode = 30; | 57 static const int kSilenceInSecondsToEnterIdleMode = 30; |
| 33 #endif | 58 #endif |
| 34 | 59 |
| 35 RendererWebAudioDeviceImpl::RendererWebAudioDeviceImpl( | 60 RendererWebAudioDeviceImpl::RendererWebAudioDeviceImpl( |
| 36 const media::AudioParameters& params, | 61 const media::AudioParameters& params, |
| 37 WebAudioDevice::RenderCallback* callback, | 62 WebAudioDevice::RenderCallback* callback, |
| 38 int session_id, | 63 int session_id, |
| 39 const url::Origin& security_origin) | 64 const url::Origin& security_origin) |
| 40 : params_(params), | 65 : client_params_(params), |
| 41 client_callback_(callback), | 66 client_callback_(callback), |
| 42 session_id_(session_id), | 67 session_id_(session_id), |
| 43 task_runner_(base::ThreadTaskRunnerHandle::Get()), | 68 task_runner_(base::ThreadTaskRunnerHandle::Get()), |
| 44 null_audio_sink_(new media::NullAudioSink(task_runner_)), | 69 null_audio_sink_(new media::NullAudioSink(task_runner_)), |
| 45 is_using_null_audio_sink_(false), | 70 is_using_null_audio_sink_(false), |
| 46 first_buffer_after_silence_(media::AudioBus::Create(params_)), | 71 first_buffer_after_silence_(media::AudioBus::Create(client_params_)), |
| 47 is_first_buffer_after_silence_(false), | 72 is_first_buffer_after_silence_(false), |
| 48 security_origin_(security_origin) { | 73 security_origin_(security_origin) { |
| 49 DCHECK(client_callback_); | 74 DCHECK(client_callback_); |
| 50 null_audio_sink_->Initialize(params_, this); | |
| 51 null_audio_sink_->Start(); | |
| 52 } | 75 } |
| 53 | 76 |
| 54 RendererWebAudioDeviceImpl::~RendererWebAudioDeviceImpl() { | 77 RendererWebAudioDeviceImpl::~RendererWebAudioDeviceImpl() { |
| 55 DCHECK(!sink_); | 78 DCHECK(!sink_); |
| 56 } | 79 } |
| 57 | 80 |
| 58 void RendererWebAudioDeviceImpl::start() { | 81 void RendererWebAudioDeviceImpl::start() { |
| 59 DCHECK(thread_checker_.CalledOnValidThread()); | 82 DCHECK(thread_checker_.CalledOnValidThread()); |
| 60 | 83 |
| 61 if (sink_) | 84 if (sink_) |
| 62 return; // Already started. | 85 return; // Already started. |
| 63 | 86 |
| 64 // Assumption: This method is being invoked within a V8 call stack. CHECKs | 87 // Assumption: This method is being invoked within a V8 call stack. CHECKs |
| 65 // will fail in the call to frameForCurrentContext() otherwise. | 88 // will fail in the call to frameForCurrentContext() otherwise. |
| 66 // | 89 // |
| 67 // Therefore, we can perform look-ups to determine which RenderView is | 90 // Therefore, we can perform look-ups to determine which RenderView is |
| 68 // starting the audio device. The reason for all this is because the creator | 91 // starting the audio device. The reason for all this is because the creator |
| 69 // of the WebAudio objects might not be the actual source of the audio (e.g., | 92 // of the WebAudio objects might not be the actual source of the audio (e.g., |
| 70 // an extension creates an object that is passed and used within a page). | 93 // an extension creates an object that is passed and used within a page). |
| 71 WebLocalFrame* const web_frame = WebLocalFrame::frameForCurrentContext(); | 94 WebLocalFrame* const web_frame = WebLocalFrame::frameForCurrentContext(); |
| 72 RenderFrame* const render_frame = | 95 RenderFrame* const render_frame = |
| 73 web_frame ? RenderFrame::FromWebFrame(web_frame) : NULL; | 96 web_frame ? RenderFrame::FromWebFrame(web_frame) : NULL; |
| 74 sink_ = AudioDeviceFactory::NewAudioRendererSink( | 97 sink_ = AudioDeviceFactory::NewAudioRendererSink( |
| 75 AudioDeviceFactory::kSourceWebAudio, | 98 AudioDeviceFactory::kSourceWebAudio, |
| 76 render_frame ? render_frame->GetRoutingID() : MSG_ROUTING_NONE, | 99 render_frame ? render_frame->GetRoutingID() : MSG_ROUTING_NONE, |
| 77 session_id_, std::string(), security_origin_); | 100 session_id_, std::string(), security_origin_); |
| 78 sink_->Initialize(params_, this); | 101 |
| 102 // Output based on the default sink buffer size. | |
| 103 media::AudioParameters output_params(client_params_); | |
| 104 output_params.set_frames_per_buffer(CalculateRenderBufferSizer( | |
| 105 sink_->GetOutputDeviceInfo().output_params().frames_per_buffer())); | |
| 106 | |
| 107 CreateFifoIfRequired(output_params.frames_per_buffer()); | |
| 108 | |
| 109 null_audio_sink_->Initialize(output_params, this); | |
| 110 null_audio_sink_->Start(); | |
| 111 start_null_audio_sink_callback_.Reset( | |
| 112 base::Bind(&media::NullAudioSink::Play, null_audio_sink_)); | |
|
hongchan
2016/05/26 16:43:33
I am ignorant on the renderer code, but why do we
o1ka
2016/05/26 16:47:47
It was introduce in this CL https://codereview.chr
| |
| 113 | |
| 114 sink_->Initialize(output_params, this); | |
| 79 sink_->Start(); | 115 sink_->Start(); |
| 80 sink_->Play(); | 116 sink_->Play(); |
| 81 start_null_audio_sink_callback_.Reset( | |
| 82 base::Bind(&media::NullAudioSink::Play, null_audio_sink_)); | |
| 83 // Note: Default behavior is to auto-play on start. | 117 // Note: Default behavior is to auto-play on start. |
| 84 } | 118 } |
| 85 | 119 |
| 86 void RendererWebAudioDeviceImpl::stop() { | 120 void RendererWebAudioDeviceImpl::stop() { |
| 87 DCHECK(thread_checker_.CalledOnValidThread()); | 121 DCHECK(thread_checker_.CalledOnValidThread()); |
| 88 | 122 |
| 89 if (sink_) { | 123 if (sink_) { |
| 90 sink_->Stop(); | 124 sink_->Stop(); |
| 91 sink_ = NULL; | 125 sink_ = NULL; |
| 92 } | 126 } |
| 93 null_audio_sink_->Stop(); | 127 null_audio_sink_->Stop(); |
| 94 is_using_null_audio_sink_ = false; | 128 is_using_null_audio_sink_ = false; |
| 95 is_first_buffer_after_silence_ = false; | 129 is_first_buffer_after_silence_ = false; |
| 96 start_null_audio_sink_callback_.Cancel(); | 130 start_null_audio_sink_callback_.Cancel(); |
| 97 } | 131 } |
| 98 | 132 |
| 99 double RendererWebAudioDeviceImpl::sampleRate() { | 133 double RendererWebAudioDeviceImpl::sampleRate() { |
| 100 return params_.sample_rate(); | 134 return client_params_.sample_rate(); |
| 101 } | 135 } |
| 102 | 136 |
| 103 int RendererWebAudioDeviceImpl::Render(media::AudioBus* dest, | 137 int RendererWebAudioDeviceImpl::Render(media::AudioBus* dest, |
| 104 uint32_t frames_delayed, | 138 uint32_t frames_delayed, |
| 105 uint32_t frames_skipped) { | 139 uint32_t frames_skipped) { |
| 106 #if defined(OS_ANDROID) | 140 #if defined(OS_ANDROID) |
| 141 // There can be a race in Render() on Android (https://crbug.com/614978), so | |
| 142 // don't try to inject the FIFO dynamically, just rely on the initialization. | |
| 143 DCHECK(audio_fifo_ || (dest->frames() == client_params_.frames_per_buffer())); | |
| 144 #else | |
| 145 // Allow Render() to work on varying buffer size. | |
| 146 CreateFifoIfRequired(dest->frames()); | |
| 147 #endif | |
| 148 | |
| 149 if (audio_fifo_) | |
| 150 audio_fifo_->Consume(dest, dest->frames()); | |
| 151 else | |
| 152 SourceCallback(0, dest); | |
| 153 | |
| 154 return dest->frames(); | |
| 155 } | |
| 156 | |
| 157 void RendererWebAudioDeviceImpl::SourceCallback(int fifo_frame_delay, | |
| 158 media::AudioBus* dest) { | |
| 159 #if defined(OS_ANDROID) | |
| 107 if (is_first_buffer_after_silence_) { | 160 if (is_first_buffer_after_silence_) { |
| 108 DCHECK(!is_using_null_audio_sink_); | 161 DCHECK(!is_using_null_audio_sink_); |
| 109 first_buffer_after_silence_->CopyTo(dest); | 162 first_buffer_after_silence_->CopyTo(dest); |
| 110 is_first_buffer_after_silence_ = false; | 163 is_first_buffer_after_silence_ = false; |
| 111 return dest->frames(); | 164 return; |
| 112 } | 165 } |
| 113 #endif | 166 #endif |
| 114 // Wrap the output pointers using WebVector. | 167 // Wrap the output pointers using WebVector. |
| 115 WebVector<float*> web_audio_dest_data( | 168 WebVector<float*> web_audio_dest_data( |
| 116 static_cast<size_t>(dest->channels())); | 169 static_cast<size_t>(dest->channels())); |
| 117 for (int i = 0; i < dest->channels(); ++i) | 170 for (int i = 0; i < dest->channels(); ++i) |
| 118 web_audio_dest_data[i] = dest->channel(i); | 171 web_audio_dest_data[i] = dest->channel(i); |
| 119 | 172 |
| 120 // TODO(xians): Remove the following |web_audio_source_data| after | 173 // TODO(xians): Remove the following |web_audio_source_data| after |
| 121 // changing the blink interface. | 174 // changing the blink interface. |
| (...skipping 11 matching lines...) Expand all Loading... | |
| 133 DCHECK(thread_checker_.CalledOnValidThread()); | 186 DCHECK(thread_checker_.CalledOnValidThread()); |
| 134 is_using_null_audio_sink_ = false; | 187 is_using_null_audio_sink_ = false; |
| 135 is_first_buffer_after_silence_ = true; | 188 is_first_buffer_after_silence_ = true; |
| 136 dest->CopyTo(first_buffer_after_silence_.get()); | 189 dest->CopyTo(first_buffer_after_silence_.get()); |
| 137 task_runner_->PostTask( | 190 task_runner_->PostTask( |
| 138 FROM_HERE, | 191 FROM_HERE, |
| 139 base::Bind(&media::NullAudioSink::Pause, null_audio_sink_)); | 192 base::Bind(&media::NullAudioSink::Pause, null_audio_sink_)); |
| 140 // Calling sink_->Play() may trigger reentrancy into this | 193 // Calling sink_->Play() may trigger reentrancy into this |
| 141 // function, so this should be called at the end. | 194 // function, so this should be called at the end. |
| 142 sink_->Play(); | 195 sink_->Play(); |
| 143 return dest->frames(); | 196 return; |
| 144 } | 197 } |
| 145 } else if (!is_using_null_audio_sink_) { | 198 } else if (!is_using_null_audio_sink_) { |
| 146 // Called on the audio device thread. | 199 // Called on the audio device thread. |
| 147 const base::TimeTicks now = base::TimeTicks::Now(); | 200 const base::TimeTicks now = base::TimeTicks::Now(); |
| 148 if (first_silence_time_.is_null()) | 201 if (first_silence_time_.is_null()) |
| 149 first_silence_time_ = now; | 202 first_silence_time_ = now; |
| 150 if (now - first_silence_time_ | 203 if (now - first_silence_time_ |
| 151 > base::TimeDelta::FromSeconds(kSilenceInSecondsToEnterIdleMode)) { | 204 > base::TimeDelta::FromSeconds(kSilenceInSecondsToEnterIdleMode)) { |
| 152 sink_->Pause(); | 205 sink_->Pause(); |
| 153 is_using_null_audio_sink_ = true; | 206 is_using_null_audio_sink_ = true; |
| 154 // If Stop() is called right after the task is posted, need to cancel | 207 // If Stop() is called right after the task is posted, need to cancel |
| 155 // this task. | 208 // this task. |
| 156 task_runner_->PostDelayedTask( | 209 task_runner_->PostDelayedTask(FROM_HERE, |
| 157 FROM_HERE, | 210 start_null_audio_sink_callback_.callback(), |
| 158 start_null_audio_sink_callback_.callback(), | 211 client_params_.GetBufferDuration()); |
| 159 params_.GetBufferDuration()); | |
| 160 } | 212 } |
| 161 } | 213 } |
| 162 #endif | 214 #endif |
| 163 return dest->frames(); | |
| 164 } | 215 } |
| 165 | 216 |
| 166 void RendererWebAudioDeviceImpl::OnRenderError() { | 217 void RendererWebAudioDeviceImpl::OnRenderError() { |
| 167 // TODO(crogers): implement error handling. | 218 // TODO(crogers): implement error handling. |
| 168 } | 219 } |
| 169 | 220 |
| 221 void RendererWebAudioDeviceImpl::CreateFifoIfRequired( | |
| 222 int render_frames_per_buffer) { | |
| 223 if (!audio_fifo_ && | |
| 224 render_frames_per_buffer != client_params_.frames_per_buffer()) { | |
| 225 audio_fifo_.reset(new media::AudioPullFifo( | |
| 226 client_params_.channels(), client_params_.frames_per_buffer(), | |
| 227 base::Bind(&RendererWebAudioDeviceImpl::SourceCallback, | |
| 228 base::Unretained(this)))); | |
| 229 DVLOG(1) << "Client buffer size: " << client_params_.frames_per_buffer() | |
| 230 << " output buffer size: " << render_frames_per_buffer | |
| 231 << "; fifo injected."; | |
| 232 } | |
| 233 } | |
| 234 | |
| 170 } // namespace content | 235 } // namespace content |
| OLD | NEW |