OLD | NEW |
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/renderer/media/renderer_webaudiodevice_impl.h" | 5 #include "content/renderer/media/renderer_webaudiodevice_impl.h" |
6 | 6 |
7 #include "base/command_line.h" | 7 #include "base/command_line.h" |
8 #include "base/logging.h" | 8 #include "base/logging.h" |
| 9 #include "base/single_thread_task_runner.h" |
| 10 #include "base/thread_task_runner_handle.h" |
| 11 #include "base/time/time.h" |
9 #include "content/renderer/media/audio_device_factory.h" | 12 #include "content/renderer/media/audio_device_factory.h" |
10 #include "content/renderer/render_frame_impl.h" | 13 #include "content/renderer/render_frame_impl.h" |
11 #include "media/audio/audio_output_device.h" | 14 #include "media/audio/audio_output_device.h" |
| 15 #include "media/audio/null_audio_sink.h" |
12 #include "media/base/media_switches.h" | 16 #include "media/base/media_switches.h" |
13 #include "third_party/WebKit/public/web/WebLocalFrame.h" | 17 #include "third_party/WebKit/public/web/WebLocalFrame.h" |
14 #include "third_party/WebKit/public/web/WebView.h" | 18 #include "third_party/WebKit/public/web/WebView.h" |
15 | 19 |
16 using blink::WebAudioDevice; | 20 using blink::WebAudioDevice; |
17 using blink::WebLocalFrame; | 21 using blink::WebLocalFrame; |
18 using blink::WebVector; | 22 using blink::WebVector; |
19 using blink::WebView; | 23 using blink::WebView; |
20 | 24 |
21 namespace content { | 25 namespace content { |
22 | 26 |
| 27 #if defined(OS_ANDROID) |
| 28 static const int kSilenceInSecondsToEnterIdleMode = 30; |
| 29 #endif |
| 30 |
// Constructs the WebAudio output device wrapper. |params| is the audio format
// requested for output, |callback| is the WebAudio render callback that
// produces the audio data, and |session_id| is forwarded to
// InitializeWithSessionId() on the real output device at start().
RendererWebAudioDeviceImpl::RendererWebAudioDeviceImpl(
    const media::AudioParameters& params,
    WebAudioDevice::RenderCallback* callback,
    int session_id)
    : params_(params),
      client_callback_(callback),
      session_id_(session_id),
      // Task runner of the construction thread; used to run the
      // NullAudioSink and as the target for tasks posted from Render().
      task_runner_(base::ThreadTaskRunnerHandle::Get()),
      // Fake sink that keeps pulling (silent) data while the real device is
      // idled during long silence (Android-only; see Render()).
      null_audio_sink_(new media::NullAudioSink(task_runner_)),
      is_using_null_audio_sink_(false),
      // Scratch buffer holding the first non-silent rendering so it can be
      // replayed once the real device resumes after idle.
      first_buffer_after_silence_(media::AudioBus::Create(params_)),
      is_first_buffer_after_silence_(false) {
  DCHECK(client_callback_);
  // The null sink renders through this object too, so Render() serves both
  // the real device and the fake one.
  null_audio_sink_->Initialize(params_, this);
}
32 | 46 |
33 RendererWebAudioDeviceImpl::~RendererWebAudioDeviceImpl() { | 47 RendererWebAudioDeviceImpl::~RendererWebAudioDeviceImpl() { |
34 DCHECK(!output_device_.get()); | 48 DCHECK(!output_device_); |
35 } | 49 } |
36 | 50 |
37 void RendererWebAudioDeviceImpl::start() { | 51 void RendererWebAudioDeviceImpl::start() { |
38 DCHECK(thread_checker_.CalledOnValidThread()); | 52 DCHECK(thread_checker_.CalledOnValidThread()); |
39 | 53 |
40 if (output_device_.get()) | 54 if (output_device_) |
41 return; // Already started. | 55 return; // Already started. |
42 | 56 |
43 // Assumption: This method is being invoked within a V8 call stack. CHECKs | 57 // Assumption: This method is being invoked within a V8 call stack. CHECKs |
44 // will fail in the call to frameForCurrentContext() otherwise. | 58 // will fail in the call to frameForCurrentContext() otherwise. |
45 // | 59 // |
46 // Therefore, we can perform look-ups to determine which RenderView is | 60 // Therefore, we can perform look-ups to determine which RenderView is |
47 // starting the audio device. The reason for all this is because the creator | 61 // starting the audio device. The reason for all this is because the creator |
48 // of the WebAudio objects might not be the actual source of the audio (e.g., | 62 // of the WebAudio objects might not be the actual source of the audio (e.g., |
49 // an extension creates a object that is passed and used within a page). | 63 // an extension creates a object that is passed and used within a page). |
50 WebLocalFrame* const web_frame = WebLocalFrame::frameForCurrentContext(); | 64 WebLocalFrame* const web_frame = WebLocalFrame::frameForCurrentContext(); |
51 RenderFrame* const render_frame = | 65 RenderFrame* const render_frame = |
52 web_frame ? RenderFrame::FromWebFrame(web_frame) : NULL; | 66 web_frame ? RenderFrame::FromWebFrame(web_frame) : NULL; |
53 output_device_ = AudioDeviceFactory::NewOutputDevice( | 67 output_device_ = AudioDeviceFactory::NewOutputDevice( |
54 render_frame ? render_frame->GetRoutingID(): MSG_ROUTING_NONE); | 68 render_frame ? render_frame->GetRoutingID(): MSG_ROUTING_NONE); |
55 output_device_->InitializeWithSessionId(params_, this, session_id_); | 69 output_device_->InitializeWithSessionId(params_, this, session_id_); |
56 output_device_->Start(); | 70 output_device_->Start(); |
| 71 start_null_audio_sink_callback_.Reset( |
| 72 base::Bind(&media::NullAudioSink::Play, null_audio_sink_)); |
57 // Note: Default behavior is to auto-play on start. | 73 // Note: Default behavior is to auto-play on start. |
58 } | 74 } |
59 | 75 |
60 void RendererWebAudioDeviceImpl::stop() { | 76 void RendererWebAudioDeviceImpl::stop() { |
61 DCHECK(thread_checker_.CalledOnValidThread()); | 77 DCHECK(thread_checker_.CalledOnValidThread()); |
62 | 78 |
63 if (output_device_.get()) { | 79 if (output_device_) { |
64 output_device_->Stop(); | 80 output_device_->Stop(); |
65 output_device_ = NULL; | 81 output_device_ = NULL; |
66 } | 82 } |
| 83 null_audio_sink_->Stop(); |
| 84 is_using_null_audio_sink_ = false; |
| 85 is_first_buffer_after_silence_ = false; |
| 86 start_null_audio_sink_callback_.Cancel(); |
67 } | 87 } |
68 | 88 |
69 double RendererWebAudioDeviceImpl::sampleRate() { | 89 double RendererWebAudioDeviceImpl::sampleRate() { |
70 return params_.sample_rate(); | 90 return params_.sample_rate(); |
71 } | 91 } |
72 | 92 |
// media::AudioRendererSink::RenderCallback implementation. Fills |dest| with
// audio produced by the WebAudio render callback and returns the number of
// frames rendered. On Android, additionally tracks sustained silence so the
// physical output device can be paused (and resumed) to save power.
//
// NOTE(review): while the real device plays, this runs on its audio thread;
// while the null sink plays, it runs on |task_runner_|. The flags below are
// read and written from both contexts without explicit synchronization --
// presumably safe because only one sink renders at a time, but confirm.
int RendererWebAudioDeviceImpl::Render(media::AudioBus* dest,
                                       int audio_delay_milliseconds) {
#if defined(OS_ANDROID)
  if (is_first_buffer_after_silence_) {
    DCHECK(!is_using_null_audio_sink_);
    // Replay the buffer captured when audio first resumed, instead of asking
    // the client for a new one.
    first_buffer_after_silence_->CopyTo(dest);
    is_first_buffer_after_silence_ = false;
    return dest->frames();
  }
#endif
  // Wrap the output pointers using WebVector.
  WebVector<float*> web_audio_dest_data(
      static_cast<size_t>(dest->channels()));
  for (int i = 0; i < dest->channels(); ++i)
    web_audio_dest_data[i] = dest->channel(i);

  // TODO(xians): Remove the following |web_audio_source_data| after
  // changing the blink interface.
  WebVector<float*> web_audio_source_data(static_cast<size_t>(0));
  client_callback_->render(web_audio_source_data,
                           web_audio_dest_data,
                           dest->frames());

#if defined(OS_ANDROID)
  const bool is_zero = dest->AreFramesZero();
  if (!is_zero) {
    // Non-silent output: reset the silence clock.
    first_silence_time_ = base::TimeTicks();
    if (is_using_null_audio_sink_) {
      // This is called on the main render thread when audio is detected.
      // Resume the real device and stash this buffer so it can be replayed
      // as the device's first callback (see the block at the top).
      output_device_->Play();
      is_using_null_audio_sink_ = false;
      is_first_buffer_after_silence_ = true;
      dest->CopyTo(first_buffer_after_silence_.get());
      task_runner_->PostTask(
          FROM_HERE,
          base::Bind(&media::NullAudioSink::Stop, null_audio_sink_));
    }
  } else if (!is_using_null_audio_sink_) {
    // Called on the audio device thread.
    const base::TimeTicks now = base::TimeTicks::Now();
    if (first_silence_time_.is_null())
      first_silence_time_ = now;
    if (now - first_silence_time_
        > base::TimeDelta::FromSeconds(kSilenceInSecondsToEnterIdleMode)) {
      // Silence has lasted long enough: idle the real device and schedule
      // the fake sink to take over after one buffer's worth of time.
      output_device_->Pause();
      is_using_null_audio_sink_ = true;
      // If Stop() is called right after the task is posted, need to cancel
      // this task.
      task_runner_->PostDelayedTask(
          FROM_HERE,
          start_null_audio_sink_callback_.callback(),
          params_.GetBufferDuration());
    }
  }
#endif
  return dest->frames();
}
92 | 150 |
93 void RendererWebAudioDeviceImpl::OnRenderError() { | 151 void RendererWebAudioDeviceImpl::OnRenderError() { |
94 // TODO(crogers): implement error handling. | 152 // TODO(crogers): implement error handling. |
95 } | 153 } |
96 | 154 |
97 } // namespace content | 155 } // namespace content |
OLD | NEW |