OLD | NEW |
---|---|
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/renderer/media/renderer_webaudiodevice_impl.h" | 5 #include "content/renderer/media/renderer_webaudiodevice_impl.h" |
6 | 6 |
7 #include "base/command_line.h" | 7 #include "base/command_line.h" |
8 #include "base/logging.h" | 8 #include "base/logging.h" |
9 #include "base/single_thread_task_runner.h" | |
10 #include "base/thread_task_runner_handle.h" | |
11 #include "base/time/time.h" | |
9 #include "content/renderer/media/audio_device_factory.h" | 12 #include "content/renderer/media/audio_device_factory.h" |
10 #include "content/renderer/render_frame_impl.h" | 13 #include "content/renderer/render_frame_impl.h" |
11 #include "media/audio/audio_output_device.h" | 14 #include "media/audio/audio_output_device.h" |
15 #include "media/audio/null_audio_sink.h" | |
16 #include "media/base/audio_timestamp_helper.h" | |
12 #include "media/base/media_switches.h" | 17 #include "media/base/media_switches.h" |
13 #include "third_party/WebKit/public/web/WebLocalFrame.h" | 18 #include "third_party/WebKit/public/web/WebLocalFrame.h" |
14 #include "third_party/WebKit/public/web/WebView.h" | 19 #include "third_party/WebKit/public/web/WebView.h" |
15 | 20 |
16 using blink::WebAudioDevice; | 21 using blink::WebAudioDevice; |
17 using blink::WebLocalFrame; | 22 using blink::WebLocalFrame; |
18 using blink::WebVector; | 23 using blink::WebVector; |
19 using blink::WebView; | 24 using blink::WebView; |
20 | 25 |
21 namespace content { | 26 namespace content { |
22 | 27 |
28 static const int kSilenceInSecondsToEnterIdleMode = 30.0; | |
29 | |
23 RendererWebAudioDeviceImpl::RendererWebAudioDeviceImpl( | 30 RendererWebAudioDeviceImpl::RendererWebAudioDeviceImpl( |
24 const media::AudioParameters& params, | 31 const media::AudioParameters& params, |
25 WebAudioDevice::RenderCallback* callback, | 32 WebAudioDevice::RenderCallback* callback, |
26 int session_id) | 33 int session_id) |
27 : params_(params), | 34 : params_(params), |
28 client_callback_(callback), | 35 client_callback_(callback), |
29 session_id_(session_id) { | 36 session_id_(session_id), |
37 audio_timestamp_helper_( | |
38 new media::AudioTimestampHelper(params.sample_rate())), | |
39 task_runner_(base::ThreadTaskRunnerHandle::Get()), | |
40 null_audio_sink_(new media::NullAudioSink(task_runner_)), | |
41 is_using_null_audio_sink_(false), | |
42 weak_factory_(this) { | |
30 DCHECK(client_callback_); | 43 DCHECK(client_callback_); |
44 audio_timestamp_helper_->SetBaseTimestamp(base::TimeDelta()); | |
45 null_audio_sink_->Initialize(params_, this); | |
46 weak_this_ = weak_factory_.GetWeakPtr(); | |
31 } | 47 } |
32 | 48 |
33 RendererWebAudioDeviceImpl::~RendererWebAudioDeviceImpl() { | 49 RendererWebAudioDeviceImpl::~RendererWebAudioDeviceImpl() { |
34 DCHECK(!output_device_.get()); | 50 DCHECK(!output_device_); |
35 } | 51 } |
36 | 52 |
37 void RendererWebAudioDeviceImpl::start() { | 53 void RendererWebAudioDeviceImpl::start() { |
38 DCHECK(thread_checker_.CalledOnValidThread()); | 54 DCHECK(thread_checker_.CalledOnValidThread()); |
39 | 55 |
40 if (output_device_.get()) | 56 if (output_device_) |
41 return; // Already started. | 57 return; // Already started. |
42 | 58 |
43 // Assumption: This method is being invoked within a V8 call stack. CHECKs | 59 // Assumption: This method is being invoked within a V8 call stack. CHECKs |
44 // will fail in the call to frameForCurrentContext() otherwise. | 60 // will fail in the call to frameForCurrentContext() otherwise. |
45 // | 61 // |
46 // Therefore, we can perform look-ups to determine which RenderView is | 62 // Therefore, we can perform look-ups to determine which RenderView is |
47 // starting the audio device. The reason for all this is because the creator | 63 // starting the audio device. The reason for all this is because the creator |
48 // of the WebAudio objects might not be the actual source of the audio (e.g., | 64 // of the WebAudio objects might not be the actual source of the audio (e.g., |
49 // an extension creates an object that is passed and used within a page). | 65 // an extension creates an object that is passed and used within a page). |
50 WebLocalFrame* const web_frame = WebLocalFrame::frameForCurrentContext(); | 66 WebLocalFrame* const web_frame = WebLocalFrame::frameForCurrentContext(); |
51 RenderFrame* const render_frame = | 67 RenderFrame* const render_frame = |
52 web_frame ? RenderFrame::FromWebFrame(web_frame) : NULL; | 68 web_frame ? RenderFrame::FromWebFrame(web_frame) : NULL; |
53 output_device_ = AudioDeviceFactory::NewOutputDevice( | 69 output_device_ = AudioDeviceFactory::NewOutputDevice( |
54 render_frame ? render_frame->GetRoutingID(): MSG_ROUTING_NONE); | 70 render_frame ? render_frame->GetRoutingID(): MSG_ROUTING_NONE); |
55 output_device_->InitializeWithSessionId(params_, this, session_id_); | 71 output_device_->InitializeWithSessionId(params_, this, session_id_); |
56 output_device_->Start(); | 72 output_device_->Start(); |
57 // Note: Default behavior is to auto-play on start. | 73 // Note: Default behavior is to auto-play on start. |
58 } | 74 } |
59 | 75 |
60 void RendererWebAudioDeviceImpl::stop() { | 76 void RendererWebAudioDeviceImpl::stop() { |
61 DCHECK(thread_checker_.CalledOnValidThread()); | 77 DCHECK(thread_checker_.CalledOnValidThread()); |
62 | 78 |
63 if (output_device_.get()) { | 79 if (output_device_) { |
64 output_device_->Stop(); | 80 output_device_->Stop(); |
65 output_device_ = NULL; | 81 output_device_ = NULL; |
66 } | 82 } |
83 StopNullAudioSink(); | |
DaleCurtis
2015/06/22 19:14:40
Stop can be called multiple times, so I'd just cal
qinmin
2015/06/22 22:23:16
Done.
| |
67 } | 84 } |
68 | 85 |
69 double RendererWebAudioDeviceImpl::sampleRate() { | 86 double RendererWebAudioDeviceImpl::sampleRate() { |
70 return params_.sample_rate(); | 87 return params_.sample_rate(); |
71 } | 88 } |
72 | 89 |
73 int RendererWebAudioDeviceImpl::Render(media::AudioBus* dest, | 90 int RendererWebAudioDeviceImpl::Render(media::AudioBus* dest, |
74 int audio_delay_milliseconds) { | 91 int audio_delay_milliseconds) { |
75 if (client_callback_) { | 92 if (client_callback_) { |
76 // Wrap the output pointers using WebVector. | 93 // Wrap the output pointers using WebVector. |
77 WebVector<float*> web_audio_dest_data( | 94 WebVector<float*> web_audio_dest_data( |
78 static_cast<size_t>(dest->channels())); | 95 static_cast<size_t>(dest->channels())); |
79 for (int i = 0; i < dest->channels(); ++i) | 96 for (int i = 0; i < dest->channels(); ++i) |
80 web_audio_dest_data[i] = dest->channel(i); | 97 web_audio_dest_data[i] = dest->channel(i); |
81 | 98 |
82 // TODO(xians): Remove the following |web_audio_source_data| after | 99 // TODO(xians): Remove the following |web_audio_source_data| after |
83 // changing the blink interface. | 100 // changing the blink interface. |
84 WebVector<float*> web_audio_source_data(static_cast<size_t>(0)); | 101 WebVector<float*> web_audio_source_data(static_cast<size_t>(0)); |
85 client_callback_->render(web_audio_source_data, | 102 client_callback_->render(web_audio_source_data, |
86 web_audio_dest_data, | 103 web_audio_dest_data, |
87 dest->frames()); | 104 dest->frames()); |
88 } | 105 } |
89 | 106 |
107 #if defined(OS_ANDROID) | |
108 if (!dest->AreFramesZero() && is_using_null_audio_sink_) { | |
DaleCurtis
2015/06/22 19:14:40
We lose this buffer in this case, do we want to sa
qinmin
2015/06/22 22:23:16
I had the same question here. Changed the code to
| |
109 // This is called on the main render thread when audio is detected. | |
110 output_device_->Play(); | |
111 is_using_null_audio_sink_ = false; | |
112 task_runner_->PostTask( | |
113 FROM_HERE, | |
114 base::Bind(&RendererWebAudioDeviceImpl::StopNullAudioSink, | |
115 weak_this_)); | |
116 } else if (!is_using_null_audio_sink_) { | |
117 // Called on the audio device thread. | |
118 audio_timestamp_helper_->AddFrames(dest->frames()); | |
DaleCurtis
2015/06/22 19:14:40
Instead of using a timestamp helper:
On, l.108:
c
qinmin
2015/06/22 22:23:16
If I remember clearly, TimeTicks::Now() are very e
DaleCurtis
2015/06/22 22:49:42
Do you have a source for TimeTicks::Now being expe
qinmin
2015/06/23 00:01:53
see http://gamasutra.com/view/feature/171774/getti
DaleCurtis
2015/06/23 16:38:14
Despite those early statements, the benchmarks sho
| |
119 if (audio_timestamp_helper_->GetTimestamp().InSecondsF() | |
120 > kSilenceInSecondsToEnterIdleMode) { | |
121 output_device_->Pause(); | |
122 audio_timestamp_helper_->SetBaseTimestamp(base::TimeDelta()); | |
123 is_using_null_audio_sink_ = true; | |
124 // If Stop() is called right after the task is posted, | |
125 // StartNullAudioSink() should do nothing. | |
126 task_runner_->PostDelayedTask( | |
127 FROM_HERE, | |
128 base::Bind(&RendererWebAudioDeviceImpl::StartNullAudioSink, | |
129 weak_this_), | |
130 audio_timestamp_helper_->GetFrameDuration(dest->frames())); | |
131 } | |
132 } | |
133 #endif | |
90 return dest->frames(); | 134 return dest->frames(); |
91 } | 135 } |
92 | 136 |
93 void RendererWebAudioDeviceImpl::OnRenderError() { | 137 void RendererWebAudioDeviceImpl::OnRenderError() { |
94 // TODO(crogers): implement error handling. | 138 // TODO(crogers): implement error handling. |
95 } | 139 } |
96 | 140 |
141 void RendererWebAudioDeviceImpl::StopNullAudioSink() { | |
142 if (null_audio_sink_) | |
DaleCurtis
2015/06/22 19:14:41
Shouldn't this never be null? Additionally since A
qinmin
2015/06/22 22:23:16
Done.
| |
143 null_audio_sink_->Stop(); | |
144 } | |
145 | |
146 void RendererWebAudioDeviceImpl::StartNullAudioSink() { | |
147 if (null_audio_sink_) | |
148 null_audio_sink_->Play(); | |
149 } | |
150 | |
97 } // namespace content | 151 } // namespace content |
OLD | NEW |