Chromium Code Reviews

Unified Diff: content/renderer/media/media_stream_audio_source.cc

Issue 1647773002: MediaStream audio sourcing: Bypass audio processing for non-WebRTC cases. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: "First attempt", created 4 years, 11 months ago
--- a/content/renderer/media/media_stream_audio_source.cc
+++ b/content/renderer/media/media_stream_audio_source.cc
@@ -1,55 +1,438 @@
 // Copyright 2014 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
 #include "content/renderer/media/media_stream_audio_source.h"
 
+#include <algorithm>
+
+#include "content/public/renderer/media_stream_audio_sink.h"
+#include "content/renderer/media/audio_device_factory.h"
+#include "content/renderer/media/media_stream_audio_processor.h"
 #include "content/renderer/render_frame_impl.h"
+#include "media/audio/audio_input_device.h"
+#include "media/audio/audio_parameters.h"
+#include "third_party/WebKit/public/platform/WebMediaStreamSource.h"
 
 namespace content {
 
+namespace {
+
+// If |frames_per_buffer| is invalid, compute 10 ms worth of frames based on
+// the |sample_rate|.
+int FramesPerBufferOrDefault(int frames_per_buffer, int sample_rate) {
+  if (frames_per_buffer > 0)
+    return frames_per_buffer;
+
+  frames_per_buffer = sample_rate / 100;
+  DCHECK_GT(frames_per_buffer, 0);
+  return frames_per_buffer;
+}
+
+// Returns a media::AudioParameters initialized from the parameters found in
+// |device_info|.
+media::AudioParameters GetParametersFromStreamDeviceInfo(
+    const StreamDeviceInfo& device_info) {
+  const MediaStreamDevice::AudioDeviceParameters& input_params =
+      device_info.device.input;
+  DCHECK_GT(input_params.sample_rate, 0);
+  DCHECK_GT(input_params.channel_layout, 1);
+  return media::AudioParameters(
+      media::AudioParameters::AUDIO_PCM_LOW_LATENCY,
+      static_cast<media::ChannelLayout>(input_params.channel_layout),
+      input_params.sample_rate,
+      16,  // Legacy parameter (data is always in 32-bit float format).
+      FramesPerBufferOrDefault(input_params.frames_per_buffer,
+                               input_params.sample_rate));
+}
+
+}  // namespace
+
+class MediaStreamAudioSource::AudioTee : public MediaStreamAudioTrack {
+ public:
+  explicit AudioTee(base::WeakPtr<MediaStreamAudioSource> source)
+      : MediaStreamAudioTrack(!source->is_remote_),
+        params_(source->params_),
+        source_(source),
+        is_enabled_(true) {}
+
+  ~AudioTee() final { Stop(); }
+
+  void AddSink(MediaStreamAudioSink* sink) final {
+    DCHECK(thread_checker_.CalledOnValidThread());
+    DCHECK(sink);
+    {
+      base::AutoLock auto_lock(lock_);
+      DCHECK(std::find(sinks_.begin(), sinks_.end(), sink) == sinks_.end());
+      DCHECK(std::find(added_sinks_.begin(), added_sinks_.end(), sink) ==
+             added_sinks_.end());
+      added_sinks_.push_back(sink);
+    }
+    sink->OnReadyStateChanged(blink::WebMediaStreamSource::ReadyStateLive);
+  }
+
+  void RemoveSink(MediaStreamAudioSink* sink) final {
+    DCHECK(thread_checker_.CalledOnValidThread());
+    {
+      base::AutoLock auto_lock(lock_);
+      auto it = std::find(added_sinks_.begin(), added_sinks_.end(), sink);
+      if (it != added_sinks_.end()) {
+        added_sinks_.erase(it);
+      } else {
+        it = std::find(sinks_.begin(), sinks_.end(), sink);
+        if (it != sinks_.end())
+          sinks_.erase(it);
+      }
+    }
+    sink->OnReadyStateChanged(blink::WebMediaStreamSource::ReadyStateEnded);
+  }
+
+  webrtc::AudioTrackInterface* GetAudioAdapter() final { return nullptr; }
+
+  media::AudioParameters GetOutputFormat() const final { return params_; }
+
+  void SetEnabled(bool enabled) final {
+    base::AutoLock auto_lock(lock_);
+    is_enabled_ = enabled;
+  }
+
+  void Stop() final {
+    DCHECK(thread_checker_.CalledOnValidThread());
+
+    if (source_)
+      source_->StopAudioDeliveryTo(this);
+
+    std::vector<MediaStreamAudioSink*> zombies;
+    {
+      base::AutoLock auto_lock(lock_);
+      zombies.swap(sinks_);
+      zombies.insert(zombies.end(), added_sinks_.begin(), added_sinks_.end());
+      added_sinks_.clear();
+    }
+    for (MediaStreamAudioSink* zombie : zombies)
+      zombie->OnReadyStateChanged(blink::WebMediaStreamSource::ReadyStateEnded);
+  }
+
+  // Called by the MediaStreamAudioSource to deliver audio data to this track,
+  // which in turn delivers the audio to one or more MediaStreamAudioSinks.
+  void DeliverDataToSinks(const media::AudioBus& audio_bus,
+                          base::TimeTicks reference_time) {
+    // Lock sink lists while this audio thread is manipulating the lists and
+    // invoking the OnData() callback for each sink.
+    base::AutoLock auto_lock(lock_);
+
+    // If audio delivery is currently disabled, take no action.
+    if (!is_enabled_)
+      return;
+
+    // For all newly-added sinks, call OnSetFormat() and move them to the
+    // active sink list.
+    if (!added_sinks_.empty()) {
+      for (MediaStreamAudioSink* sink : added_sinks_)
+        sink->OnSetFormat(params_);
+      sinks_.insert(sinks_.end(), added_sinks_.begin(), added_sinks_.end());
+      added_sinks_.clear();
+    }
+
+    // Deliver the audio data to each sink.
+    for (MediaStreamAudioSink* sink : sinks_)
+      sink->OnData(audio_bus, reference_time);
+  }
+
+ private:
+  // Source audio parameters.
+  const media::AudioParameters params_;
+
+  // A weak reference is held to notify the source when this instance is
+  // stopped.
+  const base::WeakPtr<MediaStreamAudioSource> source_;
+
+  // In debug builds, check that MediaStreamAudioTrack methods are being
+  // called on the same thread.
+  base::ThreadChecker thread_checker_;
+
+  // Lock protects concurrent access to the sink lists below and the
+  // |is_enabled_| state, between the main thread and the audio thread.
+  base::Lock lock_;
+
+  // Sinks added via AddSink() that need to have an initial OnSetFormat() call
+  // on the audio thread before audio data is first delivered.
+  std::vector<MediaStreamAudioSink*> added_sinks_;
+
+  // Sinks that have had OnSetFormat() called and are receiving audio data. On
+  // the audio thread, sinks are taken from |added_sinks_| and added to
+  // |sinks_|.
+  std::vector<MediaStreamAudioSink*> sinks_;
+
+  // When false, delivery of audio data to the sinks is temporarily halted.
+  bool is_enabled_;
+
+  DISALLOW_COPY_AND_ASSIGN(AudioTee);
+};
+
+MediaStreamAudioSource::MediaStreamAudioSource()
+    : params_(
+          media::AudioParameters::AUDIO_PCM_LOW_LATENCY,
+          media::CHANNEL_LAYOUT_STEREO,
+          media::AudioParameters::kAudioCDSampleRate,
+          16,  // Legacy parameter (data is always in 32-bit float format).
+          FramesPerBufferOrDefault(0,
+                                   media::AudioParameters::kAudioCDSampleRate)),
+      is_remote_(false),
+      consumer_render_frame_id_(-1),
+      current_state_(NULL_SOURCE_NOT_STARTED),
+      pc_factory_(nullptr),
+      weak_factory_(this) {}
+
 MediaStreamAudioSource::MediaStreamAudioSource(
-    int render_frame_id,
-    const StreamDeviceInfo& device_info,
-    const SourceStoppedCallback& stop_callback,
-    PeerConnectionDependencyFactory* factory)
-    : render_frame_id_(render_frame_id), factory_(factory) {
+    int consumer_render_frame_id,
+    const StreamDeviceInfo& device_info)
+    : params_(GetParametersFromStreamDeviceInfo(device_info)),
+      is_remote_(false),
+      consumer_render_frame_id_(consumer_render_frame_id),
+      current_state_(INPUT_DEVICE_NOT_STARTED),
+      pc_factory_(nullptr),
+      weak_factory_(this) {
+  DCHECK(params_.IsValid());
+  DCHECK(RenderFrameImpl::FromRoutingID(consumer_render_frame_id_));
   SetDeviceInfo(device_info);
-  SetStopCallback(stop_callback);
 }
 
-MediaStreamAudioSource::MediaStreamAudioSource()
-    : render_frame_id_(-1), factory_(NULL) {
-}
-
-MediaStreamAudioSource::~MediaStreamAudioSource() {}
+MediaStreamAudioSource::MediaStreamAudioSource(
+    const scoped_refptr<media::AudioCapturerSource>& source,
+    int sample_rate,
+    media::ChannelLayout channel_layout,
+    int frames_per_buffer,
+    bool is_remote)
+    : params_(media::AudioParameters::AUDIO_PCM_LOW_LATENCY,
+              channel_layout,
+              sample_rate,
+              16,  // Legacy parameter (data is always in 32-bit float format).
+              FramesPerBufferOrDefault(frames_per_buffer, sample_rate)),
+      is_remote_(is_remote),
+      consumer_render_frame_id_(-1),
+      current_state_(SOURCE_NOT_STARTED),
+      source_(source),
+      pc_factory_(nullptr),
+      weak_factory_(this) {
+  DCHECK(params_.IsValid());
+}
+
+MediaStreamAudioSource::~MediaStreamAudioSource() {
+  DCHECK(thread_checker_.CalledOnValidThread());
+  if (current_state_ != STOPPED)
+    StopSource();
+  DCHECK_EQ(current_state_, STOPPED);
+}
+
+bool MediaStreamAudioSource::ConnectToTrack(
+    const blink::WebMediaStreamTrack& track,
+    const blink::WebMediaConstraints& constraints) {
+  DCHECK(thread_checker_.CalledOnValidThread());
+  DCHECK(!track.isNull());
+
+  // Sanity-check that there is not already a MediaStreamAudioTrack instance
+  // associated with |track|.
+  if (MediaStreamAudioTrack::GetTrack(track)) {
+    NOTREACHED()
+        << "Attempting to connect another source to a WebMediaStreamTrack.";
+    return false;
+  }
+
+  if (!EnsureSourceIsStarted(constraints))
+    return false;
+
+  // Delegate track creation/connection to the WebRTC audio pipeline, if
+  // necessary.
+  if (current_state_ == STARTED_WEBRTC_PIPELINE) {
+    DCHECK(pc_factory_);
+    pc_factory_->CreateLocalAudioTrack(track);
+    return true;
+  }
+
+  // Create an AudioTee to deliver audio data directly from the |source_| to
+  // all sinks managed by the AudioTee. Pass ownership of it to the
+  // WebMediaStreamTrack.
+  DCHECK_EQ(current_state_, STARTED_DEFAULT_PIPELINE);
+  AudioTee* const tee = new AudioTee(weak_factory_.GetWeakPtr());
+  tee->SetEnabled(track.isEnabled());
+  blink::WebMediaStreamTrack mutable_track = track;
+  mutable_track.setExtraData(tee);  // Takes ownership.
+  {
+    base::AutoLock auto_lock(lock_);
+    audio_tees_.push_back(tee);
+  }
+  return true;
+}
 
 void MediaStreamAudioSource::DoStopSource() {
+  DCHECK(thread_checker_.CalledOnValidThread());
+
+  if (current_state_ == STOPPED)
+    return;
+  current_state_ = STOPPED;
+
+  if (source_)
+    source_->Stop();
+
   if (audio_capturer_.get())
     audio_capturer_->Stop();
 }
 
-void MediaStreamAudioSource::AddTrack(
-    const blink::WebMediaStreamTrack& track,
-    const blink::WebMediaConstraints& constraints,
-    const ConstraintsCallback& callback) {
-  // TODO(xians): Properly implement for audio sources.
-  if (!local_audio_source_.get()) {
-    if (!factory_->InitializeMediaStreamAudioSource(render_frame_id_,
+bool MediaStreamAudioSource::EnsureSourceIsStarted(
+    const blink::WebMediaConstraints& constraints) {
+  DCHECK(thread_checker_.CalledOnValidThread());
+
+  switch (current_state_) {
+    case NULL_SOURCE_NOT_STARTED:
+      if (pc_factory_) {
+        StartWebRtcPipeline(constraints);
+      } else {
+        StartDefaultPipeline();
+      }
+
+      if (current_state_ == STOPPED) {
+        VLOG(1) << "Failed to start null source.";
+      } else {
+        VLOG(1) << "Started null source using "
+                << (current_state_ == STARTED_DEFAULT_PIPELINE ? "default"
+                                                               : "WebRTC")
+                << " audio pipeline, audio parameters={"
+                << params_.AsHumanReadableString() << "}.";
+      }
+      break;
+
+    case INPUT_DEVICE_NOT_STARTED:
+      // Sanity-check that the consuming RenderFrame still exists. This is
+      // required by AudioDeviceFactory.
+      if (!RenderFrameImpl::FromRoutingID(consumer_render_frame_id_))
+        break;
+
+      // If there is no PeerConnectionFactory instance, or if the audio
+      // processing pipeline should not be used, create a direct route for
+      // unmodified audio data from the local source.
+      if (!pc_factory_ ||
+          IsContentCaptureMediaType(device_info().device.type) ||
+          !MediaStreamAudioProcessor::ShouldRouteAudioThroughProcessor(
+              constraints, device_info().device.input.effects)) {
+        source_ = AudioDeviceFactory::NewInputDevice(consumer_render_frame_id_);
+        StartDefaultPipeline();
+      } else if (pc_factory_) {
+        StartWebRtcPipeline(constraints);
+      } else {
+        NOTREACHED() << "Failed to determine which audio pipeline to use.";
+        StopSource();
+      }
+
+      if (current_state_ == STOPPED) {
+        VLOG(1) << "Failed to start input device.";
+      } else {
+        VLOG(1) << "Started input device and using "
+                << (current_state_ == STARTED_DEFAULT_PIPELINE ? "default"
+                                                               : "WebRTC")
+                << " audio pipeline, session_id=" << device_info().session_id
+                << ", audio parameters={" << params_.AsHumanReadableString()
+                << "}.";
+      }
+      break;
+
+    case SOURCE_NOT_STARTED:
+      StartDefaultPipeline();
+      DCHECK_EQ(current_state_, STARTED_DEFAULT_PIPELINE);
+      VLOG(1) << "Started externally-provided "
+              << (is_remote_ ? "remote" : "local")
+              << " source and using default audio pipeline with audio "
+                 "parameters={"
+              << params_.AsHumanReadableString() << "}.";
+      break;
+
+    case STARTED_DEFAULT_PIPELINE:
+    case STARTED_WEBRTC_PIPELINE:
+    case STOPPED:
+      break;
+  }
+
+  return current_state_ == STARTED_DEFAULT_PIPELINE ||
+         current_state_ == STARTED_WEBRTC_PIPELINE;
+}
+
+void MediaStreamAudioSource::StartDefaultPipeline() {
+  DCHECK(thread_checker_.CalledOnValidThread());
+
+  if (source_) {
+    const int session_id = current_state_ == INPUT_DEVICE_NOT_STARTED
+                               ? device_info().session_id
+                               : -1;
+    source_->Initialize(params_, this, session_id);
+    source_->Start();
+  }
+
+  current_state_ = STARTED_DEFAULT_PIPELINE;
+}
+
+void MediaStreamAudioSource::StartWebRtcPipeline(
+    const blink::WebMediaConstraints& constraints) {
+  DCHECK(thread_checker_.CalledOnValidThread());
+  DCHECK(pc_factory_);
+
+  // TODO(xians): Constraints should only apply to the track instead of the
+  // source. See TODO comments in
+  // ./webrtc/peer_connection_dependency_factory.cc.
+  if (pc_factory_->InitializeMediaStreamAudioSource(consumer_render_frame_id_,
                                                     constraints, this)) {
-      // The source failed to start.
-      // UserMediaClientImpl rely on the |stop_callback| to be triggered when
-      // the last track is removed from the source. But in this case, the
-      // source is is not even started. So we need to fail both adding the
-      // track and trigger |stop_callback|.
-      callback.Run(this, MEDIA_DEVICE_TRACK_START_FAILURE, "");
-      StopSource();
-      return;
-    }
-  }
-
-  factory_->CreateLocalAudioTrack(track);
-  callback.Run(this, MEDIA_DEVICE_OK, "");
+    DCHECK(audio_capturer_.get());
+    current_state_ = STARTED_WEBRTC_PIPELINE;
+  } else {
+    // The source failed to start.
+    StopSource();
+  }
+}
+
+void MediaStreamAudioSource::StopAudioDeliveryTo(
+    MediaStreamAudioSource::AudioTee* tee) {
+  DCHECK(thread_checker_.CalledOnValidThread());
+  DCHECK_NE(current_state_, STARTED_WEBRTC_PIPELINE);
+
+  // Remove |tee| from the list of AudioTees. After the last AudioTee is
+  // removed, stop the source.
+  bool should_stop_source = false;
+  {
+    base::AutoLock auto_lock(lock_);
+    const auto it = std::find(audio_tees_.begin(), audio_tees_.end(), tee);
+    if (it != audio_tees_.end())
+      audio_tees_.erase(it);
+    should_stop_source = audio_tees_.empty();
+  }
+  if (should_stop_source)
+    StopSource();
+}
+
+void MediaStreamAudioSource::Capture(const media::AudioBus* audio_bus,
+                                     int audio_delay_milliseconds,
+                                     double volume,
+                                     bool key_pressed) {
+  // TODO(miu): Plumbing is needed to determine the actual capture timestamp
+  // of the audio, instead of just snapshotting TimeTicks::Now(), for proper
+  // audio/video sync. http://crbug.com/335335
+  const base::TimeTicks reference_time =
+      base::TimeTicks::Now() -
+      base::TimeDelta::FromMilliseconds(audio_delay_milliseconds);
+
+  // Deliver the audio data to each tee.
+  base::AutoLock auto_lock(lock_);
+  for (AudioTee* tee : audio_tees_) {
+    // As of this writing, |volume| is only used for an "automatic gain
+    // control" feature that does not apply here. Thus, assume the volume is
+    // always 1.0, which means the audio data should be passed through
+    // unmodified.
+    tee->DeliverDataToSinks(*audio_bus, reference_time);
+  }
+}
+
+void MediaStreamAudioSource::OnCaptureError(const std::string& message) {
+  // As of this writing, this method doesn't get called for anything useful,
+  // and all other implementors just log the message, but don't disconnect
+  // sinks or take any other action. So, just log the error.
+  LOG(ERROR) << message;
 }
 
 }  // namespace content
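
Note on the AudioTee design: sinks added on the main thread wait in |added_sinks_| until the audio thread promotes them to |sinks_|, which guarantees every sink sees OnSetFormat() before its first OnData(), with both lists guarded by one lock. A minimal standalone sketch of that two-list handoff, using std::mutex in place of base::Lock and hypothetical Sink/Format types rather than the actual Chromium interfaces:

#include <algorithm>
#include <cstdio>
#include <mutex>
#include <vector>

// Hypothetical stand-ins for MediaStreamAudioSink and media::AudioParameters.
struct Format { int sample_rate = 44100; };
struct Sink {
  virtual ~Sink() = default;
  virtual void OnSetFormat(const Format& format) = 0;
  virtual void OnData(const float* samples, int frames) = 0;
};

// Two-list tee: AddSink() may race with audio delivery, so new sinks are
// parked in |added_sinks_| and only promoted to |sinks_| on the audio thread,
// guaranteeing OnSetFormat() precedes the first OnData() for every sink.
class Tee {
 public:
  void AddSink(Sink* sink) {
    std::lock_guard<std::mutex> lock(mutex_);
    added_sinks_.push_back(sink);
  }

  void RemoveSink(Sink* sink) {
    std::lock_guard<std::mutex> lock(mutex_);
    auto it = std::find(added_sinks_.begin(), added_sinks_.end(), sink);
    if (it != added_sinks_.end()) {
      added_sinks_.erase(it);
      return;
    }
    it = std::find(sinks_.begin(), sinks_.end(), sink);
    if (it != sinks_.end())
      sinks_.erase(it);
  }

  // Called on the audio thread only.
  void Deliver(const float* samples, int frames) {
    std::lock_guard<std::mutex> lock(mutex_);
    for (Sink* sink : added_sinks_)
      sink->OnSetFormat(format_);  // First-time format notification.
    sinks_.insert(sinks_.end(), added_sinks_.begin(), added_sinks_.end());
    added_sinks_.clear();
    for (Sink* sink : sinks_)
      sink->OnData(samples, frames);
  }

 private:
  std::mutex mutex_;
  Format format_;
  std::vector<Sink*> added_sinks_;  // Awaiting OnSetFormat().
  std::vector<Sink*> sinks_;        // Receiving OnData().
};

struct PrintingSink : Sink {
  void OnSetFormat(const Format& format) override {
    std::printf("OnSetFormat(%d Hz)\n", format.sample_rate);
  }
  void OnData(const float*, int frames) override {
    std::printf("OnData(%d frames)\n", frames);
  }
};

int main() {
  Tee tee;
  PrintingSink sink;
  tee.AddSink(&sink);
  float buffer[441] = {};
  tee.Deliver(buffer, 441);  // Prints OnSetFormat first, then OnData.
  tee.RemoveSink(&sink);
}

Promoting sinks on the audio thread keeps the format notification and the first data delivery on the same thread, so a sink can never observe them out of order.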
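Two small calculations in the new code are worth spelling out: FramesPerBufferOrDefault() falls back to 10 ms of audio (sample_rate / 100 frames) when the device reports no buffer size, and Capture() approximates the capture timestamp by backing the reported delay out of the current time. A compilable sketch of both, assuming nothing beyond what the diff shows (std::chrono stands in for base::TimeTicks):

#include <cassert>
#include <chrono>

// 10 ms worth of frames when |frames_per_buffer| is invalid; mirrors
// FramesPerBufferOrDefault() in the diff above.
int FramesPerBufferOrDefault(int frames_per_buffer, int sample_rate) {
  if (frames_per_buffer > 0)
    return frames_per_buffer;
  return sample_rate / 100;  // e.g. 44100 / 100 = 441 frames.
}

// Approximate the original capture time by subtracting the reported delay
// from "now"; mirrors the |reference_time| computation in Capture() above.
std::chrono::steady_clock::time_point EstimateCaptureTime(
    int audio_delay_milliseconds) {
  return std::chrono::steady_clock::now() -
         std::chrono::milliseconds(audio_delay_milliseconds);
}

int main() {
  assert(FramesPerBufferOrDefault(0, 44100) == 441);    // CD rate default.
  assert(FramesPerBufferOrDefault(256, 48000) == 256);  // Device value wins.
  (void)EstimateCaptureTime(20);
}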
