Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(166)

Side by Side Diff: content/renderer/media/webrtc/peer_connection_dependency_factory.cc

Issue 1721273002: MediaStream audio object graph untangling and clean-ups. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Addressed mcasas's 1st round comments, plus REBASE. Created 4 years, 9 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 // Copyright 2014 The Chromium Authors. All rights reserved. 1 // Copyright 2014 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "content/renderer/media/webrtc/peer_connection_dependency_factory.h" 5 #include "content/renderer/media/webrtc/peer_connection_dependency_factory.h"
6 6
7 #include <stddef.h> 7 #include <stddef.h>
8 8
9 #include <utility> 9 #include <utility>
10 #include <vector> 10 #include <vector>
11 11
12 #include "base/bind.h"
13 #include "base/bind_helpers.h"
12 #include "base/command_line.h" 14 #include "base/command_line.h"
13 #include "base/location.h" 15 #include "base/location.h"
14 #include "base/logging.h" 16 #include "base/logging.h"
15 #include "base/macros.h" 17 #include "base/macros.h"
16 #include "base/metrics/field_trial.h" 18 #include "base/metrics/field_trial.h"
17 #include "base/strings/string_util.h" 19 #include "base/strings/string_util.h"
18 #include "base/strings/utf_string_conversions.h" 20 #include "base/strings/utf_string_conversions.h"
19 #include "base/synchronization/waitable_event.h" 21 #include "base/synchronization/waitable_event.h"
20 #include "build/build_config.h" 22 #include "build/build_config.h"
21 #include "content/common/media/media_stream_messages.h" 23 #include "content/common/media/media_stream_messages.h"
(...skipping 159 matching lines...) Expand 10 before | Expand all | Expand 10 after
181 // microphone or tab audio. 183 // microphone or tab audio.
182 RTCMediaConstraints native_audio_constraints(audio_constraints); 184 RTCMediaConstraints native_audio_constraints(audio_constraints);
183 MediaAudioConstraints::ApplyFixedAudioConstraints(&native_audio_constraints); 185 MediaAudioConstraints::ApplyFixedAudioConstraints(&native_audio_constraints);
184 186
185 StreamDeviceInfo device_info = source_data->device_info(); 187 StreamDeviceInfo device_info = source_data->device_info();
186 RTCMediaConstraints constraints = native_audio_constraints; 188 RTCMediaConstraints constraints = native_audio_constraints;
187 // May modify both |constraints| and |effects|. 189 // May modify both |constraints| and |effects|.
188 HarmonizeConstraintsAndEffects(&constraints, 190 HarmonizeConstraintsAndEffects(&constraints,
189 &device_info.device.input.effects); 191 &device_info.device.input.effects);
190 192
191 scoped_refptr<WebRtcAudioCapturer> capturer(CreateAudioCapturer( 193 scoped_ptr<WebRtcAudioCapturer> capturer = CreateAudioCapturer(
192 render_frame_id, device_info, audio_constraints, source_data)); 194 render_frame_id, device_info, audio_constraints, source_data);
193 if (!capturer.get()) { 195 if (!capturer.get()) {
194 const std::string log_string = 196 const std::string log_string =
195 "PCDF::InitializeMediaStreamAudioSource: fails to create capturer"; 197 "PCDF::InitializeMediaStreamAudioSource: fails to create capturer";
196 WebRtcLogMessage(log_string); 198 WebRtcLogMessage(log_string);
197 DVLOG(1) << log_string; 199 DVLOG(1) << log_string;
198 // TODO(xians): Don't we need to check if source_observer is observing 200 // TODO(xians): Don't we need to check if source_observer is observing
199 // something? If not, then it looks like we have a leak here. 201 // something? If not, then it looks like we have a leak here.
200 // OTOH, if it _is_ observing something, then the callback might 202 // OTOH, if it _is_ observing something, then the callback might
201 // be called multiple times which is likely also a bug. 203 // be called multiple times which is likely also a bug.
202 return false; 204 return false;
203 } 205 }
204 source_data->SetAudioCapturer(capturer.get()); 206 source_data->SetAudioCapturer(std::move(capturer));
205 207
206 // Creates a LocalAudioSource object which holds audio options. 208 // Creates a LocalAudioSource object which holds audio options.
207 // TODO(xians): The option should apply to the track instead of the source. 209 // TODO(xians): The option should apply to the track instead of the source.
208 // TODO(perkj): Move audio constraints parsing to Chrome. 210 // TODO(perkj): Move audio constraints parsing to Chrome.
209 // Currently there are a few constraints that are parsed by libjingle and 211 // Currently there are a few constraints that are parsed by libjingle and
210 // the state is set to ended if parsing fails. 212 // the state is set to ended if parsing fails.
211 scoped_refptr<webrtc::AudioSourceInterface> rtc_source( 213 scoped_refptr<webrtc::AudioSourceInterface> rtc_source(
212 CreateLocalAudioSource(&constraints).get()); 214 CreateLocalAudioSource(&constraints).get());
213 if (rtc_source->state() != webrtc::MediaSourceInterface::kLive) { 215 if (rtc_source->state() != webrtc::MediaSourceInterface::kLive) {
214 DLOG(WARNING) << "Failed to create rtc LocalAudioSource."; 216 DLOG(WARNING) << "Failed to create rtc LocalAudioSource.";
(...skipping 312 matching lines...) Expand 10 before | Expand all | Expand 10 after
527 const webrtc::MediaConstraintsInterface* constraints) { 529 const webrtc::MediaConstraintsInterface* constraints) {
528 scoped_refptr<webrtc::AudioSourceInterface> source = 530 scoped_refptr<webrtc::AudioSourceInterface> source =
529 GetPcFactory()->CreateAudioSource(constraints).get(); 531 GetPcFactory()->CreateAudioSource(constraints).get();
530 return source; 532 return source;
531 } 533 }
532 534
533 void PeerConnectionDependencyFactory::CreateLocalAudioTrack( 535 void PeerConnectionDependencyFactory::CreateLocalAudioTrack(
534 const blink::WebMediaStreamTrack& track) { 536 const blink::WebMediaStreamTrack& track) {
535 blink::WebMediaStreamSource source = track.source(); 537 blink::WebMediaStreamSource source = track.source();
536 DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeAudio); 538 DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeAudio);
537 DCHECK(!source.remote()); 539 MediaStreamAudioSource* source_data = MediaStreamAudioSource::From(source);
538 MediaStreamAudioSource* source_data =
539 static_cast<MediaStreamAudioSource*>(source.extraData());
540 540
541 scoped_refptr<WebAudioCapturerSource> webaudio_source; 541 scoped_ptr<WebAudioCapturerSource> webaudio_source;
542 if (!source_data) { 542 if (!source_data) {
543 if (source.requiresAudioConsumer()) { 543 if (source.requiresAudioConsumer()) {
544 // We're adding a WebAudio MediaStream. 544 // We're adding a WebAudio MediaStream.
545 // Create a specific capturer for each WebAudio consumer. 545 // Create a specific capturer for each WebAudio consumer.
546 webaudio_source = CreateWebAudioSource(&source); 546 webaudio_source = CreateWebAudioSource(&source);
547 source_data = 547 source_data = MediaStreamAudioSource::From(source);
548 static_cast<MediaStreamAudioSource*>(source.extraData());
549 } else { 548 } else {
550 NOTREACHED() << "Local track missing source extra data."; 549 NOTREACHED() << "Local track missing MediaStreamAudioSource instance.";
551 return; 550 return;
552 } 551 }
553 } 552 }
554 553
555 // Creates an adapter to hold all the libjingle objects. 554 // Creates an adapter to hold all the libjingle objects.
556 scoped_refptr<WebRtcLocalAudioTrackAdapter> adapter( 555 scoped_refptr<WebRtcLocalAudioTrackAdapter> adapter(
557 WebRtcLocalAudioTrackAdapter::Create(track.id().utf8(), 556 WebRtcLocalAudioTrackAdapter::Create(track.id().utf8(),
558 source_data->local_audio_source())); 557 source_data->local_audio_source()));
559 static_cast<webrtc::AudioTrackInterface*>(adapter.get())->set_enabled( 558 static_cast<webrtc::AudioTrackInterface*>(adapter.get())->set_enabled(
560 track.isEnabled()); 559 track.isEnabled());
561 560
562 // TODO(xians): Merge |source| to the capturer(). We can't do this today 561 // TODO(xians): Merge |source| to the capturer(). We can't do this today
563 // because only one capturer() is supported while one |source| is created 562 // because only one capturer() is supported while one |source| is created
564 // for each audio track. 563 // for each audio track.
565 scoped_ptr<WebRtcLocalAudioTrack> audio_track(new WebRtcLocalAudioTrack( 564 scoped_ptr<WebRtcLocalAudioTrack> audio_track(
566 adapter.get(), source_data->GetAudioCapturer(), webaudio_source.get())); 565 new WebRtcLocalAudioTrack(adapter.get()));
567 566
568 StartLocalAudioTrack(audio_track.get()); 567 // Start the source and connect the audio data flow to the track.
568 if (webaudio_source.get()) {
569 webaudio_source->Start(audio_track.get());
570 // The stop callback takes ownership of the |webaudio_source|, which will
571 // cause it to be auto-destroyed when the track is stopped.
572 //
573 // TODO(miu): In a future change, WebAudioCapturerSource will become a
574 // subclass of MediaStreamAudioSource, and this will allow it to be owned by
575 // the blink::WebMediaStreamSource so we don't need this hacky thing here.
576 audio_track->AddStopObserver(base::Bind(
577 &WebAudioCapturerSource::Stop, base::Owned(webaudio_source.release())));
578 } else if (WebRtcAudioCapturer* capturer = source_data->audio_capturer()) {
579 capturer->AddTrack(audio_track.get());
580 } else {
581 NOTREACHED();
582 }
569 583
570 // Pass the ownership of the native local audio track to the blink track. 584 // Pass the ownership of the native local audio track to the blink track.
571 blink::WebMediaStreamTrack writable_track = track; 585 blink::WebMediaStreamTrack writable_track = track;
572 writable_track.setExtraData(audio_track.release()); 586 writable_track.setExtraData(audio_track.release());
573 } 587 }
574 588
575 void PeerConnectionDependencyFactory::CreateRemoteAudioTrack( 589 void PeerConnectionDependencyFactory::CreateRemoteAudioTrack(
576 const blink::WebMediaStreamTrack& track) { 590 const blink::WebMediaStreamTrack& track) {
577 blink::WebMediaStreamSource source = track.source(); 591 blink::WebMediaStreamSource source = track.source();
578 DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeAudio); 592 DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeAudio);
579 DCHECK(source.remote()); 593 DCHECK(source.remote());
580 DCHECK(source.extraData()); 594 DCHECK(MediaStreamAudioSource::From(source));
581 595
582 blink::WebMediaStreamTrack writable_track = track; 596 blink::WebMediaStreamTrack writable_track = track;
583 writable_track.setExtraData( 597 writable_track.setExtraData(
584 new MediaStreamRemoteAudioTrack(source, track.isEnabled())); 598 new MediaStreamRemoteAudioTrack(source, track.isEnabled()));
585 } 599 }
586 600
587 void PeerConnectionDependencyFactory::StartLocalAudioTrack( 601 scoped_ptr<WebAudioCapturerSource>
588 WebRtcLocalAudioTrack* audio_track) {
589 // Start the audio track. This will hook the |audio_track| to the capturer
590 // as the sink of the audio, and only start the source of the capturer if
591 // it is the first audio track connecting to the capturer.
592 audio_track->Start();
593 }
594
595 scoped_refptr<WebAudioCapturerSource>
596 PeerConnectionDependencyFactory::CreateWebAudioSource( 602 PeerConnectionDependencyFactory::CreateWebAudioSource(
597 blink::WebMediaStreamSource* source) { 603 blink::WebMediaStreamSource* source) {
598 DVLOG(1) << "PeerConnectionDependencyFactory::CreateWebAudioSource()"; 604 DVLOG(1) << "PeerConnectionDependencyFactory::CreateWebAudioSource()";
599 605
600 scoped_refptr<WebAudioCapturerSource>
601 webaudio_capturer_source(new WebAudioCapturerSource(*source));
602 MediaStreamAudioSource* source_data = new MediaStreamAudioSource(); 606 MediaStreamAudioSource* source_data = new MediaStreamAudioSource();
603 607
604 // Use the current default capturer for the WebAudio track so that the
605 // WebAudio track can pass a valid delay value and |need_audio_processing|
606 // flag to PeerConnection.
607 // TODO(xians): Remove this after moving APM to Chrome.
608 if (GetWebRtcAudioDevice()) {
609 source_data->SetAudioCapturer(
610 GetWebRtcAudioDevice()->GetDefaultCapturer());
611 }
612
613 // Create a LocalAudioSource object which holds audio options. 608 // Create a LocalAudioSource object which holds audio options.
614 // SetLocalAudioSource() affects core audio parts in third_party/Libjingle. 609 // SetLocalAudioSource() affects core audio parts in third_party/Libjingle.
615 source_data->SetLocalAudioSource(CreateLocalAudioSource(NULL).get()); 610 source_data->SetLocalAudioSource(CreateLocalAudioSource(NULL).get());
616 source->setExtraData(source_data); 611 source->setExtraData(source_data);
617 612
618 // Replace the default source with WebAudio as source instead. 613 return make_scoped_ptr(new WebAudioCapturerSource(source));
619 source->addAudioConsumer(webaudio_capturer_source.get());
620
621 return webaudio_capturer_source;
622 } 614 }
623 615
624 scoped_refptr<webrtc::VideoTrackInterface> 616 scoped_refptr<webrtc::VideoTrackInterface>
625 PeerConnectionDependencyFactory::CreateLocalVideoTrack( 617 PeerConnectionDependencyFactory::CreateLocalVideoTrack(
626 const std::string& id, 618 const std::string& id,
627 webrtc::VideoSourceInterface* source) { 619 webrtc::VideoSourceInterface* source) {
628 return GetPcFactory()->CreateVideoTrack(id, source).get(); 620 return GetPcFactory()->CreateVideoTrack(id, source).get();
629 } 621 }
630 622
631 scoped_refptr<webrtc::VideoTrackInterface> 623 scoped_refptr<webrtc::VideoTrackInterface>
(...skipping 120 matching lines...) Expand 10 before | Expand all | Expand 10 after
752 // Stopping the thread will wait until all tasks have been 744 // Stopping the thread will wait until all tasks have been
753 // processed before returning. We wait for the above task to finish before 745 // processed before returning. We wait for the above task to finish before
754 // letting the function continue to avoid any potential race issues. 746 // letting the function continue to avoid any potential race issues.
755 chrome_worker_thread_.Stop(); 747 chrome_worker_thread_.Stop();
756 } else { 748 } else {
757 NOTREACHED() << "Worker thread not running."; 749 NOTREACHED() << "Worker thread not running.";
758 } 750 }
759 } 751 }
760 } 752 }
761 753
762 scoped_refptr<WebRtcAudioCapturer> 754 scoped_ptr<WebRtcAudioCapturer>
763 PeerConnectionDependencyFactory::CreateAudioCapturer( 755 PeerConnectionDependencyFactory::CreateAudioCapturer(
764 int render_frame_id, 756 int render_frame_id,
765 const StreamDeviceInfo& device_info, 757 const StreamDeviceInfo& device_info,
766 const blink::WebMediaConstraints& constraints, 758 const blink::WebMediaConstraints& constraints,
767 MediaStreamAudioSource* audio_source) { 759 MediaStreamAudioSource* audio_source) {
768 // TODO(xians): Handle the cases when gUM is called without a proper render 760 // TODO(xians): Handle the cases when gUM is called without a proper render
769 // view, for example, by an extension. 761 // view, for example, by an extension.
770 DCHECK_GE(render_frame_id, 0); 762 DCHECK_GE(render_frame_id, 0);
771 763
772 EnsureWebRtcAudioDeviceImpl(); 764 EnsureWebRtcAudioDeviceImpl();
(...skipping 24 matching lines...) Expand all
797 } 789 }
798 790
799 void PeerConnectionDependencyFactory::EnsureWebRtcAudioDeviceImpl() { 791 void PeerConnectionDependencyFactory::EnsureWebRtcAudioDeviceImpl() {
800 if (audio_device_.get()) 792 if (audio_device_.get())
801 return; 793 return;
802 794
803 audio_device_ = new WebRtcAudioDeviceImpl(); 795 audio_device_ = new WebRtcAudioDeviceImpl();
804 } 796 }
805 797
806 } // namespace content 798 } // namespace content
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698