Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(207)

Side by Side Diff: content/renderer/media/webrtc/peer_connection_dependency_factory.cc

Issue 1647773002: MediaStream audio sourcing: Bypass audio processing for non-WebRTC cases. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: NOT FOR REVIEW -- This will be broken up across multiple CLs. Created 4 years, 10 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 // Copyright 2014 The Chromium Authors. All rights reserved. 1 // Copyright 2014 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "content/renderer/media/webrtc/peer_connection_dependency_factory.h" 5 #include "content/renderer/media/webrtc/peer_connection_dependency_factory.h"
6 6
7 #include <stddef.h> 7 #include <stddef.h>
8 8
9 #include <utility> 9 #include <utility>
10 #include <vector> 10 #include <vector>
11 11
12 #include "base/command_line.h" 12 #include "base/command_line.h"
13 #include "base/location.h" 13 #include "base/location.h"
14 #include "base/logging.h" 14 #include "base/logging.h"
15 #include "base/macros.h" 15 #include "base/macros.h"
16 #include "base/metrics/field_trial.h" 16 #include "base/metrics/field_trial.h"
17 #include "base/strings/string_util.h" 17 #include "base/strings/string_util.h"
18 #include "base/strings/utf_string_conversions.h" 18 #include "base/strings/utf_string_conversions.h"
19 #include "base/synchronization/waitable_event.h" 19 #include "base/synchronization/waitable_event.h"
20 #include "build/build_config.h" 20 #include "build/build_config.h"
21 #include "content/common/media/media_stream_messages.h" 21 #include "content/common/media/media_stream_messages.h"
22 #include "content/public/common/content_client.h" 22 #include "content/public/common/content_client.h"
23 #include "content/public/common/content_switches.h" 23 #include "content/public/common/content_switches.h"
24 #include "content/public/common/renderer_preferences.h" 24 #include "content/public/common/renderer_preferences.h"
25 #include "content/public/common/webrtc_ip_handling_policy.h" 25 #include "content/public/common/webrtc_ip_handling_policy.h"
26 #include "content/public/renderer/content_renderer_client.h" 26 #include "content/public/renderer/content_renderer_client.h"
27 #include "content/renderer/media/media_stream.h" 27 #include "content/renderer/media/media_stream.h"
28 #include "content/renderer/media/media_stream_audio_processor.h" 28 #include "content/renderer/media/media_stream_audio_processor.h"
29 #include "content/renderer/media/media_stream_audio_processor_options.h"
30 #include "content/renderer/media/media_stream_audio_source.h"
31 #include "content/renderer/media/media_stream_video_source.h" 29 #include "content/renderer/media/media_stream_video_source.h"
32 #include "content/renderer/media/media_stream_video_track.h" 30 #include "content/renderer/media/media_stream_video_track.h"
33 #include "content/renderer/media/peer_connection_identity_store.h" 31 #include "content/renderer/media/peer_connection_identity_store.h"
34 #include "content/renderer/media/rtc_media_constraints.h" 32 #include "content/renderer/media/rtc_media_constraints.h"
35 #include "content/renderer/media/rtc_peer_connection_handler.h" 33 #include "content/renderer/media/rtc_peer_connection_handler.h"
36 #include "content/renderer/media/rtc_video_decoder_factory.h" 34 #include "content/renderer/media/rtc_video_decoder_factory.h"
37 #include "content/renderer/media/rtc_video_encoder_factory.h" 35 #include "content/renderer/media/rtc_video_encoder_factory.h"
38 #include "content/renderer/media/webaudio_capturer_source.h"
39 #include "content/renderer/media/webrtc/media_stream_remote_audio_track.h"
40 #include "content/renderer/media/webrtc/stun_field_trial.h" 36 #include "content/renderer/media/webrtc/stun_field_trial.h"
41 #include "content/renderer/media/webrtc/webrtc_local_audio_track_adapter.h"
42 #include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h" 37 #include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h"
43 #include "content/renderer/media/webrtc_audio_device_impl.h" 38 #include "content/renderer/media/webrtc_audio_device_impl.h"
44 #include "content/renderer/media/webrtc_local_audio_track.h"
45 #include "content/renderer/media/webrtc_logging.h"
46 #include "content/renderer/media/webrtc_uma_histograms.h" 39 #include "content/renderer/media/webrtc_uma_histograms.h"
47 #include "content/renderer/p2p/empty_network_manager.h" 40 #include "content/renderer/p2p/empty_network_manager.h"
48 #include "content/renderer/p2p/filtering_network_manager.h" 41 #include "content/renderer/p2p/filtering_network_manager.h"
49 #include "content/renderer/p2p/ipc_network_manager.h" 42 #include "content/renderer/p2p/ipc_network_manager.h"
50 #include "content/renderer/p2p/ipc_socket_factory.h" 43 #include "content/renderer/p2p/ipc_socket_factory.h"
51 #include "content/renderer/p2p/port_allocator.h" 44 #include "content/renderer/p2p/port_allocator.h"
52 #include "content/renderer/render_frame_impl.h" 45 #include "content/renderer/render_frame_impl.h"
53 #include "content/renderer/render_thread_impl.h" 46 #include "content/renderer/render_thread_impl.h"
54 #include "content/renderer/render_view_impl.h" 47 #include "content/renderer/render_view_impl.h"
55 #include "content/renderer/renderer_features.h" 48 #include "content/renderer/renderer_features.h"
(...skipping 34 matching lines...) Expand 10 before | Expand all | Expand 10 after
90 return DEFAULT_PUBLIC_AND_PRIVATE_INTERFACES; 83 return DEFAULT_PUBLIC_AND_PRIVATE_INTERFACES;
91 if (preference == kWebRTCIPHandlingDefaultPublicInterfaceOnly) 84 if (preference == kWebRTCIPHandlingDefaultPublicInterfaceOnly)
92 return DEFAULT_PUBLIC_INTERFACE_ONLY; 85 return DEFAULT_PUBLIC_INTERFACE_ONLY;
93 if (preference == kWebRTCIPHandlingDisableNonProxiedUdp) 86 if (preference == kWebRTCIPHandlingDisableNonProxiedUdp)
94 return DISABLE_NON_PROXIED_UDP; 87 return DISABLE_NON_PROXIED_UDP;
95 return DEFAULT; 88 return DEFAULT;
96 } 89 }
97 90
98 } // namespace 91 } // namespace
99 92
100 // Map of corresponding media constraints and platform effects.
101 struct {
102 const char* constraint;
103 const media::AudioParameters::PlatformEffectsMask effect;
104 } const kConstraintEffectMap[] = {
105 { webrtc::MediaConstraintsInterface::kGoogEchoCancellation,
106 media::AudioParameters::ECHO_CANCELLER },
107 };
108
109 // If any platform effects are available, check them against the constraints.
110 // Disable effects to match false constraints, but if a constraint is true, set
111 // the constraint to false to later disable the software effect.
112 //
113 // This function may modify both |constraints| and |effects|.
114 void HarmonizeConstraintsAndEffects(RTCMediaConstraints* constraints,
115 int* effects) {
116 if (*effects != media::AudioParameters::NO_EFFECTS) {
117 for (size_t i = 0; i < arraysize(kConstraintEffectMap); ++i) {
118 bool value;
119 size_t is_mandatory = 0;
120 if (!webrtc::FindConstraint(constraints,
121 kConstraintEffectMap[i].constraint,
122 &value,
123 &is_mandatory) || !value) {
124 // If the constraint is false, or does not exist, disable the platform
125 // effect.
126 *effects &= ~kConstraintEffectMap[i].effect;
127 DVLOG(1) << "Disabling platform effect: "
128 << kConstraintEffectMap[i].effect;
129 } else if (*effects & kConstraintEffectMap[i].effect) {
130 // If the constraint is true, leave the platform effect enabled, and
131 // set the constraint to false to later disable the software effect.
132 if (is_mandatory) {
133 constraints->AddMandatory(kConstraintEffectMap[i].constraint,
134 webrtc::MediaConstraintsInterface::kValueFalse, true);
135 } else {
136 constraints->AddOptional(kConstraintEffectMap[i].constraint,
137 webrtc::MediaConstraintsInterface::kValueFalse, true);
138 }
139 DVLOG(1) << "Disabling constraint: "
140 << kConstraintEffectMap[i].constraint;
141 }
142 }
143 }
144 }
145
146 PeerConnectionDependencyFactory::PeerConnectionDependencyFactory( 93 PeerConnectionDependencyFactory::PeerConnectionDependencyFactory(
147 P2PSocketDispatcher* p2p_socket_dispatcher) 94 P2PSocketDispatcher* p2p_socket_dispatcher)
148 : network_manager_(NULL), 95 : network_manager_(NULL),
149 p2p_socket_dispatcher_(p2p_socket_dispatcher), 96 p2p_socket_dispatcher_(p2p_socket_dispatcher),
150 signaling_thread_(NULL), 97 signaling_thread_(NULL),
151 worker_thread_(NULL), 98 worker_thread_(NULL),
152 chrome_signaling_thread_("Chrome_libJingle_Signaling"), 99 chrome_signaling_thread_("Chrome_libJingle_Signaling"),
153 chrome_worker_thread_("Chrome_libJingle_WorkerThread") { 100 chrome_worker_thread_("Chrome_libJingle_WorkerThread") {
154 TryScheduleStunProbeTrial(); 101 TryScheduleStunProbeTrial();
155 } 102 }
156 103
PeerConnectionDependencyFactory::~PeerConnectionDependencyFactory() {
  DVLOG(1) << "~PeerConnectionDependencyFactory()";
  // The factory must already have been released (presumably via an explicit
  // cleanup step) before destruction — TODO confirm where that happens; it
  // is outside this view.
  DCHECK(pc_factory_ == NULL);
}
161 108
162 blink::WebRTCPeerConnectionHandler* 109 blink::WebRTCPeerConnectionHandler*
163 PeerConnectionDependencyFactory::CreateRTCPeerConnectionHandler( 110 PeerConnectionDependencyFactory::CreateRTCPeerConnectionHandler(
164 blink::WebRTCPeerConnectionHandlerClient* client) { 111 blink::WebRTCPeerConnectionHandlerClient* client) {
165 // Save histogram data so we can see how much PeerConnetion is used. 112 // Save histogram data so we can see how much PeerConnetion is used.
166 // The histogram counts the number of calls to the JS API 113 // The histogram counts the number of calls to the JS API
167 // webKitRTCPeerConnection. 114 // webKitRTCPeerConnection.
168 UpdateWebRTCMethodCount(WEBKIT_RTC_PEER_CONNECTION); 115 UpdateWebRTCMethodCount(WEBKIT_RTC_PEER_CONNECTION);
169 116
170 return new RTCPeerConnectionHandler(client, this); 117 return new RTCPeerConnectionHandler(client, this);
171 } 118 }
172 119
173 bool PeerConnectionDependencyFactory::InitializeMediaStreamAudioSource(
174 int render_frame_id,
175 const blink::WebMediaConstraints& audio_constraints,
176 MediaStreamAudioSource* source_data) {
177 DVLOG(1) << "InitializeMediaStreamAudioSources()";
178
179 // Do additional source initialization if the audio source is a valid
180 // microphone or tab audio.
181 RTCMediaConstraints native_audio_constraints(audio_constraints);
182 MediaAudioConstraints::ApplyFixedAudioConstraints(&native_audio_constraints);
183
184 StreamDeviceInfo device_info = source_data->device_info();
185 RTCMediaConstraints constraints = native_audio_constraints;
186 // May modify both |constraints| and |effects|.
187 HarmonizeConstraintsAndEffects(&constraints,
188 &device_info.device.input.effects);
189
190 scoped_refptr<WebRtcAudioCapturer> capturer(CreateAudioCapturer(
191 render_frame_id, device_info, audio_constraints, source_data));
192 if (!capturer.get()) {
193 const std::string log_string =
194 "PCDF::InitializeMediaStreamAudioSource: fails to create capturer";
195 WebRtcLogMessage(log_string);
196 DVLOG(1) << log_string;
197 // TODO(xians): Don't we need to check if source_observer is observing
198 // something? If not, then it looks like we have a leak here.
199 // OTOH, if it _is_ observing something, then the callback might
200 // be called multiple times which is likely also a bug.
201 return false;
202 }
203 source_data->SetAudioCapturer(capturer.get());
204
205 // Creates a LocalAudioSource object which holds audio options.
206 // TODO(xians): The option should apply to the track instead of the source.
207 // TODO(perkj): Move audio constraints parsing to Chrome.
208 // Currently there are a few constraints that are parsed by libjingle and
209 // the state is set to ended if parsing fails.
210 scoped_refptr<webrtc::AudioSourceInterface> rtc_source(
211 CreateLocalAudioSource(&constraints).get());
212 if (rtc_source->state() != webrtc::MediaSourceInterface::kLive) {
213 DLOG(WARNING) << "Failed to create rtc LocalAudioSource.";
214 return false;
215 }
216 source_data->SetLocalAudioSource(rtc_source.get());
217 return true;
218 }
219
220 WebRtcVideoCapturerAdapter* 120 WebRtcVideoCapturerAdapter*
221 PeerConnectionDependencyFactory::CreateVideoCapturer( 121 PeerConnectionDependencyFactory::CreateVideoCapturer(
222 bool is_screeencast) { 122 bool is_screeencast) {
223 // We need to make sure the libjingle thread wrappers have been created 123 // We need to make sure the libjingle thread wrappers have been created
224 // before we can use an instance of a WebRtcVideoCapturerAdapter. This is 124 // before we can use an instance of a WebRtcVideoCapturerAdapter. This is
225 // since the base class of WebRtcVideoCapturerAdapter is a 125 // since the base class of WebRtcVideoCapturerAdapter is a
226 // cricket::VideoCapturer and it uses the libjingle thread wrappers. 126 // cricket::VideoCapturer and it uses the libjingle thread wrappers.
227 if (!GetPcFactory().get()) 127 if (!GetPcFactory().get())
228 return NULL; 128 return NULL;
229 return new WebRtcVideoCapturerAdapter(is_screeencast); 129 return new WebRtcVideoCapturerAdapter(is_screeencast);
(...skipping 296 matching lines...) Expand 10 before | Expand all | Expand 10 after
526 } 426 }
527 427
528 scoped_refptr<webrtc::AudioSourceInterface> 428 scoped_refptr<webrtc::AudioSourceInterface>
529 PeerConnectionDependencyFactory::CreateLocalAudioSource( 429 PeerConnectionDependencyFactory::CreateLocalAudioSource(
530 const webrtc::MediaConstraintsInterface* constraints) { 430 const webrtc::MediaConstraintsInterface* constraints) {
531 scoped_refptr<webrtc::AudioSourceInterface> source = 431 scoped_refptr<webrtc::AudioSourceInterface> source =
532 GetPcFactory()->CreateAudioSource(constraints).get(); 432 GetPcFactory()->CreateAudioSource(constraints).get();
533 return source; 433 return source;
534 } 434 }
535 435
// Creates the native WebRtcLocalAudioTrack for a local blink audio track,
// starts it, and hands ownership of the native track to the blink track via
// setExtraData(). For WebAudio-backed sources (no pre-existing extra data),
// a dedicated WebAudioCapturerSource is created first.
void PeerConnectionDependencyFactory::CreateLocalAudioTrack(
    const blink::WebMediaStreamTrack& track) {
  blink::WebMediaStreamSource source = track.source();
  DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeAudio);
  DCHECK(!source.remote());
  MediaStreamAudioSource* source_data =
      static_cast<MediaStreamAudioSource*>(source.extraData());

  scoped_refptr<WebAudioCapturerSource> webaudio_source;
  if (!source_data) {
    if (source.requiresAudioConsumer()) {
      // We're adding a WebAudio MediaStream.
      // Create a specific capturer for each WebAudio consumer.
      // CreateWebAudioSource() installs fresh extra data on |source|, which
      // we re-read below.
      webaudio_source = CreateWebAudioSource(&source);
      source_data =
          static_cast<MediaStreamAudioSource*>(source.extraData());
    } else {
      NOTREACHED() << "Local track missing source extra data.";
      return;
    }
  }

  // Creates an adapter to hold all the libjingle objects.
  scoped_refptr<WebRtcLocalAudioTrackAdapter> adapter(
      WebRtcLocalAudioTrackAdapter::Create(track.id().utf8(),
          source_data->local_audio_source()));
  // Propagate the blink track's enabled state to the libjingle track.
  static_cast<webrtc::AudioTrackInterface*>(adapter.get())->set_enabled(
      track.isEnabled());

  // TODO(xians): Merge |source| to the capturer(). We can't do this today
  // because only one capturer() is supported while one |source| is created
  // for each audio track.
  scoped_ptr<WebRtcLocalAudioTrack> audio_track(new WebRtcLocalAudioTrack(
      adapter.get(), source_data->GetAudioCapturer(), webaudio_source.get()));

  StartLocalAudioTrack(audio_track.get());

  // Pass the ownership of the native local audio track to the blink track.
  blink::WebMediaStreamTrack writable_track = track;
  writable_track.setExtraData(audio_track.release());
}
577
578 void PeerConnectionDependencyFactory::CreateRemoteAudioTrack(
579 const blink::WebMediaStreamTrack& track) {
580 blink::WebMediaStreamSource source = track.source();
581 DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeAudio);
582 DCHECK(source.remote());
583 DCHECK(source.extraData());
584
585 blink::WebMediaStreamTrack writable_track = track;
586 writable_track.setExtraData(
587 new MediaStreamRemoteAudioTrack(source, track.isEnabled()));
588 }
589
590 void PeerConnectionDependencyFactory::StartLocalAudioTrack(
591 WebRtcLocalAudioTrack* audio_track) {
592 // Start the audio track. This will hook the |audio_track| to the capturer
593 // as the sink of the audio, and only start the source of the capturer if
594 // it is the first audio track connecting to the capturer.
595 audio_track->Start();
596 }
597
// Builds a WebAudioCapturerSource for a WebAudio-backed MediaStream
// |source|, installing a fresh MediaStreamAudioSource as the source's extra
// data and registering the capturer as the source's audio consumer.
scoped_refptr<WebAudioCapturerSource>
PeerConnectionDependencyFactory::CreateWebAudioSource(
    blink::WebMediaStreamSource* source) {
  DVLOG(1) << "PeerConnectionDependencyFactory::CreateWebAudioSource()";

  scoped_refptr<WebAudioCapturerSource>
      webaudio_capturer_source(new WebAudioCapturerSource(*source));
  MediaStreamAudioSource* source_data = new MediaStreamAudioSource();

  // Use the current default capturer for the WebAudio track so that the
  // WebAudio track can pass a valid delay value and |need_audio_processing|
  // flag to PeerConnection.
  // TODO(xians): Remove this after moving APM to Chrome.
  if (GetWebRtcAudioDevice()) {
    source_data->SetAudioCapturer(
        GetWebRtcAudioDevice()->GetDefaultCapturer());
  }

  // Create a LocalAudioSource object which holds audio options.
  // SetLocalAudioSource() affects core audio parts in third_party/Libjingle.
  source_data->SetLocalAudioSource(CreateLocalAudioSource(NULL).get());
  // |source| takes ownership of |source_data| here — presumably via blink's
  // extra-data ownership convention; confirm before changing.
  source->setExtraData(source_data);

  // Replace the default source with WebAudio as source instead.
  source->addAudioConsumer(webaudio_capturer_source.get());

  return webaudio_capturer_source;
}
626
627 scoped_refptr<webrtc::VideoTrackInterface> 436 scoped_refptr<webrtc::VideoTrackInterface>
628 PeerConnectionDependencyFactory::CreateLocalVideoTrack( 437 PeerConnectionDependencyFactory::CreateLocalVideoTrack(
629 const std::string& id, 438 const std::string& id,
630 webrtc::VideoSourceInterface* source) { 439 webrtc::VideoSourceInterface* source) {
631 return GetPcFactory()->CreateVideoTrack(id, source).get(); 440 return GetPcFactory()->CreateVideoTrack(id, source).get();
632 } 441 }
633 442
634 scoped_refptr<webrtc::VideoTrackInterface> 443 scoped_refptr<webrtc::VideoTrackInterface>
635 PeerConnectionDependencyFactory::CreateLocalVideoTrack( 444 PeerConnectionDependencyFactory::CreateLocalVideoTrack(
636 const std::string& id, cricket::VideoCapturer* capturer) { 445 const std::string& id, cricket::VideoCapturer* capturer) {
(...skipping 118 matching lines...) Expand 10 before | Expand all | Expand 10 after
755 // Stopping the thread will wait until all tasks have been 564 // Stopping the thread will wait until all tasks have been
756 // processed before returning. We wait for the above task to finish before 565 // processed before returning. We wait for the above task to finish before
757 // letting the the function continue to avoid any potential race issues. 566 // letting the the function continue to avoid any potential race issues.
758 chrome_worker_thread_.Stop(); 567 chrome_worker_thread_.Stop();
759 } else { 568 } else {
760 NOTREACHED() << "Worker thread not running."; 569 NOTREACHED() << "Worker thread not running.";
761 } 570 }
762 } 571 }
763 } 572 }
764 573
765 scoped_refptr<WebRtcAudioCapturer>
766 PeerConnectionDependencyFactory::CreateAudioCapturer(
767 int render_frame_id,
768 const StreamDeviceInfo& device_info,
769 const blink::WebMediaConstraints& constraints,
770 MediaStreamAudioSource* audio_source) {
771 // TODO(xians): Handle the cases when gUM is called without a proper render
772 // view, for example, by an extension.
773 DCHECK_GE(render_frame_id, 0);
774
775 EnsureWebRtcAudioDeviceImpl();
776 DCHECK(GetWebRtcAudioDevice());
777 return WebRtcAudioCapturer::CreateCapturer(
778 render_frame_id, device_info, constraints, GetWebRtcAudioDevice(),
779 audio_source);
780 }
781
782 void PeerConnectionDependencyFactory::EnsureInitialized() { 574 void PeerConnectionDependencyFactory::EnsureInitialized() {
783 DCHECK(CalledOnValidThread()); 575 DCHECK(CalledOnValidThread());
784 GetPcFactory(); 576 GetPcFactory();
785 } 577 }
786 578
787 scoped_refptr<base::SingleThreadTaskRunner> 579 scoped_refptr<base::SingleThreadTaskRunner>
788 PeerConnectionDependencyFactory::GetWebRtcWorkerThread() const { 580 PeerConnectionDependencyFactory::GetWebRtcWorkerThread() const {
789 DCHECK(CalledOnValidThread()); 581 DCHECK(CalledOnValidThread());
790 return chrome_worker_thread_.IsRunning() ? chrome_worker_thread_.task_runner() 582 return chrome_worker_thread_.IsRunning() ? chrome_worker_thread_.task_runner()
791 : nullptr; 583 : nullptr;
792 } 584 }
793 585
794 scoped_refptr<base::SingleThreadTaskRunner> 586 scoped_refptr<base::SingleThreadTaskRunner>
795 PeerConnectionDependencyFactory::GetWebRtcSignalingThread() const { 587 PeerConnectionDependencyFactory::GetWebRtcSignalingThread() const {
796 DCHECK(CalledOnValidThread()); 588 DCHECK(CalledOnValidThread());
797 return chrome_signaling_thread_.IsRunning() 589 return chrome_signaling_thread_.IsRunning()
798 ? chrome_signaling_thread_.task_runner() 590 ? chrome_signaling_thread_.task_runner()
799 : nullptr; 591 : nullptr;
800 } 592 }
801 593
802 void PeerConnectionDependencyFactory::EnsureWebRtcAudioDeviceImpl() { 594 void PeerConnectionDependencyFactory::EnsureWebRtcAudioDeviceImpl() {
803 if (audio_device_.get()) 595 if (audio_device_.get())
804 return; 596 return;
805 597
806 audio_device_ = new WebRtcAudioDeviceImpl(); 598 audio_device_ = new WebRtcAudioDeviceImpl();
807 } 599 }
808 600
809 } // namespace content 601 } // namespace content
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698