Chromium Code Reviews

Side by Side Diff: content/renderer/media/webrtc/peer_connection_dependency_factory.cc

Issue 1834323002: MediaStream audio: Refactor 3 separate "glue" implementations into one. (Closed)
Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: REBASE + Workaround to ensure MediaStreamAudioProcessor is destroyed on the main thread. Created 4 years, 7 months ago
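
Note on the workaround mentioned in this patch set: guaranteeing that a ref-counted object such as MediaStreamAudioProcessor is destroyed on the main thread usually means handing the last reference to the main-thread task runner. The sketch below illustrates only that general pattern; ReleaseOnMainThread() and DropReference() are hypothetical names, and the actual workaround in this CL may be implemented differently.

// Illustrative sketch only -- not code from this CL.
#include "base/bind.h"
#include "base/location.h"
#include "base/memory/ref_counted.h"
#include "base/single_thread_task_runner.h"
#include "content/renderer/media/media_stream_audio_processor.h"

namespace {

// Runs on the main thread; its only purpose is to let the bound reference to
// |processor| be released there, so the destructor executes on that thread.
void DropReference(scoped_refptr<MediaStreamAudioProcessor> processor) {}

void ReleaseOnMainThread(
    scoped_refptr<base::SingleThreadTaskRunner> main_task_runner,
    scoped_refptr<MediaStreamAudioProcessor> processor) {
  if (main_task_runner->BelongsToCurrentThread()) {
    processor = nullptr;  // Already on the main thread; release directly.
    return;
  }
  // Post a task that owns a reference; that reference is released on the main
  // thread once the task and its bound state are destroyed there.
  main_task_runner->PostTask(FROM_HERE,
                             base::Bind(&DropReference, processor));
}

}  // namespace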
OLD | NEW
1 // Copyright 2014 The Chromium Authors. All rights reserved. 1 // Copyright 2014 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "content/renderer/media/webrtc/peer_connection_dependency_factory.h" 5 #include "content/renderer/media/webrtc/peer_connection_dependency_factory.h"
6 6
7 #include <stddef.h> 7 #include <stddef.h>
8 8
9 #include <utility> 9 #include <utility>
10 #include <vector> 10 #include <vector>
(...skipping 12 matching lines...) Expand all
23 #include "build/build_config.h" 23 #include "build/build_config.h"
24 #include "content/common/media/media_stream_messages.h" 24 #include "content/common/media/media_stream_messages.h"
25 #include "content/public/common/content_client.h" 25 #include "content/public/common/content_client.h"
26 #include "content/public/common/content_switches.h" 26 #include "content/public/common/content_switches.h"
27 #include "content/public/common/feature_h264_with_openh264_ffmpeg.h" 27 #include "content/public/common/feature_h264_with_openh264_ffmpeg.h"
28 #include "content/public/common/features.h" 28 #include "content/public/common/features.h"
29 #include "content/public/common/renderer_preferences.h" 29 #include "content/public/common/renderer_preferences.h"
30 #include "content/public/common/webrtc_ip_handling_policy.h" 30 #include "content/public/common/webrtc_ip_handling_policy.h"
31 #include "content/public/renderer/content_renderer_client.h" 31 #include "content/public/renderer/content_renderer_client.h"
32 #include "content/renderer/media/media_stream.h" 32 #include "content/renderer/media/media_stream.h"
33 #include "content/renderer/media/media_stream_audio_processor.h"
34 #include "content/renderer/media/media_stream_audio_processor_options.h"
35 #include "content/renderer/media/media_stream_audio_source.h"
36 #include "content/renderer/media/media_stream_constraints_util.h"
37 #include "content/renderer/media/media_stream_video_source.h" 33 #include "content/renderer/media/media_stream_video_source.h"
38 #include "content/renderer/media/media_stream_video_track.h" 34 #include "content/renderer/media/media_stream_video_track.h"
39 #include "content/renderer/media/peer_connection_identity_store.h" 35 #include "content/renderer/media/peer_connection_identity_store.h"
40 #include "content/renderer/media/rtc_peer_connection_handler.h" 36 #include "content/renderer/media/rtc_peer_connection_handler.h"
41 #include "content/renderer/media/rtc_video_decoder_factory.h" 37 #include "content/renderer/media/rtc_video_decoder_factory.h"
42 #include "content/renderer/media/rtc_video_encoder_factory.h" 38 #include "content/renderer/media/rtc_video_encoder_factory.h"
43 #include "content/renderer/media/webaudio_capturer_source.h"
44 #include "content/renderer/media/webrtc/media_stream_remote_audio_track.h"
45 #include "content/renderer/media/webrtc/stun_field_trial.h" 39 #include "content/renderer/media/webrtc/stun_field_trial.h"
46 #include "content/renderer/media/webrtc/webrtc_local_audio_track_adapter.h"
47 #include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h" 40 #include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h"
48 #include "content/renderer/media/webrtc_audio_device_impl.h" 41 #include "content/renderer/media/webrtc_audio_device_impl.h"
49 #include "content/renderer/media/webrtc_local_audio_track.h"
50 #include "content/renderer/media/webrtc_logging.h" 42 #include "content/renderer/media/webrtc_logging.h"
51 #include "content/renderer/media/webrtc_uma_histograms.h" 43 #include "content/renderer/media/webrtc_uma_histograms.h"
52 #include "content/renderer/p2p/empty_network_manager.h" 44 #include "content/renderer/p2p/empty_network_manager.h"
53 #include "content/renderer/p2p/filtering_network_manager.h" 45 #include "content/renderer/p2p/filtering_network_manager.h"
54 #include "content/renderer/p2p/ipc_network_manager.h" 46 #include "content/renderer/p2p/ipc_network_manager.h"
55 #include "content/renderer/p2p/ipc_socket_factory.h" 47 #include "content/renderer/p2p/ipc_socket_factory.h"
56 #include "content/renderer/p2p/port_allocator.h" 48 #include "content/renderer/p2p/port_allocator.h"
57 #include "content/renderer/render_frame_impl.h" 49 #include "content/renderer/render_frame_impl.h"
58 #include "content/renderer/render_thread_impl.h" 50 #include "content/renderer/render_thread_impl.h"
59 #include "content/renderer/render_view_impl.h" 51 #include "content/renderer/render_view_impl.h"
60 #include "crypto/openssl_util.h" 52 #include "crypto/openssl_util.h"
61 #include "jingle/glue/thread_wrapper.h" 53 #include "jingle/glue/thread_wrapper.h"
62 #include "media/base/media_permission.h" 54 #include "media/base/media_permission.h"
63 #include "media/filters/ffmpeg_glue.h" 55 #include "media/filters/ffmpeg_glue.h"
64 #include "media/renderers/gpu_video_accelerator_factories.h" 56 #include "media/renderers/gpu_video_accelerator_factories.h"
65 #include "third_party/WebKit/public/platform/WebMediaConstraints.h" 57 #include "third_party/WebKit/public/platform/WebMediaConstraints.h"
66 #include "third_party/WebKit/public/platform/WebMediaStream.h" 58 #include "third_party/WebKit/public/platform/WebMediaStream.h"
67 #include "third_party/WebKit/public/platform/WebMediaStreamSource.h" 59 #include "third_party/WebKit/public/platform/WebMediaStreamSource.h"
68 #include "third_party/WebKit/public/platform/WebMediaStreamTrack.h" 60 #include "third_party/WebKit/public/platform/WebMediaStreamTrack.h"
69 #include "third_party/WebKit/public/platform/WebURL.h" 61 #include "third_party/WebKit/public/platform/WebURL.h"
70 #include "third_party/WebKit/public/web/WebDocument.h" 62 #include "third_party/WebKit/public/web/WebDocument.h"
71 #include "third_party/WebKit/public/web/WebFrame.h" 63 #include "third_party/WebKit/public/web/WebFrame.h"
72 #include "third_party/webrtc/api/dtlsidentitystore.h" 64 #include "third_party/webrtc/api/dtlsidentitystore.h"
73 #include "third_party/webrtc/api/mediaconstraintsinterface.h" 65 #include "third_party/webrtc/api/mediaconstraintsinterface.h"
74 #include "third_party/webrtc/base/ssladapter.h" 66 #include "third_party/webrtc/base/ssladapter.h"
75 #include "third_party/webrtc/media/base/mediachannel.h"
76 #include "third_party/webrtc/modules/video_coding/codecs/h264/include/h264.h" 67 #include "third_party/webrtc/modules/video_coding/codecs/h264/include/h264.h"
77 68
78 #if defined(OS_ANDROID) 69 #if defined(OS_ANDROID)
79 #include "media/base/android/media_codec_util.h" 70 #include "media/base/android/media_codec_util.h"
80 #endif 71 #endif
81 72
82 namespace content { 73 namespace content {
83 74
84 namespace { 75 namespace {
85 76
(...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after
123 PeerConnectionDependencyFactory::CreateRTCPeerConnectionHandler( 114 PeerConnectionDependencyFactory::CreateRTCPeerConnectionHandler(
124 blink::WebRTCPeerConnectionHandlerClient* client) { 115 blink::WebRTCPeerConnectionHandlerClient* client) {
125 // Save histogram data so we can see how much PeerConnection is used. 116 // Save histogram data so we can see how much PeerConnection is used.
126 // The histogram counts the number of calls to the JS API 117 // The histogram counts the number of calls to the JS API
127 // webKitRTCPeerConnection. 118 // webKitRTCPeerConnection.
128 UpdateWebRTCMethodCount(WEBKIT_RTC_PEER_CONNECTION); 119 UpdateWebRTCMethodCount(WEBKIT_RTC_PEER_CONNECTION);
129 120
130 return new RTCPeerConnectionHandler(client, this); 121 return new RTCPeerConnectionHandler(client, this);
131 } 122 }
132 123
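
Background on the histogram comment in CreateRTCPeerConnectionHandler() above: UpdateWebRTCMethodCount() (declared in webrtc_uma_histograms.h, already included in this file) typically reduces to a UMA enumeration sample. A rough sketch follows; the histogram name and the INVOKE_API_MAX bound are assumptions, not taken from this CL.

// Rough sketch of a per-call UMA counter; name and enum bound are assumed.
#include "base/metrics/histogram_macros.h"

void CountPeerConnectionCall() {
  UMA_HISTOGRAM_ENUMERATION("WebRTC.webkitApiCount",  // assumed name
                            WEBKIT_RTC_PEER_CONNECTION, INVOKE_API_MAX);
}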
133 bool PeerConnectionDependencyFactory::InitializeMediaStreamAudioSource(
134 int render_frame_id,
135 const blink::WebMediaConstraints& audio_constraints,
136 MediaStreamAudioSource* source_data) {
137 DVLOG(1) << "InitializeMediaStreamAudioSource()";
138
139 // Do additional source initialization if the audio source is a valid
140 // microphone or tab audio.
141
142 StreamDeviceInfo device_info = source_data->device_info();
143
144 cricket::AudioOptions options;
145 // Apply relevant constraints.
146 options.echo_cancellation = ConstraintToOptional(
147 audio_constraints, &blink::WebMediaTrackConstraintSet::echoCancellation);
148 options.delay_agnostic_aec = ConstraintToOptional(
149 audio_constraints,
150 &blink::WebMediaTrackConstraintSet::googDAEchoCancellation);
151 options.auto_gain_control = ConstraintToOptional(
152 audio_constraints,
153 &blink::WebMediaTrackConstraintSet::googAutoGainControl);
154 options.experimental_agc = ConstraintToOptional(
155 audio_constraints,
156 &blink::WebMediaTrackConstraintSet::googExperimentalAutoGainControl);
157 options.noise_suppression = ConstraintToOptional(
158 audio_constraints,
159 &blink::WebMediaTrackConstraintSet::googNoiseSuppression);
160 options.experimental_ns = ConstraintToOptional(
161 audio_constraints,
162 &blink::WebMediaTrackConstraintSet::googExperimentalNoiseSuppression);
163 options.highpass_filter = ConstraintToOptional(
164 audio_constraints,
165 &blink::WebMediaTrackConstraintSet::googHighpassFilter);
166 options.typing_detection = ConstraintToOptional(
167 audio_constraints,
168 &blink::WebMediaTrackConstraintSet::googTypingNoiseDetection);
169 options.stereo_swapping = ConstraintToOptional(
170 audio_constraints,
171 &blink::WebMediaTrackConstraintSet::googAudioMirroring);
172
173 MediaAudioConstraints::ApplyFixedAudioConstraints(&options);
174
175 if (device_info.device.input.effects &
176 media::AudioParameters::ECHO_CANCELLER) {
177 // TODO(hta): Figure out if we should be looking at echoCancellation.
178 // Previous code had googEchoCancellation only.
179 const blink::BooleanConstraint& echoCancellation =
180 audio_constraints.basic().googEchoCancellation;
181 if (echoCancellation.hasExact() && !echoCancellation.exact()) {
182 device_info.device.input.effects &=
183 ~media::AudioParameters::ECHO_CANCELLER;
184 }
185 options.echo_cancellation = rtc::Optional<bool>(false);
186 }
187
188 std::unique_ptr<WebRtcAudioCapturer> capturer = CreateAudioCapturer(
189 render_frame_id, device_info, audio_constraints, source_data);
190 if (!capturer.get()) {
191 const std::string log_string =
192 "PCDF::InitializeMediaStreamAudioSource: fails to create capturer";
193 WebRtcLogMessage(log_string);
194 DVLOG(1) << log_string;
195 // TODO(xians): Don't we need to check if source_observer is observing
196 // something? If not, then it looks like we have a leak here.
197 // OTOH, if it _is_ observing something, then the callback might
198 // be called multiple times which is likely also a bug.
199 return false;
200 }
201 source_data->SetAudioCapturer(std::move(capturer));
202
203 // Creates a LocalAudioSource object which holds audio options.
204 // TODO(xians): The option should apply to the track instead of the source.
205 // TODO(perkj): Move audio constraints parsing to Chrome.
206 // Currently there are a few constraints that are parsed by libjingle and
207 // the state is set to ended if parsing fails.
208 scoped_refptr<webrtc::AudioSourceInterface> rtc_source(
209 CreateLocalAudioSource(options).get());
210 if (rtc_source->state() != webrtc::MediaSourceInterface::kLive) {
211 DLOG(WARNING) << "Failed to create rtc LocalAudioSource.";
212 return false;
213 }
214 source_data->SetLocalAudioSource(rtc_source.get());
215 return true;
216 }
217
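
For reference while reading the constraint-mapping code removed above: ConstraintToOptional() is declared in media_stream_constraints_util.h. The sketch below shows the shape such a helper has, using only the hasExact()/exact() accessors visible in this diff; the exact signature in that header may differ, and the blink/webrtc headers already included in this file are assumed.

// Sketch only; see media_stream_constraints_util.h for the real declaration.
rtc::Optional<bool> ConstraintToOptional(
    const blink::WebMediaConstraints& constraints,
    blink::BooleanConstraint blink::WebMediaTrackConstraintSet::*picker) {
  const blink::BooleanConstraint& constraint = constraints.basic().*picker;
  if (constraint.hasExact())
    return rtc::Optional<bool>(constraint.exact());
  // Unset constraint: leave the corresponding AudioOptions field untouched.
  return rtc::Optional<bool>();
}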
218 WebRtcVideoCapturerAdapter* 124 WebRtcVideoCapturerAdapter*
219 PeerConnectionDependencyFactory::CreateVideoCapturer( 125 PeerConnectionDependencyFactory::CreateVideoCapturer(
220 bool is_screeencast) { 126 bool is_screeencast) {
221 // We need to make sure the libjingle thread wrappers have been created 127 // We need to make sure the libjingle thread wrappers have been created
222 // before we can use an instance of a WebRtcVideoCapturerAdapter. This is 128 // before we can use an instance of a WebRtcVideoCapturerAdapter. This is
223 // because the base class of WebRtcVideoCapturerAdapter is a 129 // because the base class of WebRtcVideoCapturerAdapter is a
224 // cricket::VideoCapturer and it uses the libjingle thread wrappers. 130 // cricket::VideoCapturer and it uses the libjingle thread wrappers.
225 if (!GetPcFactory().get()) 131 if (!GetPcFactory().get())
226 return NULL; 132 return NULL;
227 return new WebRtcVideoCapturerAdapter(is_screeencast); 133 return new WebRtcVideoCapturerAdapter(is_screeencast);
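
Context for the thread-wrapper comment in CreateVideoCapturer(): the way GetPcFactory() usually ends up meeting that requirement (elsewhere in this file, via jingle/glue/thread_wrapper.h, which is already included) is roughly the following; the actual initialization lives in the skipped region and may differ in detail.

// Context sketch: ensuring a libjingle thread wrapper exists for the current
// message loop before cricket::VideoCapturer-derived objects are used.
jingle_glue::JingleThreadWrapper::EnsureForCurrentMessageLoop();
jingle_glue::JingleThreadWrapper::current()->set_send_allowed(true);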
(...skipping 290 matching lines...) Expand 10 before | Expand all | Expand 10 after
518 } 424 }
519 425
520 scoped_refptr<webrtc::AudioSourceInterface> 426 scoped_refptr<webrtc::AudioSourceInterface>
521 PeerConnectionDependencyFactory::CreateLocalAudioSource( 427 PeerConnectionDependencyFactory::CreateLocalAudioSource(
522 const cricket::AudioOptions& options) { 428 const cricket::AudioOptions& options) {
523 scoped_refptr<webrtc::AudioSourceInterface> source = 429 scoped_refptr<webrtc::AudioSourceInterface> source =
524 GetPcFactory()->CreateAudioSource(options).get(); 430 GetPcFactory()->CreateAudioSource(options).get();
525 return source; 431 return source;
526 } 432 }
527 433
528 void PeerConnectionDependencyFactory::CreateLocalAudioTrack(
529 const blink::WebMediaStreamTrack& track) {
530 blink::WebMediaStreamSource source = track.source();
531 DCHECK_EQ(source.getType(), blink::WebMediaStreamSource::TypeAudio);
532 MediaStreamAudioSource* source_data = MediaStreamAudioSource::From(source);
533
534 if (!source_data) {
535 if (source.requiresAudioConsumer()) {
536 // We're adding a WebAudio MediaStream.
537 // Create a specific capturer for each WebAudio consumer.
538 CreateWebAudioSource(&source);
539 source_data = MediaStreamAudioSource::From(source);
540 DCHECK(source_data->webaudio_capturer());
541 } else {
542 NOTREACHED() << "Local track missing MediaStreamAudioSource instance.";
543 return;
544 }
545 }
546
547 // Creates an adapter to hold all the libjingle objects.
548 scoped_refptr<WebRtcLocalAudioTrackAdapter> adapter(
549 WebRtcLocalAudioTrackAdapter::Create(track.id().utf8(),
550 source_data->local_audio_source()));
551 static_cast<webrtc::AudioTrackInterface*>(adapter.get())->set_enabled(
552 track.isEnabled());
553
554 // TODO(xians): Merge |source| to the capturer(). We can't do this today
555 // because only one capturer() is supported while one |source| is created
556 // for each audio track.
557 std::unique_ptr<WebRtcLocalAudioTrack> audio_track(
558 new WebRtcLocalAudioTrack(adapter.get()));
559
560 // Start the source and connect the audio data flow to the track.
561 //
562 // TODO(miu): This logic will be moved to MediaStreamAudioSource (or a
563 // subclass of it) in upcoming changes.
564 audio_track->Start(base::Bind(&MediaStreamAudioSource::StopAudioDeliveryTo,
565 source_data->GetWeakPtr(),
566 audio_track.get()));
567 if (source_data->webaudio_capturer())
568 source_data->webaudio_capturer()->Start(audio_track.get());
569 else if (source_data->audio_capturer())
570 source_data->audio_capturer()->AddTrack(audio_track.get());
571 else
572 NOTREACHED();
573
574 // Pass the ownership of the native local audio track to the blink track.
575 blink::WebMediaStreamTrack writable_track = track;
576 writable_track.setExtraData(audio_track.release());
577 }
578
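
A note on the GetWeakPtr() binding in CreateLocalAudioTrack() above: a callback whose receiver is a base::WeakPtr simply does nothing when run after the pointee has been destroyed, which is what makes the StopAudioDeliveryTo hookup safe. A minimal, self-contained sketch of that behavior follows; AudioSink and MakeStopCallback are hypothetical names used only for illustration.

// Minimal sketch of the base::Bind + base::WeakPtr pattern relied on above.
#include "base/bind.h"
#include "base/callback.h"
#include "base/memory/weak_ptr.h"

class AudioSink {
 public:
  AudioSink() : weak_factory_(this) {}
  base::WeakPtr<AudioSink> GetWeakPtr() { return weak_factory_.GetWeakPtr(); }
  void StopDelivery() { /* tear down the audio data flow */ }

 private:
  // Declared last so outstanding WeakPtrs are invalidated before the other
  // members are destroyed.
  base::WeakPtrFactory<AudioSink> weak_factory_;
};

// If the AudioSink is gone by the time this closure runs, Run() is a no-op
// rather than a use-after-free.
base::Closure MakeStopCallback(AudioSink* sink) {
  return base::Bind(&AudioSink::StopDelivery, sink->GetWeakPtr());
}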
579 void PeerConnectionDependencyFactory::CreateRemoteAudioTrack(
580 const blink::WebMediaStreamTrack& track) {
581 blink::WebMediaStreamSource source = track.source();
582 DCHECK_EQ(source.getType(), blink::WebMediaStreamSource::TypeAudio);
583 DCHECK(source.remote());
584 DCHECK(MediaStreamAudioSource::From(source));
585
586 blink::WebMediaStreamTrack writable_track = track;
587 writable_track.setExtraData(
588 new MediaStreamRemoteAudioTrack(source, track.isEnabled()));
589 }
590
591 void PeerConnectionDependencyFactory::CreateWebAudioSource(
592 blink::WebMediaStreamSource* source) {
593 DVLOG(1) << "PeerConnectionDependencyFactory::CreateWebAudioSource()";
594
595 MediaStreamAudioSource* source_data = new MediaStreamAudioSource();
596 source_data->SetWebAudioCapturer(
597 base::WrapUnique(new WebAudioCapturerSource(source)));
598
599 // Create a LocalAudioSource object which holds audio options.
600 // SetLocalAudioSource() affects core audio parts in third_party/Libjingle.
601 cricket::AudioOptions options;
602 source_data->SetLocalAudioSource(CreateLocalAudioSource(options).get());
603 source->setExtraData(source_data);
604 }
605
606 scoped_refptr<webrtc::VideoTrackInterface> 434 scoped_refptr<webrtc::VideoTrackInterface>
607 PeerConnectionDependencyFactory::CreateLocalVideoTrack( 435 PeerConnectionDependencyFactory::CreateLocalVideoTrack(
608 const std::string& id, 436 const std::string& id,
609 webrtc::VideoTrackSourceInterface* source) { 437 webrtc::VideoTrackSourceInterface* source) {
610 return GetPcFactory()->CreateVideoTrack(id, source).get(); 438 return GetPcFactory()->CreateVideoTrack(id, source).get();
611 } 439 }
612 440
613 scoped_refptr<webrtc::VideoTrackInterface> 441 scoped_refptr<webrtc::VideoTrackInterface>
614 PeerConnectionDependencyFactory::CreateLocalVideoTrack( 442 PeerConnectionDependencyFactory::CreateLocalVideoTrack(
615 const std::string& id, cricket::VideoCapturer* capturer) { 443 const std::string& id, cricket::VideoCapturer* capturer) {
(...skipping 118 matching lines...) Expand 10 before | Expand all | Expand 10 after
734 // Stopping the thread will wait until all tasks have been 562 // Stopping the thread will wait until all tasks have been
735 // processed before returning. We wait for the above task to finish before 563 // processed before returning. We wait for the above task to finish before
736 // letting the function continue to avoid any potential race issues. 564 // letting the function continue to avoid any potential race issues.
737 chrome_worker_thread_.Stop(); 565 chrome_worker_thread_.Stop();
738 } else { 566 } else {
739 NOTREACHED() << "Worker thread not running."; 567 NOTREACHED() << "Worker thread not running.";
740 } 568 }
741 } 569 }
742 } 570 }
743 571
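
For context on the Stop() comment above: base::Thread::Stop() runs the tasks already queued on the thread before joining it, which is the guarantee this code leans on. A standalone sketch of that "post, then Stop() to synchronize" pattern; FlushAndStop is a hypothetical name.

// Standalone sketch; not code from this CL.
#include "base/callback.h"
#include "base/location.h"
#include "base/threading/thread.h"

void FlushAndStop(base::Thread* worker, const base::Closure& cleanup_task) {
  worker->task_runner()->PostTask(FROM_HERE, cleanup_task);
  // Stop() lets the already-queued tasks (including |cleanup_task|) run and
  // then joins, so the cleanup has finished by the time Stop() returns.
  worker->Stop();
}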
744 std::unique_ptr<WebRtcAudioCapturer>
745 PeerConnectionDependencyFactory::CreateAudioCapturer(
746 int render_frame_id,
747 const StreamDeviceInfo& device_info,
748 const blink::WebMediaConstraints& constraints,
749 MediaStreamAudioSource* audio_source) {
750 // TODO(xians): Handle the cases when gUM is called without a proper render
751 // view, for example, by an extension.
752 DCHECK_GE(render_frame_id, 0);
753
754 EnsureWebRtcAudioDeviceImpl();
755 DCHECK(GetWebRtcAudioDevice());
756 return WebRtcAudioCapturer::CreateCapturer(
757 render_frame_id, device_info, constraints, GetWebRtcAudioDevice(),
758 audio_source);
759 }
760
761 void PeerConnectionDependencyFactory::EnsureInitialized() { 572 void PeerConnectionDependencyFactory::EnsureInitialized() {
762 DCHECK(CalledOnValidThread()); 573 DCHECK(CalledOnValidThread());
763 GetPcFactory(); 574 GetPcFactory();
764 } 575 }
765 576
766 scoped_refptr<base::SingleThreadTaskRunner> 577 scoped_refptr<base::SingleThreadTaskRunner>
767 PeerConnectionDependencyFactory::GetWebRtcWorkerThread() const { 578 PeerConnectionDependencyFactory::GetWebRtcWorkerThread() const {
768 DCHECK(CalledOnValidThread()); 579 DCHECK(CalledOnValidThread());
769 return chrome_worker_thread_.IsRunning() ? chrome_worker_thread_.task_runner() 580 return chrome_worker_thread_.IsRunning() ? chrome_worker_thread_.task_runner()
770 : nullptr; 581 : nullptr;
771 } 582 }
772 583
773 scoped_refptr<base::SingleThreadTaskRunner> 584 scoped_refptr<base::SingleThreadTaskRunner>
774 PeerConnectionDependencyFactory::GetWebRtcSignalingThread() const { 585 PeerConnectionDependencyFactory::GetWebRtcSignalingThread() const {
775 DCHECK(CalledOnValidThread()); 586 DCHECK(CalledOnValidThread());
776 return chrome_signaling_thread_.IsRunning() 587 return chrome_signaling_thread_.IsRunning()
777 ? chrome_signaling_thread_.task_runner() 588 ? chrome_signaling_thread_.task_runner()
778 : nullptr; 589 : nullptr;
779 } 590 }
780 591
781 void PeerConnectionDependencyFactory::EnsureWebRtcAudioDeviceImpl() { 592 void PeerConnectionDependencyFactory::EnsureWebRtcAudioDeviceImpl() {
782 if (audio_device_.get()) 593 if (audio_device_.get())
783 return; 594 return;
784 595
785 audio_device_ = new WebRtcAudioDeviceImpl(); 596 audio_device_ = new WebRtcAudioDeviceImpl();
786 } 597 }
787 598
788 } // namespace content 599 } // namespace content
