OLD | NEW |
1 // Copyright 2014 The Chromium Authors. All rights reserved. | 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/renderer/media/webrtc/peer_connection_dependency_factory.h" | 5 #include "content/renderer/media/webrtc/peer_connection_dependency_factory.h" |
6 | 6 |
7 #include <stddef.h> | 7 #include <stddef.h> |
8 | 8 |
9 #include <utility> | 9 #include <utility> |
10 #include <vector> | 10 #include <vector> |
(...skipping 11 matching lines...) Expand all Loading... |
22 #include "build/build_config.h" | 22 #include "build/build_config.h" |
23 #include "content/common/media/media_stream_messages.h" | 23 #include "content/common/media/media_stream_messages.h" |
24 #include "content/public/common/content_client.h" | 24 #include "content/public/common/content_client.h" |
25 #include "content/public/common/content_switches.h" | 25 #include "content/public/common/content_switches.h" |
26 #include "content/public/common/feature_h264_with_openh264_ffmpeg.h" | 26 #include "content/public/common/feature_h264_with_openh264_ffmpeg.h" |
27 #include "content/public/common/features.h" | 27 #include "content/public/common/features.h" |
28 #include "content/public/common/renderer_preferences.h" | 28 #include "content/public/common/renderer_preferences.h" |
29 #include "content/public/common/webrtc_ip_handling_policy.h" | 29 #include "content/public/common/webrtc_ip_handling_policy.h" |
30 #include "content/public/renderer/content_renderer_client.h" | 30 #include "content/public/renderer/content_renderer_client.h" |
31 #include "content/renderer/media/media_stream.h" | 31 #include "content/renderer/media/media_stream.h" |
32 #include "content/renderer/media/media_stream_audio_processor.h" | |
33 #include "content/renderer/media/media_stream_audio_processor_options.h" | |
34 #include "content/renderer/media/media_stream_audio_source.h" | |
35 #include "content/renderer/media/media_stream_constraints_util.h" | |
36 #include "content/renderer/media/media_stream_video_source.h" | 32 #include "content/renderer/media/media_stream_video_source.h" |
37 #include "content/renderer/media/media_stream_video_track.h" | 33 #include "content/renderer/media/media_stream_video_track.h" |
38 #include "content/renderer/media/peer_connection_identity_store.h" | 34 #include "content/renderer/media/peer_connection_identity_store.h" |
39 #include "content/renderer/media/rtc_peer_connection_handler.h" | 35 #include "content/renderer/media/rtc_peer_connection_handler.h" |
40 #include "content/renderer/media/rtc_video_decoder_factory.h" | 36 #include "content/renderer/media/rtc_video_decoder_factory.h" |
41 #include "content/renderer/media/rtc_video_encoder_factory.h" | 37 #include "content/renderer/media/rtc_video_encoder_factory.h" |
42 #include "content/renderer/media/webaudio_capturer_source.h" | |
43 #include "content/renderer/media/webrtc/media_stream_remote_audio_track.h" | |
44 #include "content/renderer/media/webrtc/stun_field_trial.h" | 38 #include "content/renderer/media/webrtc/stun_field_trial.h" |
45 #include "content/renderer/media/webrtc/webrtc_local_audio_track_adapter.h" | |
46 #include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h" | 39 #include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h" |
47 #include "content/renderer/media/webrtc_audio_device_impl.h" | 40 #include "content/renderer/media/webrtc_audio_device_impl.h" |
48 #include "content/renderer/media/webrtc_local_audio_track.h" | |
49 #include "content/renderer/media/webrtc_logging.h" | 41 #include "content/renderer/media/webrtc_logging.h" |
50 #include "content/renderer/media/webrtc_uma_histograms.h" | 42 #include "content/renderer/media/webrtc_uma_histograms.h" |
51 #include "content/renderer/p2p/empty_network_manager.h" | 43 #include "content/renderer/p2p/empty_network_manager.h" |
52 #include "content/renderer/p2p/filtering_network_manager.h" | 44 #include "content/renderer/p2p/filtering_network_manager.h" |
53 #include "content/renderer/p2p/ipc_network_manager.h" | 45 #include "content/renderer/p2p/ipc_network_manager.h" |
54 #include "content/renderer/p2p/ipc_socket_factory.h" | 46 #include "content/renderer/p2p/ipc_socket_factory.h" |
55 #include "content/renderer/p2p/port_allocator.h" | 47 #include "content/renderer/p2p/port_allocator.h" |
56 #include "content/renderer/render_frame_impl.h" | 48 #include "content/renderer/render_frame_impl.h" |
57 #include "content/renderer/render_thread_impl.h" | 49 #include "content/renderer/render_thread_impl.h" |
58 #include "content/renderer/render_view_impl.h" | 50 #include "content/renderer/render_view_impl.h" |
59 #include "crypto/openssl_util.h" | 51 #include "crypto/openssl_util.h" |
60 #include "jingle/glue/thread_wrapper.h" | 52 #include "jingle/glue/thread_wrapper.h" |
61 #include "media/base/media_permission.h" | 53 #include "media/base/media_permission.h" |
62 #include "media/filters/ffmpeg_glue.h" | 54 #include "media/filters/ffmpeg_glue.h" |
63 #include "media/renderers/gpu_video_accelerator_factories.h" | 55 #include "media/renderers/gpu_video_accelerator_factories.h" |
64 #include "third_party/WebKit/public/platform/WebMediaConstraints.h" | 56 #include "third_party/WebKit/public/platform/WebMediaConstraints.h" |
65 #include "third_party/WebKit/public/platform/WebMediaStream.h" | 57 #include "third_party/WebKit/public/platform/WebMediaStream.h" |
66 #include "third_party/WebKit/public/platform/WebMediaStreamSource.h" | 58 #include "third_party/WebKit/public/platform/WebMediaStreamSource.h" |
67 #include "third_party/WebKit/public/platform/WebMediaStreamTrack.h" | 59 #include "third_party/WebKit/public/platform/WebMediaStreamTrack.h" |
68 #include "third_party/WebKit/public/platform/WebURL.h" | 60 #include "third_party/WebKit/public/platform/WebURL.h" |
69 #include "third_party/WebKit/public/web/WebDocument.h" | 61 #include "third_party/WebKit/public/web/WebDocument.h" |
70 #include "third_party/WebKit/public/web/WebFrame.h" | 62 #include "third_party/WebKit/public/web/WebFrame.h" |
71 #include "third_party/webrtc/api/mediaconstraintsinterface.h" | 63 #include "third_party/webrtc/api/mediaconstraintsinterface.h" |
72 #include "third_party/webrtc/base/ssladapter.h" | 64 #include "third_party/webrtc/base/ssladapter.h" |
73 #include "third_party/webrtc/media/base/mediachannel.h" | |
74 #include "third_party/webrtc/modules/video_coding/codecs/h264/include/h264.h" | 65 #include "third_party/webrtc/modules/video_coding/codecs/h264/include/h264.h" |
75 | 66 |
76 #if defined(OS_ANDROID) | 67 #if defined(OS_ANDROID) |
77 #include "media/base/android/media_codec_util.h" | 68 #include "media/base/android/media_codec_util.h" |
78 #endif | 69 #endif |
79 | 70 |
80 namespace content { | 71 namespace content { |
81 | 72 |
82 namespace { | 73 namespace { |
83 | 74 |
(...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
121 PeerConnectionDependencyFactory::CreateRTCPeerConnectionHandler( | 112 PeerConnectionDependencyFactory::CreateRTCPeerConnectionHandler( |
122 blink::WebRTCPeerConnectionHandlerClient* client) { | 113 blink::WebRTCPeerConnectionHandlerClient* client) { |
123 // Save histogram data so we can see how much PeerConnection is used. | 114 // Save histogram data so we can see how much PeerConnection is used. |
124 // The histogram counts the number of calls to the JS API | 115 // The histogram counts the number of calls to the JS API |
125 // webKitRTCPeerConnection. | 116 // webKitRTCPeerConnection. |
126 UpdateWebRTCMethodCount(WEBKIT_RTC_PEER_CONNECTION); | 117 UpdateWebRTCMethodCount(WEBKIT_RTC_PEER_CONNECTION); |
127 | 118 |
128 return new RTCPeerConnectionHandler(client, this); | 119 return new RTCPeerConnectionHandler(client, this); |
129 } | 120 } |
130 | 121 |
131 bool PeerConnectionDependencyFactory::InitializeMediaStreamAudioSource( | |
132 int render_frame_id, | |
133 const blink::WebMediaConstraints& audio_constraints, | |
134 MediaStreamAudioSource* source_data) { | |
135 DVLOG(1) << "InitializeMediaStreamAudioSources()"; | |
136 | |
137 // Do additional source initialization if the audio source is a valid | |
138 // microphone or tab audio. | |
139 | |
140 StreamDeviceInfo device_info = source_data->device_info(); | |
141 | |
142 cricket::AudioOptions options; | |
143 // Apply relevant constraints. | |
144 options.echo_cancellation = ConstraintToOptional( | |
145 audio_constraints, &blink::WebMediaTrackConstraintSet::echoCancellation); | |
146 options.delay_agnostic_aec = ConstraintToOptional( | |
147 audio_constraints, | |
148 &blink::WebMediaTrackConstraintSet::googDAEchoCancellation); | |
149 options.auto_gain_control = ConstraintToOptional( | |
150 audio_constraints, | |
151 &blink::WebMediaTrackConstraintSet::googAutoGainControl); | |
152 options.experimental_agc = ConstraintToOptional( | |
153 audio_constraints, | |
154 &blink::WebMediaTrackConstraintSet::googExperimentalAutoGainControl); | |
155 options.noise_suppression = ConstraintToOptional( | |
156 audio_constraints, | |
157 &blink::WebMediaTrackConstraintSet::googNoiseSuppression); | |
158 options.experimental_ns = ConstraintToOptional( | |
159 audio_constraints, | |
160 &blink::WebMediaTrackConstraintSet::googExperimentalNoiseSuppression); | |
161 options.highpass_filter = ConstraintToOptional( | |
162 audio_constraints, | |
163 &blink::WebMediaTrackConstraintSet::googHighpassFilter); | |
164 options.typing_detection = ConstraintToOptional( | |
165 audio_constraints, | |
166 &blink::WebMediaTrackConstraintSet::googTypingNoiseDetection); | |
167 options.stereo_swapping = ConstraintToOptional( | |
168 audio_constraints, | |
169 &blink::WebMediaTrackConstraintSet::googAudioMirroring); | |
170 | |
171 MediaAudioConstraints::ApplyFixedAudioConstraints(&options); | |
172 | |
173 if (device_info.device.input.effects & | |
174 media::AudioParameters::ECHO_CANCELLER) { | |
175 // TODO(hta): Figure out if we should be looking at echoCancellation. | |
176 // Previous code had googEchoCancellation only. | |
177 const blink::BooleanConstraint& echoCancellation = | |
178 audio_constraints.basic().googEchoCancellation; | |
179 if (echoCancellation.hasExact() && !echoCancellation.exact()) { | |
180 device_info.device.input.effects &= | |
181 ~media::AudioParameters::ECHO_CANCELLER; | |
182 } | |
183 options.echo_cancellation = rtc::Optional<bool>(false); | |
184 } | |
185 | |
186 scoped_ptr<WebRtcAudioCapturer> capturer = CreateAudioCapturer( | |
187 render_frame_id, device_info, audio_constraints, source_data); | |
188 if (!capturer.get()) { | |
189 const std::string log_string = | |
190 "PCDF::InitializeMediaStreamAudioSource: fails to create capturer"; | |
191 WebRtcLogMessage(log_string); | |
192 DVLOG(1) << log_string; | |
193 // TODO(xians): Don't we need to check if source_observer is observing | |
194 // something? If not, then it looks like we have a leak here. | |
195 // OTOH, if it _is_ observing something, then the callback might | |
196 // be called multiple times which is likely also a bug. | |
197 return false; | |
198 } | |
199 source_data->SetAudioCapturer(std::move(capturer)); | |
200 | |
201 // Creates a LocalAudioSource object which holds audio options. | |
202 // TODO(xians): The option should apply to the track instead of the source. | |
203 // TODO(perkj): Move audio constraints parsing to Chrome. | |
204 // Currently there are a few constraints that are parsed by libjingle and | |
205 // the state is set to ended if parsing fails. | |
206 scoped_refptr<webrtc::AudioSourceInterface> rtc_source( | |
207 CreateLocalAudioSource(options).get()); | |
208 if (rtc_source->state() != webrtc::MediaSourceInterface::kLive) { | |
209 DLOG(WARNING) << "Failed to create rtc LocalAudioSource."; | |
210 return false; | |
211 } | |
212 source_data->SetLocalAudioSource(rtc_source.get()); | |
213 return true; | |
214 } | |
215 | |
216 WebRtcVideoCapturerAdapter* | 122 WebRtcVideoCapturerAdapter* |
217 PeerConnectionDependencyFactory::CreateVideoCapturer( | 123 PeerConnectionDependencyFactory::CreateVideoCapturer( |
218 bool is_screeencast) { | 124 bool is_screeencast) { |
219 // We need to make sure the libjingle thread wrappers have been created | 125 // We need to make sure the libjingle thread wrappers have been created |
220 // before we can use an instance of a WebRtcVideoCapturerAdapter. This is | 126 // before we can use an instance of a WebRtcVideoCapturerAdapter. This is |
221 // since the base class of WebRtcVideoCapturerAdapter is a | 127 // since the base class of WebRtcVideoCapturerAdapter is a |
222 // cricket::VideoCapturer and it uses the libjingle thread wrappers. | 128 // cricket::VideoCapturer and it uses the libjingle thread wrappers. |
223 if (!GetPcFactory().get()) | 129 if (!GetPcFactory().get()) |
224 return NULL; | 130 return NULL; |
225 return new WebRtcVideoCapturerAdapter(is_screeencast); | 131 return new WebRtcVideoCapturerAdapter(is_screeencast); |
(...skipping 283 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
509 } | 415 } |
510 | 416 |
511 scoped_refptr<webrtc::AudioSourceInterface> | 417 scoped_refptr<webrtc::AudioSourceInterface> |
512 PeerConnectionDependencyFactory::CreateLocalAudioSource( | 418 PeerConnectionDependencyFactory::CreateLocalAudioSource( |
513 const cricket::AudioOptions& options) { | 419 const cricket::AudioOptions& options) { |
514 scoped_refptr<webrtc::AudioSourceInterface> source = | 420 scoped_refptr<webrtc::AudioSourceInterface> source = |
515 GetPcFactory()->CreateAudioSource(options).get(); | 421 GetPcFactory()->CreateAudioSource(options).get(); |
516 return source; | 422 return source; |
517 } | 423 } |
518 | 424 |
519 void PeerConnectionDependencyFactory::CreateLocalAudioTrack( | |
520 const blink::WebMediaStreamTrack& track) { | |
521 blink::WebMediaStreamSource source = track.source(); | |
522 DCHECK_EQ(source.getType(), blink::WebMediaStreamSource::TypeAudio); | |
523 MediaStreamAudioSource* source_data = MediaStreamAudioSource::From(source); | |
524 | |
525 if (!source_data) { | |
526 if (source.requiresAudioConsumer()) { | |
527 // We're adding a WebAudio MediaStream. | |
528 // Create a specific capturer for each WebAudio consumer. | |
529 CreateWebAudioSource(&source); | |
530 source_data = MediaStreamAudioSource::From(source); | |
531 DCHECK(source_data->webaudio_capturer()); | |
532 } else { | |
533 NOTREACHED() << "Local track missing MediaStreamAudioSource instance."; | |
534 return; | |
535 } | |
536 } | |
537 | |
538 // Creates an adapter to hold all the libjingle objects. | |
539 scoped_refptr<WebRtcLocalAudioTrackAdapter> adapter( | |
540 WebRtcLocalAudioTrackAdapter::Create(track.id().utf8(), | |
541 source_data->local_audio_source())); | |
542 static_cast<webrtc::AudioTrackInterface*>(adapter.get())->set_enabled( | |
543 track.isEnabled()); | |
544 | |
545 // TODO(xians): Merge |source| to the capturer(). We can't do this today | |
546 // because only one capturer() is supported while one |source| is created | |
547 // for each audio track. | |
548 scoped_ptr<WebRtcLocalAudioTrack> audio_track( | |
549 new WebRtcLocalAudioTrack(adapter.get())); | |
550 | |
551 // Start the source and connect the audio data flow to the track. | |
552 // | |
553 // TODO(miu): This logic will be moved to MediaStreamAudioSource (or a | |
554 // subclass of it) in soon-upcoming changes. | |
555 audio_track->Start(base::Bind(&MediaStreamAudioSource::StopAudioDeliveryTo, | |
556 source_data->GetWeakPtr(), | |
557 audio_track.get())); | |
558 if (source_data->webaudio_capturer()) | |
559 source_data->webaudio_capturer()->Start(audio_track.get()); | |
560 else if (source_data->audio_capturer()) | |
561 source_data->audio_capturer()->AddTrack(audio_track.get()); | |
562 else | |
563 NOTREACHED(); | |
564 | |
565 // Pass the ownership of the native local audio track to the blink track. | |
566 blink::WebMediaStreamTrack writable_track = track; | |
567 writable_track.setExtraData(audio_track.release()); | |
568 } | |
569 | |
570 void PeerConnectionDependencyFactory::CreateRemoteAudioTrack( | |
571 const blink::WebMediaStreamTrack& track) { | |
572 blink::WebMediaStreamSource source = track.source(); | |
573 DCHECK_EQ(source.getType(), blink::WebMediaStreamSource::TypeAudio); | |
574 DCHECK(source.remote()); | |
575 DCHECK(MediaStreamAudioSource::From(source)); | |
576 | |
577 blink::WebMediaStreamTrack writable_track = track; | |
578 writable_track.setExtraData( | |
579 new MediaStreamRemoteAudioTrack(source, track.isEnabled())); | |
580 } | |
581 | |
582 void PeerConnectionDependencyFactory::CreateWebAudioSource( | |
583 blink::WebMediaStreamSource* source) { | |
584 DVLOG(1) << "PeerConnectionDependencyFactory::CreateWebAudioSource()"; | |
585 | |
586 MediaStreamAudioSource* source_data = new MediaStreamAudioSource(); | |
587 source_data->SetWebAudioCapturer( | |
588 make_scoped_ptr(new WebAudioCapturerSource(source))); | |
589 | |
590 // Create a LocalAudioSource object which holds audio options. | |
591 // SetLocalAudioSource() affects core audio parts in third_party/Libjingle. | |
592 cricket::AudioOptions options; | |
593 source_data->SetLocalAudioSource(CreateLocalAudioSource(options).get()); | |
594 source->setExtraData(source_data); | |
595 } | |
596 | |
597 scoped_refptr<webrtc::VideoTrackInterface> | 425 scoped_refptr<webrtc::VideoTrackInterface> |
598 PeerConnectionDependencyFactory::CreateLocalVideoTrack( | 426 PeerConnectionDependencyFactory::CreateLocalVideoTrack( |
599 const std::string& id, | 427 const std::string& id, |
600 webrtc::VideoTrackSourceInterface* source) { | 428 webrtc::VideoTrackSourceInterface* source) { |
601 return GetPcFactory()->CreateVideoTrack(id, source).get(); | 429 return GetPcFactory()->CreateVideoTrack(id, source).get(); |
602 } | 430 } |
603 | 431 |
604 scoped_refptr<webrtc::VideoTrackInterface> | 432 scoped_refptr<webrtc::VideoTrackInterface> |
605 PeerConnectionDependencyFactory::CreateLocalVideoTrack( | 433 PeerConnectionDependencyFactory::CreateLocalVideoTrack( |
606 const std::string& id, cricket::VideoCapturer* capturer) { | 434 const std::string& id, cricket::VideoCapturer* capturer) { |
(...skipping 118 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
725 // Stopping the thread will wait until all tasks have been | 553 // Stopping the thread will wait until all tasks have been |
726 // processed before returning. We wait for the above task to finish before | 554 // processed before returning. We wait for the above task to finish before |
727 // letting the function continue to avoid any potential race issues. | 555 // letting the function continue to avoid any potential race issues. |
728 chrome_worker_thread_.Stop(); | 556 chrome_worker_thread_.Stop(); |
729 } else { | 557 } else { |
730 NOTREACHED() << "Worker thread not running."; | 558 NOTREACHED() << "Worker thread not running."; |
731 } | 559 } |
732 } | 560 } |
733 } | 561 } |
734 | 562 |
735 scoped_ptr<WebRtcAudioCapturer> | |
736 PeerConnectionDependencyFactory::CreateAudioCapturer( | |
737 int render_frame_id, | |
738 const StreamDeviceInfo& device_info, | |
739 const blink::WebMediaConstraints& constraints, | |
740 MediaStreamAudioSource* audio_source) { | |
741 // TODO(xians): Handle the cases when gUM is called without a proper render | |
742 // view, for example, by an extension. | |
743 DCHECK_GE(render_frame_id, 0); | |
744 | |
745 EnsureWebRtcAudioDeviceImpl(); | |
746 DCHECK(GetWebRtcAudioDevice()); | |
747 return WebRtcAudioCapturer::CreateCapturer( | |
748 render_frame_id, device_info, constraints, GetWebRtcAudioDevice(), | |
749 audio_source); | |
750 } | |
751 | |
752 void PeerConnectionDependencyFactory::EnsureInitialized() { | 563 void PeerConnectionDependencyFactory::EnsureInitialized() { |
753 DCHECK(CalledOnValidThread()); | 564 DCHECK(CalledOnValidThread()); |
754 GetPcFactory(); | 565 GetPcFactory(); |
755 } | 566 } |
756 | 567 |
757 scoped_refptr<base::SingleThreadTaskRunner> | 568 scoped_refptr<base::SingleThreadTaskRunner> |
758 PeerConnectionDependencyFactory::GetWebRtcWorkerThread() const { | 569 PeerConnectionDependencyFactory::GetWebRtcWorkerThread() const { |
759 DCHECK(CalledOnValidThread()); | 570 DCHECK(CalledOnValidThread()); |
760 return chrome_worker_thread_.IsRunning() ? chrome_worker_thread_.task_runner() | 571 return chrome_worker_thread_.IsRunning() ? chrome_worker_thread_.task_runner() |
761 : nullptr; | 572 : nullptr; |
762 } | 573 } |
763 | 574 |
764 scoped_refptr<base::SingleThreadTaskRunner> | 575 scoped_refptr<base::SingleThreadTaskRunner> |
765 PeerConnectionDependencyFactory::GetWebRtcSignalingThread() const { | 576 PeerConnectionDependencyFactory::GetWebRtcSignalingThread() const { |
766 DCHECK(CalledOnValidThread()); | 577 DCHECK(CalledOnValidThread()); |
767 return chrome_signaling_thread_.IsRunning() | 578 return chrome_signaling_thread_.IsRunning() |
768 ? chrome_signaling_thread_.task_runner() | 579 ? chrome_signaling_thread_.task_runner() |
769 : nullptr; | 580 : nullptr; |
770 } | 581 } |
771 | 582 |
772 void PeerConnectionDependencyFactory::EnsureWebRtcAudioDeviceImpl() { | 583 void PeerConnectionDependencyFactory::EnsureWebRtcAudioDeviceImpl() { |
773 if (audio_device_.get()) | 584 if (audio_device_.get()) |
774 return; | 585 return; |
775 | 586 |
776 audio_device_ = new WebRtcAudioDeviceImpl(); | 587 audio_device_ = new WebRtcAudioDeviceImpl(); |
777 } | 588 } |
778 | 589 |
779 } // namespace content | 590 } // namespace content |
OLD | NEW |