OLD | NEW |
1 // Copyright 2014 The Chromium Authors. All rights reserved. | 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/renderer/media/webrtc/peer_connection_dependency_factory.h" | 5 #include "content/renderer/media/webrtc/peer_connection_dependency_factory.h" |
6 | 6 |
7 #include <stddef.h> | 7 #include <stddef.h> |
8 | 8 |
9 #include <utility> | 9 #include <utility> |
10 #include <vector> | 10 #include <vector> |
(...skipping 11 matching lines...) Expand all Loading... |
22 #include "build/build_config.h" | 22 #include "build/build_config.h" |
23 #include "content/common/media/media_stream_messages.h" | 23 #include "content/common/media/media_stream_messages.h" |
24 #include "content/public/common/content_client.h" | 24 #include "content/public/common/content_client.h" |
25 #include "content/public/common/content_switches.h" | 25 #include "content/public/common/content_switches.h" |
26 #include "content/public/common/feature_h264_with_openh264_ffmpeg.h" | 26 #include "content/public/common/feature_h264_with_openh264_ffmpeg.h" |
27 #include "content/public/common/features.h" | 27 #include "content/public/common/features.h" |
28 #include "content/public/common/renderer_preferences.h" | 28 #include "content/public/common/renderer_preferences.h" |
29 #include "content/public/common/webrtc_ip_handling_policy.h" | 29 #include "content/public/common/webrtc_ip_handling_policy.h" |
30 #include "content/public/renderer/content_renderer_client.h" | 30 #include "content/public/renderer/content_renderer_client.h" |
31 #include "content/renderer/media/media_stream.h" | 31 #include "content/renderer/media/media_stream.h" |
32 #include "content/renderer/media/media_stream_audio_processor.h" | |
33 #include "content/renderer/media/media_stream_audio_processor_options.h" | |
34 #include "content/renderer/media/media_stream_audio_source.h" | |
35 #include "content/renderer/media/media_stream_constraints_util.h" | |
36 #include "content/renderer/media/media_stream_video_source.h" | 32 #include "content/renderer/media/media_stream_video_source.h" |
37 #include "content/renderer/media/media_stream_video_track.h" | 33 #include "content/renderer/media/media_stream_video_track.h" |
38 #include "content/renderer/media/peer_connection_identity_store.h" | 34 #include "content/renderer/media/peer_connection_identity_store.h" |
39 #include "content/renderer/media/rtc_peer_connection_handler.h" | 35 #include "content/renderer/media/rtc_peer_connection_handler.h" |
40 #include "content/renderer/media/rtc_video_decoder_factory.h" | 36 #include "content/renderer/media/rtc_video_decoder_factory.h" |
41 #include "content/renderer/media/rtc_video_encoder_factory.h" | 37 #include "content/renderer/media/rtc_video_encoder_factory.h" |
42 #include "content/renderer/media/webaudio_capturer_source.h" | |
43 #include "content/renderer/media/webrtc/media_stream_remote_audio_track.h" | |
44 #include "content/renderer/media/webrtc/stun_field_trial.h" | 38 #include "content/renderer/media/webrtc/stun_field_trial.h" |
45 #include "content/renderer/media/webrtc/webrtc_local_audio_track_adapter.h" | |
46 #include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h" | 39 #include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h" |
47 #include "content/renderer/media/webrtc_audio_device_impl.h" | 40 #include "content/renderer/media/webrtc_audio_device_impl.h" |
48 #include "content/renderer/media/webrtc_local_audio_track.h" | |
49 #include "content/renderer/media/webrtc_logging.h" | 41 #include "content/renderer/media/webrtc_logging.h" |
50 #include "content/renderer/media/webrtc_uma_histograms.h" | 42 #include "content/renderer/media/webrtc_uma_histograms.h" |
51 #include "content/renderer/p2p/empty_network_manager.h" | 43 #include "content/renderer/p2p/empty_network_manager.h" |
52 #include "content/renderer/p2p/filtering_network_manager.h" | 44 #include "content/renderer/p2p/filtering_network_manager.h" |
53 #include "content/renderer/p2p/ipc_network_manager.h" | 45 #include "content/renderer/p2p/ipc_network_manager.h" |
54 #include "content/renderer/p2p/ipc_socket_factory.h" | 46 #include "content/renderer/p2p/ipc_socket_factory.h" |
55 #include "content/renderer/p2p/port_allocator.h" | 47 #include "content/renderer/p2p/port_allocator.h" |
56 #include "content/renderer/render_frame_impl.h" | 48 #include "content/renderer/render_frame_impl.h" |
57 #include "content/renderer/render_thread_impl.h" | 49 #include "content/renderer/render_thread_impl.h" |
58 #include "content/renderer/render_view_impl.h" | 50 #include "content/renderer/render_view_impl.h" |
59 #include "crypto/openssl_util.h" | 51 #include "crypto/openssl_util.h" |
60 #include "jingle/glue/thread_wrapper.h" | 52 #include "jingle/glue/thread_wrapper.h" |
61 #include "media/base/media_permission.h" | 53 #include "media/base/media_permission.h" |
62 #include "media/filters/ffmpeg_glue.h" | 54 #include "media/filters/ffmpeg_glue.h" |
63 #include "media/renderers/gpu_video_accelerator_factories.h" | 55 #include "media/renderers/gpu_video_accelerator_factories.h" |
64 #include "third_party/WebKit/public/platform/WebMediaConstraints.h" | 56 #include "third_party/WebKit/public/platform/WebMediaConstraints.h" |
65 #include "third_party/WebKit/public/platform/WebMediaStream.h" | 57 #include "third_party/WebKit/public/platform/WebMediaStream.h" |
66 #include "third_party/WebKit/public/platform/WebMediaStreamSource.h" | 58 #include "third_party/WebKit/public/platform/WebMediaStreamSource.h" |
67 #include "third_party/WebKit/public/platform/WebMediaStreamTrack.h" | 59 #include "third_party/WebKit/public/platform/WebMediaStreamTrack.h" |
68 #include "third_party/WebKit/public/platform/WebURL.h" | 60 #include "third_party/WebKit/public/platform/WebURL.h" |
69 #include "third_party/WebKit/public/web/WebDocument.h" | 61 #include "third_party/WebKit/public/web/WebDocument.h" |
70 #include "third_party/WebKit/public/web/WebFrame.h" | 62 #include "third_party/WebKit/public/web/WebFrame.h" |
71 #include "third_party/webrtc/api/mediaconstraintsinterface.h" | 63 #include "third_party/webrtc/api/mediaconstraintsinterface.h" |
72 #include "third_party/webrtc/base/ssladapter.h" | 64 #include "third_party/webrtc/base/ssladapter.h" |
73 #include "third_party/webrtc/media/base/mediachannel.h" | |
74 #include "third_party/webrtc/modules/video_coding/codecs/h264/include/h264.h" | 65 #include "third_party/webrtc/modules/video_coding/codecs/h264/include/h264.h" |
75 | 66 |
76 #if defined(OS_ANDROID) | 67 #if defined(OS_ANDROID) |
77 #include "media/base/android/media_codec_util.h" | 68 #include "media/base/android/media_codec_util.h" |
78 #endif | 69 #endif |
79 | 70 |
80 namespace content { | 71 namespace content { |
81 | 72 |
82 namespace { | 73 namespace { |
83 | 74 |
(...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
121 PeerConnectionDependencyFactory::CreateRTCPeerConnectionHandler( | 112 PeerConnectionDependencyFactory::CreateRTCPeerConnectionHandler( |
122 blink::WebRTCPeerConnectionHandlerClient* client) { | 113 blink::WebRTCPeerConnectionHandlerClient* client) { |
123 // Save histogram data so we can see how much PeerConnection is used. | 114 // Save histogram data so we can see how much PeerConnection is used. |
124 // The histogram counts the number of calls to the JS API | 115 // The histogram counts the number of calls to the JS API |
125 // webKitRTCPeerConnection. | 116 // webKitRTCPeerConnection. |
126 UpdateWebRTCMethodCount(WEBKIT_RTC_PEER_CONNECTION); | 117 UpdateWebRTCMethodCount(WEBKIT_RTC_PEER_CONNECTION); |
127 | 118 |
128 return new RTCPeerConnectionHandler(client, this); | 119 return new RTCPeerConnectionHandler(client, this); |
129 } | 120 } |
130 | 121 |
131 bool PeerConnectionDependencyFactory::InitializeMediaStreamAudioSource( | |
132 int render_frame_id, | |
133 const blink::WebMediaConstraints& audio_constraints, | |
134 MediaStreamAudioSource* source_data) { | |
135 DVLOG(1) << "InitializeMediaStreamAudioSources()"; | |
136 | |
137 // Do additional source initialization if the audio source is a valid | |
138 // microphone or tab audio. | |
139 | |
140 StreamDeviceInfo device_info = source_data->device_info(); | |
141 | |
142 cricket::AudioOptions options; | |
143 // Apply relevant constraints. | |
144 options.echo_cancellation = ConstraintToOptional( | |
145 audio_constraints, &blink::WebMediaTrackConstraintSet::echoCancellation); | |
146 options.delay_agnostic_aec = ConstraintToOptional( | |
147 audio_constraints, | |
148 &blink::WebMediaTrackConstraintSet::googDAEchoCancellation); | |
149 options.auto_gain_control = ConstraintToOptional( | |
150 audio_constraints, | |
151 &blink::WebMediaTrackConstraintSet::googAutoGainControl); | |
152 options.experimental_agc = ConstraintToOptional( | |
153 audio_constraints, | |
154 &blink::WebMediaTrackConstraintSet::googExperimentalAutoGainControl); | |
155 options.noise_suppression = ConstraintToOptional( | |
156 audio_constraints, | |
157 &blink::WebMediaTrackConstraintSet::googNoiseSuppression); | |
158 options.experimental_ns = ConstraintToOptional( | |
159 audio_constraints, | |
160 &blink::WebMediaTrackConstraintSet::googExperimentalNoiseSuppression); | |
161 options.highpass_filter = ConstraintToOptional( | |
162 audio_constraints, | |
163 &blink::WebMediaTrackConstraintSet::googHighpassFilter); | |
164 options.typing_detection = ConstraintToOptional( | |
165 audio_constraints, | |
166 &blink::WebMediaTrackConstraintSet::googTypingNoiseDetection); | |
167 options.stereo_swapping = ConstraintToOptional( | |
168 audio_constraints, | |
169 &blink::WebMediaTrackConstraintSet::googAudioMirroring); | |
170 | |
171 MediaAudioConstraints::ApplyFixedAudioConstraints(&options); | |
172 | |
173 if (device_info.device.input.effects & | |
174 media::AudioParameters::ECHO_CANCELLER) { | |
175 // TODO(hta): Figure out if we should be looking at echoCancellation. | |
176 // Previous code had googEchoCancellation only. | |
177 const blink::BooleanConstraint& echoCancellation = | |
178 audio_constraints.basic().googEchoCancellation; | |
179 if (echoCancellation.hasExact() && !echoCancellation.exact()) { | |
180 device_info.device.input.effects &= | |
181 ~media::AudioParameters::ECHO_CANCELLER; | |
182 } | |
183 options.echo_cancellation = rtc::Optional<bool>(false); | |
184 } | |
185 | |
186 scoped_ptr<WebRtcAudioCapturer> capturer = CreateAudioCapturer( | |
187 render_frame_id, device_info, audio_constraints, source_data); | |
188 if (!capturer.get()) { | |
189 const std::string log_string = | |
190 "PCDF::InitializeMediaStreamAudioSource: fails to create capturer"; | |
191 WebRtcLogMessage(log_string); | |
192 DVLOG(1) << log_string; | |
193 // TODO(xians): Don't we need to check if source_observer is observing | |
194 // something? If not, then it looks like we have a leak here. | |
195 // OTOH, if it _is_ observing something, then the callback might | |
196 // be called multiple times which is likely also a bug. | |
197 return false; | |
198 } | |
199 source_data->SetAudioCapturer(std::move(capturer)); | |
200 | |
201 // Creates a LocalAudioSource object which holds audio options. | |
202 // TODO(xians): The option should apply to the track instead of the source. | |
203 // TODO(perkj): Move audio constraints parsing to Chrome. | |
204 // Currently there are a few constraints that are parsed by libjingle and | |
205 // the state is set to ended if parsing fails. | |
206 scoped_refptr<webrtc::AudioSourceInterface> rtc_source( | |
207 CreateLocalAudioSource(options).get()); | |
208 if (rtc_source->state() != webrtc::MediaSourceInterface::kLive) { | |
209 DLOG(WARNING) << "Failed to create rtc LocalAudioSource."; | |
210 return false; | |
211 } | |
212 source_data->SetLocalAudioSource(rtc_source.get()); | |
213 return true; | |
214 } | |
215 | |
216 WebRtcVideoCapturerAdapter* | 122 WebRtcVideoCapturerAdapter* |
217 PeerConnectionDependencyFactory::CreateVideoCapturer( | 123 PeerConnectionDependencyFactory::CreateVideoCapturer( |
218 bool is_screeencast) { | 124 bool is_screeencast) { |
219 // We need to make sure the libjingle thread wrappers have been created | 125 // We need to make sure the libjingle thread wrappers have been created |
220 // before we can use an instance of a WebRtcVideoCapturerAdapter. This is | 126 // before we can use an instance of a WebRtcVideoCapturerAdapter. This is |
221 // since the base class of WebRtcVideoCapturerAdapter is a | 127 // since the base class of WebRtcVideoCapturerAdapter is a |
222 // cricket::VideoCapturer and it uses the libjingle thread wrappers. | 128 // cricket::VideoCapturer and it uses the libjingle thread wrappers. |
223 if (!GetPcFactory().get()) | 129 if (!GetPcFactory().get()) |
224 return NULL; | 130 return NULL; |
225 return new WebRtcVideoCapturerAdapter(is_screeencast); | 131 return new WebRtcVideoCapturerAdapter(is_screeencast); |
(...skipping 289 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
515 } | 421 } |
516 | 422 |
517 scoped_refptr<webrtc::AudioSourceInterface> | 423 scoped_refptr<webrtc::AudioSourceInterface> |
518 PeerConnectionDependencyFactory::CreateLocalAudioSource( | 424 PeerConnectionDependencyFactory::CreateLocalAudioSource( |
519 const cricket::AudioOptions& options) { | 425 const cricket::AudioOptions& options) { |
520 scoped_refptr<webrtc::AudioSourceInterface> source = | 426 scoped_refptr<webrtc::AudioSourceInterface> source = |
521 GetPcFactory()->CreateAudioSource(options).get(); | 427 GetPcFactory()->CreateAudioSource(options).get(); |
522 return source; | 428 return source; |
523 } | 429 } |
524 | 430 |
525 void PeerConnectionDependencyFactory::CreateLocalAudioTrack( | |
526 const blink::WebMediaStreamTrack& track) { | |
527 blink::WebMediaStreamSource source = track.source(); | |
528 DCHECK_EQ(source.getType(), blink::WebMediaStreamSource::TypeAudio); | |
529 MediaStreamAudioSource* source_data = MediaStreamAudioSource::From(source); | |
530 | |
531 if (!source_data) { | |
532 if (source.requiresAudioConsumer()) { | |
533 // We're adding a WebAudio MediaStream. | |
534 // Create a specific capturer for each WebAudio consumer. | |
535 CreateWebAudioSource(&source); | |
536 source_data = MediaStreamAudioSource::From(source); | |
537 DCHECK(source_data->webaudio_capturer()); | |
538 } else { | |
539 NOTREACHED() << "Local track missing MediaStreamAudioSource instance."; | |
540 return; | |
541 } | |
542 } | |
543 | |
544 // Creates an adapter to hold all the libjingle objects. | |
545 scoped_refptr<WebRtcLocalAudioTrackAdapter> adapter( | |
546 WebRtcLocalAudioTrackAdapter::Create(track.id().utf8(), | |
547 source_data->local_audio_source())); | |
548 static_cast<webrtc::AudioTrackInterface*>(adapter.get())->set_enabled( | |
549 track.isEnabled()); | |
550 | |
551 // TODO(xians): Merge |source| to the capturer(). We can't do this today | |
552 // because only one capturer() is supported while one |source| is created | |
553 // for each audio track. | |
554 scoped_ptr<WebRtcLocalAudioTrack> audio_track( | |
555 new WebRtcLocalAudioTrack(adapter.get())); | |
556 | |
557 // Start the source and connect the audio data flow to the track. | |
558 // | |
559 // TODO(miu): This logic will me moved to MediaStreamAudioSource (or a | |
560 // subclass of it) in soon-upcoming changes. | |
561 audio_track->Start(base::Bind(&MediaStreamAudioSource::StopAudioDeliveryTo, | |
562 source_data->GetWeakPtr(), | |
563 audio_track.get())); | |
564 if (source_data->webaudio_capturer()) | |
565 source_data->webaudio_capturer()->Start(audio_track.get()); | |
566 else if (source_data->audio_capturer()) | |
567 source_data->audio_capturer()->AddTrack(audio_track.get()); | |
568 else | |
569 NOTREACHED(); | |
570 | |
571 // Pass the ownership of the native local audio track to the blink track. | |
572 blink::WebMediaStreamTrack writable_track = track; | |
573 writable_track.setExtraData(audio_track.release()); | |
574 } | |
575 | |
576 void PeerConnectionDependencyFactory::CreateRemoteAudioTrack( | |
577 const blink::WebMediaStreamTrack& track) { | |
578 blink::WebMediaStreamSource source = track.source(); | |
579 DCHECK_EQ(source.getType(), blink::WebMediaStreamSource::TypeAudio); | |
580 DCHECK(source.remote()); | |
581 DCHECK(MediaStreamAudioSource::From(source)); | |
582 | |
583 blink::WebMediaStreamTrack writable_track = track; | |
584 writable_track.setExtraData( | |
585 new MediaStreamRemoteAudioTrack(source, track.isEnabled())); | |
586 } | |
587 | |
588 void PeerConnectionDependencyFactory::CreateWebAudioSource( | |
589 blink::WebMediaStreamSource* source) { | |
590 DVLOG(1) << "PeerConnectionDependencyFactory::CreateWebAudioSource()"; | |
591 | |
592 MediaStreamAudioSource* source_data = new MediaStreamAudioSource(); | |
593 source_data->SetWebAudioCapturer( | |
594 make_scoped_ptr(new WebAudioCapturerSource(source))); | |
595 | |
596 // Create a LocalAudioSource object which holds audio options. | |
597 // SetLocalAudioSource() affects core audio parts in third_party/Libjingle. | |
598 cricket::AudioOptions options; | |
599 source_data->SetLocalAudioSource(CreateLocalAudioSource(options).get()); | |
600 source->setExtraData(source_data); | |
601 } | |
602 | |
603 scoped_refptr<webrtc::VideoTrackInterface> | 431 scoped_refptr<webrtc::VideoTrackInterface> |
604 PeerConnectionDependencyFactory::CreateLocalVideoTrack( | 432 PeerConnectionDependencyFactory::CreateLocalVideoTrack( |
605 const std::string& id, | 433 const std::string& id, |
606 webrtc::VideoTrackSourceInterface* source) { | 434 webrtc::VideoTrackSourceInterface* source) { |
607 return GetPcFactory()->CreateVideoTrack(id, source).get(); | 435 return GetPcFactory()->CreateVideoTrack(id, source).get(); |
608 } | 436 } |
609 | 437 |
610 scoped_refptr<webrtc::VideoTrackInterface> | 438 scoped_refptr<webrtc::VideoTrackInterface> |
611 PeerConnectionDependencyFactory::CreateLocalVideoTrack( | 439 PeerConnectionDependencyFactory::CreateLocalVideoTrack( |
612 const std::string& id, cricket::VideoCapturer* capturer) { | 440 const std::string& id, cricket::VideoCapturer* capturer) { |
(...skipping 118 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
731 // Stopping the thread will wait until all tasks have been | 559 // Stopping the thread will wait until all tasks have been |
732 // processed before returning. We wait for the above task to finish before | 560 // processed before returning. We wait for the above task to finish before |
733 // letting the function continue to avoid any potential race issues. | 561 // letting the function continue to avoid any potential race issues. |
734 chrome_worker_thread_.Stop(); | 562 chrome_worker_thread_.Stop(); |
735 } else { | 563 } else { |
736 NOTREACHED() << "Worker thread not running."; | 564 NOTREACHED() << "Worker thread not running."; |
737 } | 565 } |
738 } | 566 } |
739 } | 567 } |
740 | 568 |
741 scoped_ptr<WebRtcAudioCapturer> | |
742 PeerConnectionDependencyFactory::CreateAudioCapturer( | |
743 int render_frame_id, | |
744 const StreamDeviceInfo& device_info, | |
745 const blink::WebMediaConstraints& constraints, | |
746 MediaStreamAudioSource* audio_source) { | |
747 // TODO(xians): Handle the cases when gUM is called without a proper render | |
748 // view, for example, by an extension. | |
749 DCHECK_GE(render_frame_id, 0); | |
750 | |
751 EnsureWebRtcAudioDeviceImpl(); | |
752 DCHECK(GetWebRtcAudioDevice()); | |
753 return WebRtcAudioCapturer::CreateCapturer( | |
754 render_frame_id, device_info, constraints, GetWebRtcAudioDevice(), | |
755 audio_source); | |
756 } | |
757 | |
758 void PeerConnectionDependencyFactory::EnsureInitialized() { | 569 void PeerConnectionDependencyFactory::EnsureInitialized() { |
759 DCHECK(CalledOnValidThread()); | 570 DCHECK(CalledOnValidThread()); |
760 GetPcFactory(); | 571 GetPcFactory(); |
761 } | 572 } |
762 | 573 |
763 scoped_refptr<base::SingleThreadTaskRunner> | 574 scoped_refptr<base::SingleThreadTaskRunner> |
764 PeerConnectionDependencyFactory::GetWebRtcWorkerThread() const { | 575 PeerConnectionDependencyFactory::GetWebRtcWorkerThread() const { |
765 DCHECK(CalledOnValidThread()); | 576 DCHECK(CalledOnValidThread()); |
766 return chrome_worker_thread_.IsRunning() ? chrome_worker_thread_.task_runner() | 577 return chrome_worker_thread_.IsRunning() ? chrome_worker_thread_.task_runner() |
767 : nullptr; | 578 : nullptr; |
768 } | 579 } |
769 | 580 |
770 scoped_refptr<base::SingleThreadTaskRunner> | 581 scoped_refptr<base::SingleThreadTaskRunner> |
771 PeerConnectionDependencyFactory::GetWebRtcSignalingThread() const { | 582 PeerConnectionDependencyFactory::GetWebRtcSignalingThread() const { |
772 DCHECK(CalledOnValidThread()); | 583 DCHECK(CalledOnValidThread()); |
773 return chrome_signaling_thread_.IsRunning() | 584 return chrome_signaling_thread_.IsRunning() |
774 ? chrome_signaling_thread_.task_runner() | 585 ? chrome_signaling_thread_.task_runner() |
775 : nullptr; | 586 : nullptr; |
776 } | 587 } |
777 | 588 |
778 void PeerConnectionDependencyFactory::EnsureWebRtcAudioDeviceImpl() { | 589 void PeerConnectionDependencyFactory::EnsureWebRtcAudioDeviceImpl() { |
779 if (audio_device_.get()) | 590 if (audio_device_.get()) |
780 return; | 591 return; |
781 | 592 |
782 audio_device_ = new WebRtcAudioDeviceImpl(); | 593 audio_device_ = new WebRtcAudioDeviceImpl(); |
783 } | 594 } |
784 | 595 |
785 } // namespace content | 596 } // namespace content |
OLD | NEW |