OLD | NEW |
| (Empty) |
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | |
2 // Use of this source code is governed by a BSD-style license that can be | |
3 // found in the LICENSE file. | |
4 | |
5 #include "content/renderer/media/media_stream_dependency_factory.h" | |
6 | |
7 #include <vector> | |
8 | |
9 #include "base/command_line.h" | |
10 #include "base/strings/utf_string_conversions.h" | |
11 #include "base/synchronization/waitable_event.h" | |
12 #include "content/common/media/media_stream_messages.h" | |
13 #include "content/public/common/content_switches.h" | |
14 #include "content/renderer/media/media_stream.h" | |
15 #include "content/renderer/media/media_stream_audio_processor_options.h" | |
16 #include "content/renderer/media/media_stream_audio_source.h" | |
17 #include "content/renderer/media/media_stream_video_source.h" | |
18 #include "content/renderer/media/media_stream_video_track.h" | |
19 #include "content/renderer/media/peer_connection_identity_service.h" | |
20 #include "content/renderer/media/rtc_media_constraints.h" | |
21 #include "content/renderer/media/rtc_peer_connection_handler.h" | |
22 #include "content/renderer/media/rtc_video_decoder_factory.h" | |
23 #include "content/renderer/media/rtc_video_encoder_factory.h" | |
24 #include "content/renderer/media/webaudio_capturer_source.h" | |
25 #include "content/renderer/media/webrtc/webrtc_local_audio_track_adapter.h" | |
26 #include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h" | |
27 #include "content/renderer/media/webrtc_audio_device_impl.h" | |
28 #include "content/renderer/media/webrtc_local_audio_track.h" | |
29 #include "content/renderer/media/webrtc_uma_histograms.h" | |
30 #include "content/renderer/p2p/ipc_network_manager.h" | |
31 #include "content/renderer/p2p/ipc_socket_factory.h" | |
32 #include "content/renderer/p2p/port_allocator.h" | |
33 #include "content/renderer/render_thread_impl.h" | |
34 #include "jingle/glue/thread_wrapper.h" | |
35 #include "media/filters/gpu_video_accelerator_factories.h" | |
36 #include "third_party/WebKit/public/platform/WebMediaConstraints.h" | |
37 #include "third_party/WebKit/public/platform/WebMediaStream.h" | |
38 #include "third_party/WebKit/public/platform/WebMediaStreamSource.h" | |
39 #include "third_party/WebKit/public/platform/WebMediaStreamTrack.h" | |
40 #include "third_party/WebKit/public/platform/WebURL.h" | |
41 #include "third_party/WebKit/public/web/WebDocument.h" | |
42 #include "third_party/WebKit/public/web/WebFrame.h" | |
43 #include "third_party/libjingle/source/talk/app/webrtc/mediaconstraintsinterface
.h" | |
44 | |
45 #if defined(USE_OPENSSL) | |
46 #include "third_party/libjingle/source/talk/base/ssladapter.h" | |
47 #else | |
48 #include "net/socket/nss_ssl_util.h" | |
49 #endif | |
50 | |
51 #if defined(OS_ANDROID) | |
52 #include "media/base/android/media_codec_bridge.h" | |
53 #endif | |
54 | |
55 namespace content { | |
56 | |
// Map of corresponding media constraints and platform effects.
// Each entry pairs a constraint name with the hardware effect bit that
// implements the same processing; HarmonizeConstraintsAndEffects() uses this
// table to decide whether the software or the platform effect should run.
struct {
  const char* constraint;
  const media::AudioParameters::PlatformEffectsMask effect;
} const kConstraintEffectMap[] = {
  { content::kMediaStreamAudioDucking,
    media::AudioParameters::DUCKING },
  { webrtc::MediaConstraintsInterface::kEchoCancellation,
    media::AudioParameters::ECHO_CANCELLER },
};
67 | |
68 // If any platform effects are available, check them against the constraints. | |
69 // Disable effects to match false constraints, but if a constraint is true, set | |
70 // the constraint to false to later disable the software effect. | |
71 // | |
72 // This function may modify both |constraints| and |effects|. | |
73 void HarmonizeConstraintsAndEffects(RTCMediaConstraints* constraints, | |
74 int* effects) { | |
75 if (*effects != media::AudioParameters::NO_EFFECTS) { | |
76 for (size_t i = 0; i < ARRAYSIZE_UNSAFE(kConstraintEffectMap); ++i) { | |
77 bool value; | |
78 size_t is_mandatory = 0; | |
79 if (!webrtc::FindConstraint(constraints, | |
80 kConstraintEffectMap[i].constraint, | |
81 &value, | |
82 &is_mandatory) || !value) { | |
83 // If the constraint is false, or does not exist, disable the platform | |
84 // effect. | |
85 *effects &= ~kConstraintEffectMap[i].effect; | |
86 DVLOG(1) << "Disabling platform effect: " | |
87 << kConstraintEffectMap[i].effect; | |
88 } else if (*effects & kConstraintEffectMap[i].effect) { | |
89 // If the constraint is true, leave the platform effect enabled, and | |
90 // set the constraint to false to later disable the software effect. | |
91 if (is_mandatory) { | |
92 constraints->AddMandatory(kConstraintEffectMap[i].constraint, | |
93 webrtc::MediaConstraintsInterface::kValueFalse, true); | |
94 } else { | |
95 constraints->AddOptional(kConstraintEffectMap[i].constraint, | |
96 webrtc::MediaConstraintsInterface::kValueFalse, true); | |
97 } | |
98 DVLOG(1) << "Disabling constraint: " | |
99 << kConstraintEffectMap[i].constraint; | |
100 } | |
101 } | |
102 } | |
103 } | |
104 | |
105 class P2PPortAllocatorFactory : public webrtc::PortAllocatorFactoryInterface { | |
106 public: | |
107 P2PPortAllocatorFactory( | |
108 P2PSocketDispatcher* socket_dispatcher, | |
109 talk_base::NetworkManager* network_manager, | |
110 talk_base::PacketSocketFactory* socket_factory, | |
111 blink::WebFrame* web_frame) | |
112 : socket_dispatcher_(socket_dispatcher), | |
113 network_manager_(network_manager), | |
114 socket_factory_(socket_factory), | |
115 web_frame_(web_frame) { | |
116 } | |
117 | |
118 virtual cricket::PortAllocator* CreatePortAllocator( | |
119 const std::vector<StunConfiguration>& stun_servers, | |
120 const std::vector<TurnConfiguration>& turn_configurations) OVERRIDE { | |
121 CHECK(web_frame_); | |
122 P2PPortAllocator::Config config; | |
123 if (stun_servers.size() > 0) { | |
124 config.stun_server = stun_servers[0].server.hostname(); | |
125 config.stun_server_port = stun_servers[0].server.port(); | |
126 } | |
127 config.legacy_relay = false; | |
128 for (size_t i = 0; i < turn_configurations.size(); ++i) { | |
129 P2PPortAllocator::Config::RelayServerConfig relay_config; | |
130 relay_config.server_address = turn_configurations[i].server.hostname(); | |
131 relay_config.port = turn_configurations[i].server.port(); | |
132 relay_config.username = turn_configurations[i].username; | |
133 relay_config.password = turn_configurations[i].password; | |
134 relay_config.transport_type = turn_configurations[i].transport_type; | |
135 relay_config.secure = turn_configurations[i].secure; | |
136 config.relays.push_back(relay_config); | |
137 } | |
138 | |
139 // Use first turn server as the stun server. | |
140 if (turn_configurations.size() > 0) { | |
141 config.stun_server = config.relays[0].server_address; | |
142 config.stun_server_port = config.relays[0].port; | |
143 } | |
144 | |
145 return new P2PPortAllocator( | |
146 web_frame_, socket_dispatcher_.get(), network_manager_, | |
147 socket_factory_, config); | |
148 } | |
149 | |
150 protected: | |
151 virtual ~P2PPortAllocatorFactory() {} | |
152 | |
153 private: | |
154 scoped_refptr<P2PSocketDispatcher> socket_dispatcher_; | |
155 // |network_manager_| and |socket_factory_| are a weak references, owned by | |
156 // MediaStreamDependencyFactory. | |
157 talk_base::NetworkManager* network_manager_; | |
158 talk_base::PacketSocketFactory* socket_factory_; | |
159 // Raw ptr to the WebFrame that created the P2PPortAllocatorFactory. | |
160 blink::WebFrame* web_frame_; | |
161 }; | |
162 | |
MediaStreamDependencyFactory::MediaStreamDependencyFactory(
    P2PSocketDispatcher* p2p_socket_dispatcher)
    : network_manager_(NULL),  // Created lazily on the worker thread.
      p2p_socket_dispatcher_(p2p_socket_dispatcher),
      signaling_thread_(NULL),  // Set in CreatePeerConnectionFactory().
      worker_thread_(NULL),     // Set in CreatePeerConnectionFactory().
      chrome_worker_thread_("Chrome_libJingle_WorkerThread") {
}
171 | |
MediaStreamDependencyFactory::~MediaStreamDependencyFactory() {
  // Drops the peer connection factory and frees the network manager on the
  // worker thread before that thread is stopped.
  CleanupPeerConnectionFactory();
}
175 | |
// Creates the glue object between Blink's RTCPeerConnection and the native
// implementation. Ownership of the returned handler passes to the caller.
blink::WebRTCPeerConnectionHandler*
MediaStreamDependencyFactory::CreateRTCPeerConnectionHandler(
    blink::WebRTCPeerConnectionHandlerClient* client) {
  // Save histogram data so we can see how much PeerConnection is used.
  // The histogram counts the number of calls to the JS API
  // webKitRTCPeerConnection.
  UpdateWebRTCMethodCount(WEBKIT_RTC_PEER_CONNECTION);

  return new RTCPeerConnectionHandler(client, this);
}
186 | |
// Hooks up an audio capturer and a libjingle audio source to |source_data|.
// Returns false if either the capturer or the rtc audio source could not be
// created; on success both are stored on |source_data|.
bool MediaStreamDependencyFactory::InitializeMediaStreamAudioSource(
    int render_view_id,
    const blink::WebMediaConstraints& audio_constraints,
    MediaStreamAudioSource* source_data) {
  DVLOG(1) << "InitializeMediaStreamAudioSources()";

  // Do additional source initialization if the audio source is a valid
  // microphone or tab audio.
  RTCMediaConstraints native_audio_constraints(audio_constraints);
  MediaAudioConstraints::ApplyFixedAudioConstraints(&native_audio_constraints);

  // Work on a copy of the device info: HarmonizeConstraintsAndEffects() may
  // modify the platform effects bitmask.
  StreamDeviceInfo device_info = source_data->device_info();
  RTCMediaConstraints constraints = native_audio_constraints;
  // May modify both |constraints| and |effects|.
  HarmonizeConstraintsAndEffects(&constraints,
                                 &device_info.device.input.effects);

  scoped_refptr<WebRtcAudioCapturer> capturer(
      CreateAudioCapturer(render_view_id, device_info, audio_constraints,
                          source_data));
  if (!capturer.get()) {
    DLOG(WARNING) << "Failed to create the capturer for device "
        << device_info.device.id;
    // TODO(xians): Don't we need to check if source_observer is observing
    // something? If not, then it looks like we have a leak here.
    // OTOH, if it _is_ observing something, then the callback might
    // be called multiple times which is likely also a bug.
    return false;
  }
  source_data->SetAudioCapturer(capturer);

  // Creates a LocalAudioSource object which holds audio options.
  // TODO(xians): The option should apply to the track instead of the source.
  // TODO(perkj): Move audio constraints parsing to Chrome.
  // Currently there are a few constraints that are parsed by libjingle and
  // the state is set to ended if parsing fails.
  scoped_refptr<webrtc::AudioSourceInterface> rtc_source(
      CreateLocalAudioSource(&constraints).get());
  if (rtc_source->state() != webrtc::MediaSourceInterface::kLive) {
    DLOG(WARNING) << "Failed to create rtc LocalAudioSource.";
    return false;
  }
  source_data->SetLocalAudioSource(rtc_source);
  return true;
}
232 | |
233 WebRtcVideoCapturerAdapter* MediaStreamDependencyFactory::CreateVideoCapturer( | |
234 bool is_screeencast) { | |
235 // We need to make sure the libjingle thread wrappers have been created | |
236 // before we can use an instance of a WebRtcVideoCapturerAdapter. This is | |
237 // since the base class of WebRtcVideoCapturerAdapter is a | |
238 // cricket::VideoCapturer and it uses the libjingle thread wrappers. | |
239 if (!GetPcFactory()) | |
240 return NULL; | |
241 return new WebRtcVideoCapturerAdapter(is_screeencast); | |
242 } | |
243 | |
244 scoped_refptr<webrtc::VideoSourceInterface> | |
245 MediaStreamDependencyFactory::CreateVideoSource( | |
246 cricket::VideoCapturer* capturer, | |
247 const blink::WebMediaConstraints& constraints) { | |
248 RTCMediaConstraints webrtc_constraints(constraints); | |
249 scoped_refptr<webrtc::VideoSourceInterface> source = | |
250 GetPcFactory()->CreateVideoSource(capturer, &webrtc_constraints).get(); | |
251 return source; | |
252 } | |
253 | |
254 const scoped_refptr<webrtc::PeerConnectionFactoryInterface>& | |
255 MediaStreamDependencyFactory::GetPcFactory() { | |
256 if (!pc_factory_) | |
257 CreatePeerConnectionFactory(); | |
258 CHECK(pc_factory_); | |
259 return pc_factory_; | |
260 } | |
261 | |
// One-time construction of the libjingle PeerConnectionFactory and all of its
// dependencies: the signaling thread (the current thread wrapped for
// libjingle), a dedicated worker thread, the IPC network manager and socket
// factory, SSL initialization, and optional hardware video codec factories.
// Must only run once; the DCHECKs below enforce the pristine state.
void MediaStreamDependencyFactory::CreatePeerConnectionFactory() {
  DCHECK(!pc_factory_.get());
  DCHECK(!signaling_thread_);
  DCHECK(!worker_thread_);
  DCHECK(!network_manager_);
  DCHECK(!socket_factory_);
  DCHECK(!chrome_worker_thread_.IsRunning());

  DVLOG(1) << "MediaStreamDependencyFactory::CreatePeerConnectionFactory()";

  // Wrap the current message loop so libjingle can use it as its signaling
  // thread.
  jingle_glue::JingleThreadWrapper::EnsureForCurrentMessageLoop();
  jingle_glue::JingleThreadWrapper::current()->set_send_allowed(true);
  signaling_thread_ = jingle_glue::JingleThreadWrapper::current();
  CHECK(signaling_thread_);

  CHECK(chrome_worker_thread_.Start());

  // Block until the worker thread has wrapped itself for libjingle and
  // published its talk_base::Thread pointer into |worker_thread_|.
  base::WaitableEvent start_worker_event(true, false);
  chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
      &MediaStreamDependencyFactory::InitializeWorkerThread,
      base::Unretained(this),
      &worker_thread_,
      &start_worker_event));
  start_worker_event.Wait();
  CHECK(worker_thread_);

  // The network manager must be created (and later destroyed) on the worker
  // thread; wait for it to exist before continuing.
  base::WaitableEvent create_network_manager_event(true, false);
  chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
      &MediaStreamDependencyFactory::CreateIpcNetworkManagerOnWorkerThread,
      base::Unretained(this),
      &create_network_manager_event));
  create_network_manager_event.Wait();

  socket_factory_.reset(
      new IpcPacketSocketFactory(p2p_socket_dispatcher_.get()));

  // Init SSL, which will be needed by PeerConnection.
#if defined(USE_OPENSSL)
  if (!talk_base::InitializeSSL()) {
    LOG(ERROR) << "Failed on InitializeSSL.";
    NOTREACHED();
    return;
  }
#else
  // TODO(ronghuawu): Replace this call with InitializeSSL.
  net::EnsureNSSSSLInit();
#endif

  // Hardware codec factories are optional; when absent, libjingle falls back
  // to its built-in software codecs.
  scoped_ptr<cricket::WebRtcVideoDecoderFactory> decoder_factory;
  scoped_ptr<cricket::WebRtcVideoEncoderFactory> encoder_factory;

  const CommandLine* cmd_line = CommandLine::ForCurrentProcess();
  scoped_refptr<media::GpuVideoAcceleratorFactories> gpu_factories =
      RenderThreadImpl::current()->GetGpuFactories();
  if (!cmd_line->HasSwitch(switches::kDisableWebRtcHWDecoding)) {
    if (gpu_factories)
      decoder_factory.reset(new RTCVideoDecoderFactory(gpu_factories));
  }

  if (!cmd_line->HasSwitch(switches::kDisableWebRtcHWEncoding)) {
    if (gpu_factories)
      encoder_factory.reset(new RTCVideoEncoderFactory(gpu_factories));
  }

#if defined(OS_ANDROID)
  if (!media::MediaCodecBridge::SupportsSetParameters())
    encoder_factory.reset();
#endif

  EnsureWebRtcAudioDeviceImpl();

  // The factory takes ownership of the codec factories (hence release()).
  scoped_refptr<webrtc::PeerConnectionFactoryInterface> factory(
      webrtc::CreatePeerConnectionFactory(worker_thread_,
                                          signaling_thread_,
                                          audio_device_.get(),
                                          encoder_factory.release(),
                                          decoder_factory.release()));
  CHECK(factory);

  pc_factory_ = factory;
  webrtc::PeerConnectionFactoryInterface::Options factory_options;
  factory_options.disable_sctp_data_channels = false;
  factory_options.disable_encryption =
      cmd_line->HasSwitch(switches::kDisableWebRtcEncryption);
  pc_factory_->SetOptions(factory_options);

  // |aec_dump_file| will be invalid when dump is not enabled.
  if (aec_dump_file_.IsValid())
    StartAecDump(aec_dump_file_.Pass());
}
352 | |
// True once CreatePeerConnectionFactory() has successfully run.
bool MediaStreamDependencyFactory::PeerConnectionFactoryCreated() {
  return pc_factory_.get() != NULL;
}
356 | |
357 scoped_refptr<webrtc::PeerConnectionInterface> | |
358 MediaStreamDependencyFactory::CreatePeerConnection( | |
359 const webrtc::PeerConnectionInterface::IceServers& ice_servers, | |
360 const webrtc::MediaConstraintsInterface* constraints, | |
361 blink::WebFrame* web_frame, | |
362 webrtc::PeerConnectionObserver* observer) { | |
363 CHECK(web_frame); | |
364 CHECK(observer); | |
365 if (!GetPcFactory()) | |
366 return NULL; | |
367 | |
368 scoped_refptr<P2PPortAllocatorFactory> pa_factory = | |
369 new talk_base::RefCountedObject<P2PPortAllocatorFactory>( | |
370 p2p_socket_dispatcher_.get(), | |
371 network_manager_, | |
372 socket_factory_.get(), | |
373 web_frame); | |
374 | |
375 PeerConnectionIdentityService* identity_service = | |
376 new PeerConnectionIdentityService( | |
377 GURL(web_frame->document().url().spec()).GetOrigin()); | |
378 | |
379 return GetPcFactory()->CreatePeerConnection(ice_servers, | |
380 constraints, | |
381 pa_factory.get(), | |
382 identity_service, | |
383 observer).get(); | |
384 } | |
385 | |
// Creates a native media stream with the given |label|.
scoped_refptr<webrtc::MediaStreamInterface>
MediaStreamDependencyFactory::CreateLocalMediaStream(
    const std::string& label) {
  return GetPcFactory()->CreateLocalMediaStream(label).get();
}
391 | |
392 scoped_refptr<webrtc::AudioSourceInterface> | |
393 MediaStreamDependencyFactory::CreateLocalAudioSource( | |
394 const webrtc::MediaConstraintsInterface* constraints) { | |
395 scoped_refptr<webrtc::AudioSourceInterface> source = | |
396 GetPcFactory()->CreateAudioSource(constraints).get(); | |
397 return source; | |
398 } | |
399 | |
// Builds the native audio track backing the Blink |track| and stores it as
// the track's extra data. Handles both regular capture sources and WebAudio
// consumers; remote sources are not yet supported.
void MediaStreamDependencyFactory::CreateLocalAudioTrack(
    const blink::WebMediaStreamTrack& track) {
  blink::WebMediaStreamSource source = track.source();
  DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeAudio);
  MediaStreamAudioSource* source_data =
      static_cast<MediaStreamAudioSource*>(source.extraData());

  scoped_refptr<WebAudioCapturerSource> webaudio_source;
  if (!source_data) {
    if (source.requiresAudioConsumer()) {
      // We're adding a WebAudio MediaStream.
      // Create a specific capturer for each WebAudio consumer.
      // CreateWebAudioSource() also attaches fresh extra data to |source|.
      webaudio_source = CreateWebAudioSource(&source);
      source_data =
          static_cast<MediaStreamAudioSource*>(source.extraData());
    } else {
      // TODO(perkj): Implement support for sources from
      // remote MediaStreams.
      NOTIMPLEMENTED();
      return;
    }
  }

  // Creates an adapter to hold all the libjingle objects.
  scoped_refptr<WebRtcLocalAudioTrackAdapter> adapter(
      WebRtcLocalAudioTrackAdapter::Create(track.id().utf8(),
                                           source_data->local_audio_source()));
  // Mirror the Blink track's enabled state onto the native track.
  static_cast<webrtc::AudioTrackInterface*>(adapter.get())->set_enabled(
      track.isEnabled());

  // TODO(xians): Merge |source| to the capturer(). We can't do this today
  // because only one capturer() is supported while one |source| is created
  // for each audio track.
  scoped_ptr<WebRtcLocalAudioTrack> audio_track(
      new WebRtcLocalAudioTrack(adapter,
                                source_data->GetAudioCapturer(),
                                webaudio_source));

  StartLocalAudioTrack(audio_track.get());

  // Pass the ownership of the native local audio track to the blink track.
  blink::WebMediaStreamTrack writable_track = track;
  writable_track.setExtraData(audio_track.release());
}
444 | |
// Connects |audio_track| to its sinks and starts audio flow. The order
// matters: the sink is added before Start() hooks the track to the capturer.
void MediaStreamDependencyFactory::StartLocalAudioTrack(
    WebRtcLocalAudioTrack* audio_track) {
  // Add the WebRtcAudioDevice as the sink to the local audio track.
  // TODO(xians): Implement a PeerConnection sink adapter and remove this
  // AddSink() call.
  audio_track->AddSink(GetWebRtcAudioDevice());
  // Start the audio track. This will hook the |audio_track| to the capturer
  // as the sink of the audio, and only start the source of the capturer if
  // it is the first audio track connecting to the capturer.
  audio_track->Start();
}
456 | |
// Creates a capturer source fed by WebAudio and attaches new
// MediaStreamAudioSource extra data to |source|. The returned capturer
// source is also registered as |source|'s audio consumer.
scoped_refptr<WebAudioCapturerSource>
MediaStreamDependencyFactory::CreateWebAudioSource(
    blink::WebMediaStreamSource* source) {
  DVLOG(1) << "MediaStreamDependencyFactory::CreateWebAudioSource()";

  scoped_refptr<WebAudioCapturerSource>
      webaudio_capturer_source(new WebAudioCapturerSource());
  // Ownership of |source_data| transfers to |source| via setExtraData()
  // below.
  MediaStreamAudioSource* source_data = new MediaStreamAudioSource();

  // Use the current default capturer for the WebAudio track so that the
  // WebAudio track can pass a valid delay value and |need_audio_processing|
  // flag to PeerConnection.
  // TODO(xians): Remove this after moving APM to Chrome.
  if (GetWebRtcAudioDevice()) {
    source_data->SetAudioCapturer(
        GetWebRtcAudioDevice()->GetDefaultCapturer());
  }

  // Create a LocalAudioSource object which holds audio options.
  // SetLocalAudioSource() affects core audio parts in third_party/Libjingle.
  source_data->SetLocalAudioSource(CreateLocalAudioSource(NULL).get());
  source->setExtraData(source_data);

  // Replace the default source with WebAudio as source instead.
  source->addAudioConsumer(webaudio_capturer_source.get());

  return webaudio_capturer_source;
}
485 | |
// Creates a native video track on top of an existing video |source|.
scoped_refptr<webrtc::VideoTrackInterface>
MediaStreamDependencyFactory::CreateLocalVideoTrack(
    const std::string& id,
    webrtc::VideoSourceInterface* source) {
  return GetPcFactory()->CreateVideoTrack(id, source).get();
}
492 | |
493 scoped_refptr<webrtc::VideoTrackInterface> | |
494 MediaStreamDependencyFactory::CreateLocalVideoTrack( | |
495 const std::string& id, cricket::VideoCapturer* capturer) { | |
496 if (!capturer) { | |
497 LOG(ERROR) << "CreateLocalVideoTrack called with null VideoCapturer."; | |
498 return NULL; | |
499 } | |
500 | |
501 // Create video source from the |capturer|. | |
502 scoped_refptr<webrtc::VideoSourceInterface> source = | |
503 GetPcFactory()->CreateVideoSource(capturer, NULL).get(); | |
504 | |
505 // Create native track from the source. | |
506 return GetPcFactory()->CreateVideoTrack(id, source.get()).get(); | |
507 } | |
508 | |
// Thin wrapper over the libjingle SDP parser; |error| receives parse
// diagnostics. Ownership of the returned description passes to the caller.
webrtc::SessionDescriptionInterface*
MediaStreamDependencyFactory::CreateSessionDescription(
    const std::string& type,
    const std::string& sdp,
    webrtc::SdpParseError* error) {
  return webrtc::CreateSessionDescription(type, sdp, error);
}
516 | |
// Thin wrapper over libjingle's ICE candidate parser. Ownership of the
// returned candidate passes to the caller.
webrtc::IceCandidateInterface* MediaStreamDependencyFactory::CreateIceCandidate(
    const std::string& sdp_mid,
    int sdp_mline_index,
    const std::string& sdp) {
  return webrtc::CreateIceCandidate(sdp_mid, sdp_mline_index, sdp);
}
523 | |
// Returns the shared audio device, or NULL if EnsureWebRtcAudioDeviceImpl()
// has not run yet.
WebRtcAudioDeviceImpl*
MediaStreamDependencyFactory::GetWebRtcAudioDevice() {
  return audio_device_.get();
}
528 | |
// Runs on |chrome_worker_thread_|: wraps it for libjingle, publishes the
// resulting talk_base::Thread through |thread|, and signals |event| so the
// caller (blocked in CreatePeerConnectionFactory()) can proceed.
void MediaStreamDependencyFactory::InitializeWorkerThread(
    talk_base::Thread** thread,
    base::WaitableEvent* event) {
  jingle_glue::JingleThreadWrapper::EnsureForCurrentMessageLoop();
  jingle_glue::JingleThreadWrapper::current()->set_send_allowed(true);
  *thread = jingle_glue::JingleThreadWrapper::current();
  event->Signal();
}
537 | |
// Runs on |chrome_worker_thread_|: creates the network manager there (it
// must live and die on that thread) and signals |event| to unblock the
// caller.
void MediaStreamDependencyFactory::CreateIpcNetworkManagerOnWorkerThread(
    base::WaitableEvent* event) {
  DCHECK_EQ(base::MessageLoop::current(), chrome_worker_thread_.message_loop());
  network_manager_ = new IpcNetworkManager(p2p_socket_dispatcher_.get());
  event->Signal();
}
544 | |
// Runs on |chrome_worker_thread_|: counterpart of
// CreateIpcNetworkManagerOnWorkerThread().
void MediaStreamDependencyFactory::DeleteIpcNetworkManager() {
  DCHECK_EQ(base::MessageLoop::current(), chrome_worker_thread_.message_loop());
  delete network_manager_;
  network_manager_ = NULL;
}
550 | |
// Releases the factory and destroys the network manager on the worker
// thread, then joins that thread.
void MediaStreamDependencyFactory::CleanupPeerConnectionFactory() {
  pc_factory_ = NULL;
  if (network_manager_) {
    // The network manager needs to free its resources on the thread they were
    // created, which is the worker thread.
    if (chrome_worker_thread_.IsRunning()) {
      chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
          &MediaStreamDependencyFactory::DeleteIpcNetworkManager,
          base::Unretained(this)));
      // Stopping the thread will wait until all tasks have been
      // processed before returning. We wait for the above task to finish
      // before letting the function continue to avoid any potential race
      // issues.
      chrome_worker_thread_.Stop();
    } else {
      NOTREACHED() << "Worker thread not running.";
    }
  }
}
569 | |
// Creates a WebRtcAudioCapturer for the given device and render view,
// ensuring the shared audio device exists first.
scoped_refptr<WebRtcAudioCapturer>
MediaStreamDependencyFactory::CreateAudioCapturer(
    int render_view_id,
    const StreamDeviceInfo& device_info,
    const blink::WebMediaConstraints& constraints,
    MediaStreamAudioSource* audio_source) {
  // TODO(xians): Handle the cases when gUM is called without a proper render
  // view, for example, by an extension.
  DCHECK_GE(render_view_id, 0);

  EnsureWebRtcAudioDeviceImpl();
  DCHECK(GetWebRtcAudioDevice());
  return WebRtcAudioCapturer::CreateCapturer(render_view_id, device_info,
                                             constraints,
                                             GetWebRtcAudioDevice(),
                                             audio_source);
}
587 | |
// Wraps |native_track| in a MediaStreamTrack and attaches it as the Blink
// track's extra data. The Blink track takes ownership of the wrapper; the
// track must not already carry extra data.
void MediaStreamDependencyFactory::AddNativeAudioTrackToBlinkTrack(
    webrtc::MediaStreamTrackInterface* native_track,
    const blink::WebMediaStreamTrack& webkit_track,
    bool is_local_track) {
  DCHECK(!webkit_track.isNull() && !webkit_track.extraData());
  DCHECK_EQ(blink::WebMediaStreamSource::TypeAudio,
            webkit_track.source().type());
  blink::WebMediaStreamTrack track = webkit_track;

  DVLOG(1) << "AddNativeTrackToBlinkTrack() audio";
  track.setExtraData(
      new MediaStreamTrack(
          static_cast<webrtc::AudioTrackInterface*>(native_track),
          is_local_track));
}
603 | |
// Message loop proxy of the libjingle worker thread.
scoped_refptr<base::MessageLoopProxy>
MediaStreamDependencyFactory::GetWebRtcWorkerThread() const {
  DCHECK(CalledOnValidThread());
  return chrome_worker_thread_.message_loop_proxy();
}
609 | |
// Dispatches browser-to-renderer AEC dump control messages. Returns true if
// the message was handled here.
bool MediaStreamDependencyFactory::OnControlMessageReceived(
    const IPC::Message& message) {
  bool handled = true;
  IPC_BEGIN_MESSAGE_MAP(MediaStreamDependencyFactory, message)
    IPC_MESSAGE_HANDLER(MediaStreamMsg_EnableAecDump, OnAecDumpFile)
    IPC_MESSAGE_HANDLER(MediaStreamMsg_DisableAecDump, OnDisableAecDump)
    IPC_MESSAGE_UNHANDLED(handled = false)
  IPC_END_MESSAGE_MAP()
  return handled;
}
620 | |
// Handles MediaStreamMsg_EnableAecDump: starts an AEC dump into the file the
// browser handed over, either immediately or (when the factory does not
// exist yet) deferred until CreatePeerConnectionFactory() runs.
void MediaStreamDependencyFactory::OnAecDumpFile(
    IPC::PlatformFileForTransit file_handle) {
  DCHECK(!aec_dump_file_.IsValid());
  base::File file = IPC::PlatformFileForTransitToFile(file_handle);
  DCHECK(file.IsValid());

  // With audio track processing enabled, the dump is owned by the audio
  // device instead of the libjingle factory.
  if (CommandLine::ForCurrentProcess()->HasSwitch(
          switches::kEnableAudioTrackProcessing)) {
    EnsureWebRtcAudioDeviceImpl();
    GetWebRtcAudioDevice()->EnableAecDump(file.Pass());
    return;
  }

  // TODO(xians): Remove the following code after kEnableAudioTrackProcessing
  // is removed.
  if (PeerConnectionFactoryCreated())
    StartAecDump(file.Pass());
  else
    aec_dump_file_ = file.Pass();
}
641 | |
// Handles MediaStreamMsg_DisableAecDump.
// NOTE(review): in the legacy (non-processing) path this only closes a dump
// file that was still pending; a dump already handed to the factory via
// StartAecDump() is presumably stopped elsewhere — confirm against the
// factory API.
void MediaStreamDependencyFactory::OnDisableAecDump() {
  if (CommandLine::ForCurrentProcess()->HasSwitch(
          switches::kEnableAudioTrackProcessing)) {
    GetWebRtcAudioDevice()->DisableAecDump();
    return;
  }

  // TODO(xians): Remove the following code after kEnableAudioTrackProcessing
  // is removed.
  if (aec_dump_file_.IsValid())
    aec_dump_file_.Close();
}
654 | |
// Hands |aec_dump_file| to the libjingle factory to begin dumping.
void MediaStreamDependencyFactory::StartAecDump(base::File aec_dump_file) {
  // |pc_factory_| always takes ownership of |aec_dump_file|. If StartAecDump()
  // fails, |aec_dump_file| will be closed.
  if (!GetPcFactory()->StartAecDump(aec_dump_file.TakePlatformFile()))
    VLOG(1) << "Could not start AEC dump.";
}
661 | |
// Lazily creates the shared audio device; no-op if it already exists.
void MediaStreamDependencyFactory::EnsureWebRtcAudioDeviceImpl() {
  if (audio_device_)
    return;

  audio_device_ = new WebRtcAudioDeviceImpl();
}
668 | |
669 } // namespace content | |
OLD | NEW |