OLD | NEW |
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/renderer/media/media_stream_dependency_factory.h" | 5 #include "content/renderer/media/media_stream_dependency_factory.h" |
6 | 6 |
7 #include <vector> | 7 #include <vector> |
8 | 8 |
9 #include "base/command_line.h" | 9 #include "base/command_line.h" |
10 #include "base/strings/utf_string_conversions.h" | 10 #include "base/strings/utf_string_conversions.h" |
(...skipping 79 matching lines...)
90 } | 90 } |
91 } | 91 } |
92 } | 92 } |
93 | 93 |
94 class P2PPortAllocatorFactory : public webrtc::PortAllocatorFactoryInterface { | 94 class P2PPortAllocatorFactory : public webrtc::PortAllocatorFactoryInterface { |
95 public: | 95 public: |
96 P2PPortAllocatorFactory( | 96 P2PPortAllocatorFactory( |
97 P2PSocketDispatcher* socket_dispatcher, | 97 P2PSocketDispatcher* socket_dispatcher, |
98 talk_base::NetworkManager* network_manager, | 98 talk_base::NetworkManager* network_manager, |
99 talk_base::PacketSocketFactory* socket_factory, | 99 talk_base::PacketSocketFactory* socket_factory, |
100 WebKit::WebFrame* web_frame) | 100 blink::WebFrame* web_frame) |
101 : socket_dispatcher_(socket_dispatcher), | 101 : socket_dispatcher_(socket_dispatcher), |
102 network_manager_(network_manager), | 102 network_manager_(network_manager), |
103 socket_factory_(socket_factory), | 103 socket_factory_(socket_factory), |
104 web_frame_(web_frame) { | 104 web_frame_(web_frame) { |
105 } | 105 } |
106 | 106 |
107 virtual cricket::PortAllocator* CreatePortAllocator( | 107 virtual cricket::PortAllocator* CreatePortAllocator( |
108 const std::vector<StunConfiguration>& stun_servers, | 108 const std::vector<StunConfiguration>& stun_servers, |
109 const std::vector<TurnConfiguration>& turn_configurations) OVERRIDE { | 109 const std::vector<TurnConfiguration>& turn_configurations) OVERRIDE { |
110 CHECK(web_frame_); | 110 CHECK(web_frame_); |
(...skipping 28 matching lines...)
139 protected: | 139 protected: |
140 virtual ~P2PPortAllocatorFactory() {} | 140 virtual ~P2PPortAllocatorFactory() {} |
141 | 141 |
142 private: | 142 private: |
143 scoped_refptr<P2PSocketDispatcher> socket_dispatcher_; | 143 scoped_refptr<P2PSocketDispatcher> socket_dispatcher_; |
144 // |network_manager_| and |socket_factory_| are weak references, owned by | 144 // |network_manager_| and |socket_factory_| are weak references, owned by |
145 // MediaStreamDependencyFactory. | 145 // MediaStreamDependencyFactory. |
146 talk_base::NetworkManager* network_manager_; | 146 talk_base::NetworkManager* network_manager_; |
147 talk_base::PacketSocketFactory* socket_factory_; | 147 talk_base::PacketSocketFactory* socket_factory_; |
148 // Raw ptr to the WebFrame that created the P2PPortAllocatorFactory. | 148 // Raw ptr to the WebFrame that created the P2PPortAllocatorFactory. |
149 WebKit::WebFrame* web_frame_; | 149 blink::WebFrame* web_frame_; |
150 }; | 150 }; |
151 | 151 |
152 // SourceStateObserver is a helper class used for observing the startup state | 152 // SourceStateObserver is a helper class used for observing the startup state |
153 // transition of webrtc media sources such as a camera or microphone. | 153 // transition of webrtc media sources such as a camera or microphone. |
154 // An instance of the object deletes itself after use. | 154 // An instance of the object deletes itself after use. |
155 // Usage: | 155 // Usage: |
156 // 1. Create an instance of the object with the WebKit::WebMediaStream | 156 // 1. Create an instance of the object with the blink::WebMediaStream |
157 // the observed sources belong to and a callback. | 157 // the observed sources belong to and a callback. |
158 // 2. Add the sources to the observer using AddSource. | 158 // 2. Add the sources to the observer using AddSource. |
159 // 3. Call StartObserving() | 159 // 3. Call StartObserving() |
160 // 4. The callback will be triggered when all sources have transitioned from | 160 // 4. The callback will be triggered when all sources have transitioned from |
161 // webrtc::MediaSourceInterface::kInitializing. | 161 // webrtc::MediaSourceInterface::kInitializing. |
162 class SourceStateObserver : public webrtc::ObserverInterface, | 162 class SourceStateObserver : public webrtc::ObserverInterface, |
163 public base::NonThreadSafe { | 163 public base::NonThreadSafe { |
164 public: | 164 public: |
165 SourceStateObserver( | 165 SourceStateObserver( |
166 WebKit::WebMediaStream* web_stream, | 166 blink::WebMediaStream* web_stream, |
167 const MediaStreamDependencyFactory::MediaSourcesCreatedCallback& callback) | 167 const MediaStreamDependencyFactory::MediaSourcesCreatedCallback& callback) |
168 : web_stream_(web_stream), | 168 : web_stream_(web_stream), |
169 ready_callback_(callback), | 169 ready_callback_(callback), |
170 live_(true) { | 170 live_(true) { |
171 } | 171 } |
172 | 172 |
173 void AddSource(webrtc::MediaSourceInterface* source) { | 173 void AddSource(webrtc::MediaSourceInterface* source) { |
174 DCHECK(CalledOnValidThread()); | 174 DCHECK(CalledOnValidThread()); |
175 switch (source->state()) { | 175 switch (source->state()) { |
176 case webrtc::MediaSourceInterface::kInitializing: | 176 case webrtc::MediaSourceInterface::kInitializing: |
(...skipping 33 matching lines...)
210 } else { | 210 } else { |
211 ++it; | 211 ++it; |
212 } | 212 } |
213 } | 213 } |
214 if (sources_.empty()) { | 214 if (sources_.empty()) { |
215 ready_callback_.Run(web_stream_, live_); | 215 ready_callback_.Run(web_stream_, live_); |
216 delete this; | 216 delete this; |
217 } | 217 } |
218 } | 218 } |
219 | 219 |
220 WebKit::WebMediaStream* web_stream_; | 220 blink::WebMediaStream* web_stream_; |
221 MediaStreamDependencyFactory::MediaSourcesCreatedCallback ready_callback_; | 221 MediaStreamDependencyFactory::MediaSourcesCreatedCallback ready_callback_; |
222 bool live_; | 222 bool live_; |
223 typedef std::vector<scoped_refptr<webrtc::MediaSourceInterface> > | 223 typedef std::vector<scoped_refptr<webrtc::MediaSourceInterface> > |
224 ObservedSources; | 224 ObservedSources; |
225 ObservedSources sources_; | 225 ObservedSources sources_; |
226 }; | 226 }; |
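The four usage steps in the comment above map directly onto this class's public surface. A minimal sketch of that sequence (an editor's illustration, not part of the diff), assuming a blink::WebMediaStream |stream| and webrtc sources that are still in kInitializing; the callback signature is inferred from ready_callback_.Run(web_stream_, live_):

  // Step 4's completion handler: every observed source has left
  // kInitializing; |live| is false if any source ended instead of going live.
  void OnSourcesReady(blink::WebMediaStream* stream, bool live) {
    DVLOG(1) << "Sources ready, live=" << live;
  }

  // Step 1: bind the stream and the completion callback.
  SourceStateObserver* observer =
      new SourceStateObserver(&stream, base::Bind(&OnSourcesReady));
  // Step 2: register each source to watch (e.g. camera and microphone).
  observer->AddSource(video_source.get());
  observer->AddSource(audio_source.get());
  // Step 3: start observing; the observer deletes itself once all sources
  // have transitioned.
  observer->StartObservering();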
227 | 227 |
228 MediaStreamDependencyFactory::MediaStreamDependencyFactory( | 228 MediaStreamDependencyFactory::MediaStreamDependencyFactory( |
229 VideoCaptureImplManager* vc_manager, | 229 VideoCaptureImplManager* vc_manager, |
230 P2PSocketDispatcher* p2p_socket_dispatcher) | 230 P2PSocketDispatcher* p2p_socket_dispatcher) |
231 : network_manager_(NULL), | 231 : network_manager_(NULL), |
232 #if defined(GOOGLE_TV) | 232 #if defined(GOOGLE_TV) |
233 decoder_factory_tv_(NULL), | 233 decoder_factory_tv_(NULL), |
234 #endif | 234 #endif |
235 vc_manager_(vc_manager), | 235 vc_manager_(vc_manager), |
236 p2p_socket_dispatcher_(p2p_socket_dispatcher), | 236 p2p_socket_dispatcher_(p2p_socket_dispatcher), |
237 signaling_thread_(NULL), | 237 signaling_thread_(NULL), |
238 worker_thread_(NULL), | 238 worker_thread_(NULL), |
239 chrome_worker_thread_("Chrome_libJingle_WorkerThread") { | 239 chrome_worker_thread_("Chrome_libJingle_WorkerThread") { |
240 } | 240 } |
241 | 241 |
242 MediaStreamDependencyFactory::~MediaStreamDependencyFactory() { | 242 MediaStreamDependencyFactory::~MediaStreamDependencyFactory() { |
243 CleanupPeerConnectionFactory(); | 243 CleanupPeerConnectionFactory(); |
244 } | 244 } |
245 | 245 |
246 WebKit::WebRTCPeerConnectionHandler* | 246 blink::WebRTCPeerConnectionHandler* |
247 MediaStreamDependencyFactory::CreateRTCPeerConnectionHandler( | 247 MediaStreamDependencyFactory::CreateRTCPeerConnectionHandler( |
248 WebKit::WebRTCPeerConnectionHandlerClient* client) { | 248 blink::WebRTCPeerConnectionHandlerClient* client) { |
249 // Save histogram data so we can see how much PeerConnection is used. | 249 // Save histogram data so we can see how much PeerConnection is used. |
250 // The histogram counts the number of calls to the JS API | 250 // The histogram counts the number of calls to the JS API |
251 // webKitRTCPeerConnection. | 251 // webKitRTCPeerConnection. |
252 UpdateWebRTCMethodCount(WEBKIT_RTC_PEER_CONNECTION); | 252 UpdateWebRTCMethodCount(WEBKIT_RTC_PEER_CONNECTION); |
253 | 253 |
254 if (!EnsurePeerConnectionFactory()) | 254 if (!EnsurePeerConnectionFactory()) |
255 return NULL; | 255 return NULL; |
256 | 256 |
257 return new RTCPeerConnectionHandler(client, this); | 257 return new RTCPeerConnectionHandler(client, this); |
258 } | 258 } |
259 | 259 |
260 void MediaStreamDependencyFactory::CreateNativeMediaSources( | 260 void MediaStreamDependencyFactory::CreateNativeMediaSources( |
261 int render_view_id, | 261 int render_view_id, |
262 const WebKit::WebMediaConstraints& audio_constraints, | 262 const blink::WebMediaConstraints& audio_constraints, |
263 const WebKit::WebMediaConstraints& video_constraints, | 263 const blink::WebMediaConstraints& video_constraints, |
264 WebKit::WebMediaStream* web_stream, | 264 blink::WebMediaStream* web_stream, |
265 const MediaSourcesCreatedCallback& sources_created) { | 265 const MediaSourcesCreatedCallback& sources_created) { |
266 DVLOG(1) << "MediaStreamDependencyFactory::CreateNativeMediaSources()"; | 266 DVLOG(1) << "MediaStreamDependencyFactory::CreateNativeMediaSources()"; |
267 if (!EnsurePeerConnectionFactory()) { | 267 if (!EnsurePeerConnectionFactory()) { |
268 sources_created.Run(web_stream, false); | 268 sources_created.Run(web_stream, false); |
269 return; | 269 return; |
270 } | 270 } |
271 | 271 |
272 // |source_observer| cleans itself up when it has completed | 272 // |source_observer| cleans itself up when it has completed |
273 // source_observer->StartObservering. | 273 // source_observer->StartObservering. |
274 SourceStateObserver* source_observer = | 274 SourceStateObserver* source_observer = |
275 new SourceStateObserver(web_stream, sources_created); | 275 new SourceStateObserver(web_stream, sources_created); |
276 | 276 |
277 // Create local video sources. | 277 // Create local video sources. |
278 RTCMediaConstraints native_video_constraints(video_constraints); | 278 RTCMediaConstraints native_video_constraints(video_constraints); |
279 WebKit::WebVector<WebKit::WebMediaStreamTrack> video_tracks; | 279 blink::WebVector<blink::WebMediaStreamTrack> video_tracks; |
280 web_stream->videoTracks(video_tracks); | 280 web_stream->videoTracks(video_tracks); |
281 for (size_t i = 0; i < video_tracks.size(); ++i) { | 281 for (size_t i = 0; i < video_tracks.size(); ++i) { |
282 const WebKit::WebMediaStreamSource& source = video_tracks[i].source(); | 282 const blink::WebMediaStreamSource& source = video_tracks[i].source(); |
283 MediaStreamSourceExtraData* source_data = | 283 MediaStreamSourceExtraData* source_data = |
284 static_cast<MediaStreamSourceExtraData*>(source.extraData()); | 284 static_cast<MediaStreamSourceExtraData*>(source.extraData()); |
285 | 285 |
286 // Check if the source has already been created. This happens when the same | 286 // Check if the source has already been created. This happens when the same |
287 // source is used in multiple MediaStreams as a result of calling | 287 // source is used in multiple MediaStreams as a result of calling |
288 // getUserMedia. | 288 // getUserMedia. |
289 if (source_data->video_source()) | 289 if (source_data->video_source()) |
290 continue; | 290 continue; |
291 | 291 |
292 const bool is_screencast = | 292 const bool is_screencast = |
293 source_data->device_info().device.type == MEDIA_TAB_VIDEO_CAPTURE || | 293 source_data->device_info().device.type == MEDIA_TAB_VIDEO_CAPTURE || |
294 source_data->device_info().device.type == MEDIA_DESKTOP_VIDEO_CAPTURE; | 294 source_data->device_info().device.type == MEDIA_DESKTOP_VIDEO_CAPTURE; |
295 source_data->SetVideoSource( | 295 source_data->SetVideoSource( |
296 CreateLocalVideoSource(source_data->device_info().session_id, | 296 CreateLocalVideoSource(source_data->device_info().session_id, |
297 is_screencast, | 297 is_screencast, |
298 &native_video_constraints).get()); | 298 &native_video_constraints).get()); |
299 source_observer->AddSource(source_data->video_source()); | 299 source_observer->AddSource(source_data->video_source()); |
300 } | 300 } |
301 | 301 |
302 // Do additional source initialization if the audio source is a valid | 302 // Do additional source initialization if the audio source is a valid |
303 // microphone or tab audio. | 303 // microphone or tab audio. |
304 RTCMediaConstraints native_audio_constraints(audio_constraints); | 304 RTCMediaConstraints native_audio_constraints(audio_constraints); |
305 ApplyFixedAudioConstraints(&native_audio_constraints); | 305 ApplyFixedAudioConstraints(&native_audio_constraints); |
306 WebKit::WebVector<WebKit::WebMediaStreamTrack> audio_tracks; | 306 blink::WebVector<blink::WebMediaStreamTrack> audio_tracks; |
307 web_stream->audioTracks(audio_tracks); | 307 web_stream->audioTracks(audio_tracks); |
308 for (size_t i = 0; i < audio_tracks.size(); ++i) { | 308 for (size_t i = 0; i < audio_tracks.size(); ++i) { |
309 const WebKit::WebMediaStreamSource& source = audio_tracks[i].source(); | 309 const blink::WebMediaStreamSource& source = audio_tracks[i].source(); |
310 MediaStreamSourceExtraData* source_data = | 310 MediaStreamSourceExtraData* source_data = |
311 static_cast<MediaStreamSourceExtraData*>(source.extraData()); | 311 static_cast<MediaStreamSourceExtraData*>(source.extraData()); |
312 | 312 |
313 // Check if the source has already been created. This happens when the same | 313 // Check if the source has already been created. This happens when the same |
314 // source is used in multiple MediaStreams as a result of calling | 314 // source is used in multiple MediaStreams as a result of calling |
315 // getUserMedia. | 315 // getUserMedia. |
316 if (source_data->local_audio_source()) | 316 if (source_data->local_audio_source()) |
317 continue; | 317 continue; |
318 | 318 |
319 // TODO(xians): Create a new capturer for different microphones when we | 319 // TODO(xians): Create a new capturer for different microphones when we |
(...skipping 17 matching lines...)
337 // TODO(xians): The option should apply to the track instead of the source. | 337 // TODO(xians): The option should apply to the track instead of the source. |
338 source_data->SetLocalAudioSource( | 338 source_data->SetLocalAudioSource( |
339 CreateLocalAudioSource(&native_audio_constraints).get()); | 339 CreateLocalAudioSource(&native_audio_constraints).get()); |
340 source_observer->AddSource(source_data->local_audio_source()); | 340 source_observer->AddSource(source_data->local_audio_source()); |
341 } | 341 } |
342 | 342 |
343 source_observer->StartObservering(); | 343 source_observer->StartObservering(); |
344 } | 344 } |
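A hedged sketch of how a caller can drive CreateNativeMediaSources(); the actual call site (the renderer's getUserMedia completion path) is outside this diff, and the callback signature is inferred from sources_created.Run(web_stream, false) above:

  // Runs once every native source has left kInitializing.
  void OnNativeSourcesCreated(blink::WebMediaStream* web_stream, bool live) {
    if (!live)
      return;  // At least one source failed to start.
    // All sources are live; a native local stream can be created next.
  }

  factory->CreateNativeMediaSources(
      render_view_id,
      audio_constraints,   // blink::WebMediaConstraints from the request.
      video_constraints,
      &web_stream,
      base::Bind(&OnNativeSourcesCreated));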
345 | 345 |
346 void MediaStreamDependencyFactory::CreateNativeLocalMediaStream( | 346 void MediaStreamDependencyFactory::CreateNativeLocalMediaStream( |
347 WebKit::WebMediaStream* web_stream) { | 347 blink::WebMediaStream* web_stream) { |
348 DVLOG(1) << "MediaStreamDependencyFactory::CreateNativeLocalMediaStream()"; | 348 DVLOG(1) << "MediaStreamDependencyFactory::CreateNativeLocalMediaStream()"; |
349 if (!EnsurePeerConnectionFactory()) { | 349 if (!EnsurePeerConnectionFactory()) { |
350 DVLOG(1) << "EnsurePeerConnectionFactory() failed!"; | 350 DVLOG(1) << "EnsurePeerConnectionFactory() failed!"; |
351 return; | 351 return; |
352 } | 352 } |
353 | 353 |
354 std::string label = UTF16ToUTF8(web_stream->id()); | 354 std::string label = UTF16ToUTF8(web_stream->id()); |
355 scoped_refptr<webrtc::MediaStreamInterface> native_stream = | 355 scoped_refptr<webrtc::MediaStreamInterface> native_stream = |
356 CreateLocalMediaStream(label); | 356 CreateLocalMediaStream(label); |
357 MediaStreamExtraData* extra_data = | 357 MediaStreamExtraData* extra_data = |
358 new MediaStreamExtraData(native_stream.get(), true); | 358 new MediaStreamExtraData(native_stream.get(), true); |
359 web_stream->setExtraData(extra_data); | 359 web_stream->setExtraData(extra_data); |
360 | 360 |
361 // Add audio tracks. | 361 // Add audio tracks. |
362 WebKit::WebVector<WebKit::WebMediaStreamTrack> audio_tracks; | 362 blink::WebVector<blink::WebMediaStreamTrack> audio_tracks; |
363 web_stream->audioTracks(audio_tracks); | 363 web_stream->audioTracks(audio_tracks); |
364 for (size_t i = 0; i < audio_tracks.size(); ++i) { | 364 for (size_t i = 0; i < audio_tracks.size(); ++i) { |
365 AddNativeMediaStreamTrack(*web_stream, audio_tracks[i]); | 365 AddNativeMediaStreamTrack(*web_stream, audio_tracks[i]); |
366 } | 366 } |
367 | 367 |
368 // Add video tracks. | 368 // Add video tracks. |
369 WebKit::WebVector<WebKit::WebMediaStreamTrack> video_tracks; | 369 blink::WebVector<blink::WebMediaStreamTrack> video_tracks; |
370 web_stream->videoTracks(video_tracks); | 370 web_stream->videoTracks(video_tracks); |
371 for (size_t i = 0; i < video_tracks.size(); ++i) { | 371 for (size_t i = 0; i < video_tracks.size(); ++i) { |
372 AddNativeMediaStreamTrack(*web_stream, video_tracks[i]); | 372 AddNativeMediaStreamTrack(*web_stream, video_tracks[i]); |
373 } | 373 } |
374 } | 374 } |
375 | 375 |
376 void MediaStreamDependencyFactory::CreateNativeLocalMediaStream( | 376 void MediaStreamDependencyFactory::CreateNativeLocalMediaStream( |
377 WebKit::WebMediaStream* web_stream, | 377 blink::WebMediaStream* web_stream, |
378 const MediaStreamExtraData::StreamStopCallback& stream_stop) { | 378 const MediaStreamExtraData::StreamStopCallback& stream_stop) { |
379 CreateNativeLocalMediaStream(web_stream); | 379 CreateNativeLocalMediaStream(web_stream); |
380 | 380 |
381 MediaStreamExtraData* extra_data = | 381 MediaStreamExtraData* extra_data = |
382 static_cast<MediaStreamExtraData*>(web_stream->extraData()); | 382 static_cast<MediaStreamExtraData*>(web_stream->extraData()); |
383 extra_data->SetLocalStreamStopCallback(stream_stop); | 383 extra_data->SetLocalStreamStopCallback(stream_stop); |
384 } | 384 } |
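A sketch of the two-argument overload, which local streams use so that the renderer is notified when the stream is stopped. The StreamStopCallback signature (taking the stream label) is an assumption here, since its definition is not part of this diff:

  // Hypothetical stop handler; assumed to receive the stream label.
  void OnLocalStreamStopped(const std::string& label) {
    DVLOG(1) << "Local stream " << label << " stopped.";
  }

  factory->CreateNativeLocalMediaStream(
      &web_stream, base::Bind(&OnLocalStreamStopped));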
385 | 385 |
386 bool MediaStreamDependencyFactory::AddNativeMediaStreamTrack( | 386 bool MediaStreamDependencyFactory::AddNativeMediaStreamTrack( |
387 const WebKit::WebMediaStream& stream, | 387 const blink::WebMediaStream& stream, |
388 const WebKit::WebMediaStreamTrack& track) { | 388 const blink::WebMediaStreamTrack& track) { |
389 MediaStreamExtraData* extra_data = | 389 MediaStreamExtraData* extra_data = |
390 static_cast<MediaStreamExtraData*>(stream.extraData()); | 390 static_cast<MediaStreamExtraData*>(stream.extraData()); |
391 webrtc::MediaStreamInterface* native_stream = extra_data->stream().get(); | 391 webrtc::MediaStreamInterface* native_stream = extra_data->stream().get(); |
392 DCHECK(native_stream); | 392 DCHECK(native_stream); |
393 | 393 |
394 WebKit::WebMediaStreamSource source = track.source(); | 394 blink::WebMediaStreamSource source = track.source(); |
395 MediaStreamSourceExtraData* source_data = | 395 MediaStreamSourceExtraData* source_data = |
396 static_cast<MediaStreamSourceExtraData*>(source.extraData()); | 396 static_cast<MediaStreamSourceExtraData*>(source.extraData()); |
397 | 397 |
398 // In the future the constraints will belong to the track itself, but | 398 // In the future the constraints will belong to the track itself, but |
399 // right now they're on the source, so we fetch them from there. | 399 // right now they're on the source, so we fetch them from there. |
400 RTCMediaConstraints track_constraints(source.constraints()); | 400 RTCMediaConstraints track_constraints(source.constraints()); |
401 | 401 |
402 WebKit::WebMediaStreamSource::Type type = track.source().type(); | 402 blink::WebMediaStreamSource::Type type = track.source().type(); |
403 DCHECK(type == WebKit::WebMediaStreamSource::TypeAudio || | 403 DCHECK(type == blink::WebMediaStreamSource::TypeAudio || |
404 type == WebKit::WebMediaStreamSource::TypeVideo); | 404 type == blink::WebMediaStreamSource::TypeVideo); |
405 | 405 |
406 if (type == WebKit::WebMediaStreamSource::TypeAudio) { | 406 if (type == blink::WebMediaStreamSource::TypeAudio) { |
407 // Apply default audio constraints that enable echo cancellation, | 407 // Apply default audio constraints that enable echo cancellation, |
408 // automatic gain control, noise suppression and high-pass filter. | 408 // automatic gain control, noise suppression and high-pass filter. |
409 ApplyFixedAudioConstraints(&track_constraints); | 409 ApplyFixedAudioConstraints(&track_constraints); |
410 } | 410 } |
411 | 411 |
412 scoped_refptr<WebAudioCapturerSource> webaudio_source; | 412 scoped_refptr<WebAudioCapturerSource> webaudio_source; |
413 if (!source_data) { | 413 if (!source_data) { |
414 if (source.requiresAudioConsumer()) { | 414 if (source.requiresAudioConsumer()) { |
415 // We're adding a WebAudio MediaStream. | 415 // We're adding a WebAudio MediaStream. |
416 // Create a specific capturer for each WebAudio consumer. | 416 // Create a specific capturer for each WebAudio consumer. |
417 webaudio_source = CreateWebAudioSource(&source, &track_constraints); | 417 webaudio_source = CreateWebAudioSource(&source, &track_constraints); |
418 source_data = | 418 source_data = |
419 static_cast<MediaStreamSourceExtraData*>(source.extraData()); | 419 static_cast<MediaStreamSourceExtraData*>(source.extraData()); |
420 } else { | 420 } else { |
421 // TODO(perkj): Implement support for sources from | 421 // TODO(perkj): Implement support for sources from |
422 // remote MediaStreams. | 422 // remote MediaStreams. |
423 NOTIMPLEMENTED(); | 423 NOTIMPLEMENTED(); |
424 return false; | 424 return false; |
425 } | 425 } |
426 } | 426 } |
427 | 427 |
428 std::string track_id = UTF16ToUTF8(track.id()); | 428 std::string track_id = UTF16ToUTF8(track.id()); |
429 if (source.type() == WebKit::WebMediaStreamSource::TypeAudio) { | 429 if (source.type() == blink::WebMediaStreamSource::TypeAudio) { |
430 scoped_refptr<WebRtcAudioCapturer> capturer; | 430 scoped_refptr<WebRtcAudioCapturer> capturer; |
431 if (GetWebRtcAudioDevice()) | 431 if (GetWebRtcAudioDevice()) |
432 capturer = GetWebRtcAudioDevice()->GetDefaultCapturer(); | 432 capturer = GetWebRtcAudioDevice()->GetDefaultCapturer(); |
433 | 433 |
434 scoped_refptr<webrtc::AudioTrackInterface> audio_track( | 434 scoped_refptr<webrtc::AudioTrackInterface> audio_track( |
435 CreateLocalAudioTrack(track_id, | 435 CreateLocalAudioTrack(track_id, |
436 capturer, | 436 capturer, |
437 webaudio_source.get(), | 437 webaudio_source.get(), |
438 source_data->local_audio_source(), | 438 source_data->local_audio_source(), |
439 &track_constraints)); | 439 &track_constraints)); |
440 AddNativeTrackToBlinkTrack(audio_track.get(), track); | 440 AddNativeTrackToBlinkTrack(audio_track.get(), track); |
441 | 441 |
442 audio_track->set_enabled(track.isEnabled()); | 442 audio_track->set_enabled(track.isEnabled()); |
443 if (capturer.get()) { | 443 if (capturer.get()) { |
444 WebKit::WebMediaStreamTrack writable_track = track; | 444 blink::WebMediaStreamTrack writable_track = track; |
445 writable_track.setSourceProvider(capturer->audio_source_provider()); | 445 writable_track.setSourceProvider(capturer->audio_source_provider()); |
446 } | 446 } |
447 return native_stream->AddTrack(audio_track.get()); | 447 return native_stream->AddTrack(audio_track.get()); |
448 } else { | 448 } else { |
449 DCHECK(source.type() == WebKit::WebMediaStreamSource::TypeVideo); | 449 DCHECK(source.type() == blink::WebMediaStreamSource::TypeVideo); |
450 scoped_refptr<webrtc::VideoTrackInterface> video_track( | 450 scoped_refptr<webrtc::VideoTrackInterface> video_track( |
451 CreateLocalVideoTrack(track_id, source_data->video_source())); | 451 CreateLocalVideoTrack(track_id, source_data->video_source())); |
452 AddNativeTrackToBlinkTrack(video_track.get(), track); | 452 AddNativeTrackToBlinkTrack(video_track.get(), track); |
453 video_track->set_enabled(track.isEnabled()); | 453 video_track->set_enabled(track.isEnabled()); |
454 return native_stream->AddTrack(video_track.get()); | 454 return native_stream->AddTrack(video_track.get()); |
455 } | 455 } |
456 } | 456 } |
457 | 457 |
458 bool MediaStreamDependencyFactory::AddNativeVideoMediaTrack( | 458 bool MediaStreamDependencyFactory::AddNativeVideoMediaTrack( |
459 const std::string& track_id, | 459 const std::string& track_id, |
460 WebKit::WebMediaStream* stream, | 460 blink::WebMediaStream* stream, |
461 cricket::VideoCapturer* capturer) { | 461 cricket::VideoCapturer* capturer) { |
462 if (!stream) { | 462 if (!stream) { |
463 LOG(ERROR) << "AddNativeVideoMediaTrack called with null WebMediaStream."; | 463 LOG(ERROR) << "AddNativeVideoMediaTrack called with null WebMediaStream."; |
464 return false; | 464 return false; |
465 } | 465 } |
466 | 466 |
467 // Create native track from the source. | 467 // Create native track from the source. |
468 scoped_refptr<webrtc::VideoTrackInterface> native_track = | 468 scoped_refptr<webrtc::VideoTrackInterface> native_track = |
469 CreateLocalVideoTrack(track_id, capturer); | 469 CreateLocalVideoTrack(track_id, capturer); |
470 | 470 |
471 // Add the native track to the native stream. | 471 // Add the native track to the native stream. |
472 MediaStreamExtraData* extra_data = | 472 MediaStreamExtraData* extra_data = |
473 static_cast<MediaStreamExtraData*>(stream->extraData()); | 473 static_cast<MediaStreamExtraData*>(stream->extraData()); |
474 DCHECK(extra_data); | 474 DCHECK(extra_data); |
475 webrtc::MediaStreamInterface* native_stream = extra_data->stream().get(); | 475 webrtc::MediaStreamInterface* native_stream = extra_data->stream().get(); |
476 native_stream->AddTrack(native_track.get()); | 476 native_stream->AddTrack(native_track.get()); |
477 | 477 |
478 // Create a new webkit video track. | 478 // Create a new webkit video track. |
479 WebKit::WebMediaStreamTrack webkit_track; | 479 blink::WebMediaStreamTrack webkit_track; |
480 WebKit::WebMediaStreamSource webkit_source; | 480 blink::WebMediaStreamSource webkit_source; |
481 WebKit::WebString webkit_track_id(UTF8ToUTF16(track_id)); | 481 blink::WebString webkit_track_id(UTF8ToUTF16(track_id)); |
482 WebKit::WebMediaStreamSource::Type type = | 482 blink::WebMediaStreamSource::Type type = |
483 WebKit::WebMediaStreamSource::TypeVideo; | 483 blink::WebMediaStreamSource::TypeVideo; |
484 webkit_source.initialize(webkit_track_id, type, webkit_track_id); | 484 webkit_source.initialize(webkit_track_id, type, webkit_track_id); |
485 | 485 |
486 webkit_track.initialize(webkit_track_id, webkit_source); | 486 webkit_track.initialize(webkit_track_id, webkit_source); |
487 AddNativeTrackToBlinkTrack(native_track.get(), webkit_track); | 487 AddNativeTrackToBlinkTrack(native_track.get(), webkit_track); |
488 | 488 |
489 // Add the track to WebMediaStream. | 489 // Add the track to WebMediaStream. |
490 stream->addTrack(webkit_track); | 490 stream->addTrack(webkit_track); |
491 return true; | 491 return true; |
492 } | 492 } |
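For reference, a brief sketch of adding a non-getUserMedia video track (e.g. a tab- or screen-capture feed) to an existing stream; |screen_capturer| is a hypothetical caller-provided cricket::VideoCapturer, and ownership is assumed to pass to the video source, as CreateLocalVideoSource() does above:

  // |screen_capturer| is a caller-provided cricket::VideoCapturer
  // implementation (hypothetical name in this sketch).
  bool added = factory->AddNativeVideoMediaTrack(
      "screen-track-0", &web_stream, screen_capturer);
  DCHECK(added);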
493 | 493 |
494 bool MediaStreamDependencyFactory::RemoveNativeMediaStreamTrack( | 494 bool MediaStreamDependencyFactory::RemoveNativeMediaStreamTrack( |
495 const WebKit::WebMediaStream& stream, | 495 const blink::WebMediaStream& stream, |
496 const WebKit::WebMediaStreamTrack& track) { | 496 const blink::WebMediaStreamTrack& track) { |
497 MediaStreamExtraData* extra_data = | 497 MediaStreamExtraData* extra_data = |
498 static_cast<MediaStreamExtraData*>(stream.extraData()); | 498 static_cast<MediaStreamExtraData*>(stream.extraData()); |
499 webrtc::MediaStreamInterface* native_stream = extra_data->stream().get(); | 499 webrtc::MediaStreamInterface* native_stream = extra_data->stream().get(); |
500 DCHECK(native_stream); | 500 DCHECK(native_stream); |
501 | 501 |
502 WebKit::WebMediaStreamSource::Type type = track.source().type(); | 502 blink::WebMediaStreamSource::Type type = track.source().type(); |
503 DCHECK(type == WebKit::WebMediaStreamSource::TypeAudio || | 503 DCHECK(type == blink::WebMediaStreamSource::TypeAudio || |
504 type == WebKit::WebMediaStreamSource::TypeVideo); | 504 type == blink::WebMediaStreamSource::TypeVideo); |
505 | 505 |
506 WebKit::WebMediaStreamTrack writable_track = track; | 506 blink::WebMediaStreamTrack writable_track = track; |
507 writable_track.setExtraData(NULL); | 507 writable_track.setExtraData(NULL); |
508 | 508 |
509 std::string track_id = UTF16ToUTF8(track.id()); | 509 std::string track_id = UTF16ToUTF8(track.id()); |
510 if (type == WebKit::WebMediaStreamSource::TypeAudio) { | 510 if (type == blink::WebMediaStreamSource::TypeAudio) { |
511 // Remove the source provider as the track is going away. | 511 // Remove the source provider as the track is going away. |
512 writable_track.setSourceProvider(NULL); | 512 writable_track.setSourceProvider(NULL); |
513 return native_stream->RemoveTrack(native_stream->FindAudioTrack(track_id)); | 513 return native_stream->RemoveTrack(native_stream->FindAudioTrack(track_id)); |
514 } | 514 } |
515 | 515 |
516 CHECK_EQ(type, WebKit::WebMediaStreamSource::TypeVideo); | 516 CHECK_EQ(type, blink::WebMediaStreamSource::TypeVideo); |
517 return native_stream->RemoveTrack(native_stream->FindVideoTrack(track_id)); | 517 return native_stream->RemoveTrack(native_stream->FindVideoTrack(track_id)); |
518 } | 518 } |
519 | 519 |
520 bool MediaStreamDependencyFactory::CreatePeerConnectionFactory() { | 520 bool MediaStreamDependencyFactory::CreatePeerConnectionFactory() { |
521 DCHECK(!pc_factory_.get()); | 521 DCHECK(!pc_factory_.get()); |
522 DCHECK(!audio_device_.get()); | 522 DCHECK(!audio_device_.get()); |
523 DVLOG(1) << "MediaStreamDependencyFactory::CreatePeerConnectionFactory()"; | 523 DVLOG(1) << "MediaStreamDependencyFactory::CreatePeerConnectionFactory()"; |
524 | 524 |
525 scoped_ptr<cricket::WebRtcVideoDecoderFactory> decoder_factory; | 525 scoped_ptr<cricket::WebRtcVideoDecoderFactory> decoder_factory; |
526 scoped_ptr<cricket::WebRtcVideoEncoderFactory> encoder_factory; | 526 scoped_ptr<cricket::WebRtcVideoEncoderFactory> encoder_factory; |
(...skipping 44 matching lines...)
571 } | 571 } |
572 | 572 |
573 bool MediaStreamDependencyFactory::PeerConnectionFactoryCreated() { | 573 bool MediaStreamDependencyFactory::PeerConnectionFactoryCreated() { |
574 return pc_factory_.get() != NULL; | 574 return pc_factory_.get() != NULL; |
575 } | 575 } |
576 | 576 |
577 scoped_refptr<webrtc::PeerConnectionInterface> | 577 scoped_refptr<webrtc::PeerConnectionInterface> |
578 MediaStreamDependencyFactory::CreatePeerConnection( | 578 MediaStreamDependencyFactory::CreatePeerConnection( |
579 const webrtc::PeerConnectionInterface::IceServers& ice_servers, | 579 const webrtc::PeerConnectionInterface::IceServers& ice_servers, |
580 const webrtc::MediaConstraintsInterface* constraints, | 580 const webrtc::MediaConstraintsInterface* constraints, |
581 WebKit::WebFrame* web_frame, | 581 blink::WebFrame* web_frame, |
582 webrtc::PeerConnectionObserver* observer) { | 582 webrtc::PeerConnectionObserver* observer) { |
583 CHECK(web_frame); | 583 CHECK(web_frame); |
584 CHECK(observer); | 584 CHECK(observer); |
585 | 585 |
586 scoped_refptr<P2PPortAllocatorFactory> pa_factory = | 586 scoped_refptr<P2PPortAllocatorFactory> pa_factory = |
587 new talk_base::RefCountedObject<P2PPortAllocatorFactory>( | 587 new talk_base::RefCountedObject<P2PPortAllocatorFactory>( |
588 p2p_socket_dispatcher_.get(), | 588 p2p_socket_dispatcher_.get(), |
589 network_manager_, | 589 network_manager_, |
590 socket_factory_.get(), | 590 socket_factory_.get(), |
591 web_frame); | 591 web_frame); |
(...skipping 32 matching lines...)
624 video_session_id, vc_manager_.get(), is_screencast); | 624 video_session_id, vc_manager_.get(), is_screencast); |
625 | 625 |
626 // The video source takes ownership of |capturer|. | 626 // The video source takes ownership of |capturer|. |
627 scoped_refptr<webrtc::VideoSourceInterface> source = | 627 scoped_refptr<webrtc::VideoSourceInterface> source = |
628 pc_factory_->CreateVideoSource(capturer, constraints).get(); | 628 pc_factory_->CreateVideoSource(capturer, constraints).get(); |
629 return source; | 629 return source; |
630 } | 630 } |
631 | 631 |
632 scoped_refptr<WebAudioCapturerSource> | 632 scoped_refptr<WebAudioCapturerSource> |
633 MediaStreamDependencyFactory::CreateWebAudioSource( | 633 MediaStreamDependencyFactory::CreateWebAudioSource( |
634 WebKit::WebMediaStreamSource* source, | 634 blink::WebMediaStreamSource* source, |
635 RTCMediaConstraints* constraints) { | 635 RTCMediaConstraints* constraints) { |
636 DVLOG(1) << "MediaStreamDependencyFactory::CreateWebAudioSource()"; | 636 DVLOG(1) << "MediaStreamDependencyFactory::CreateWebAudioSource()"; |
637 DCHECK(GetWebRtcAudioDevice()); | 637 DCHECK(GetWebRtcAudioDevice()); |
638 | 638 |
639 scoped_refptr<WebAudioCapturerSource> | 639 scoped_refptr<WebAudioCapturerSource> |
640 webaudio_capturer_source(new WebAudioCapturerSource()); | 640 webaudio_capturer_source(new WebAudioCapturerSource()); |
641 MediaStreamSourceExtraData* source_data = new MediaStreamSourceExtraData(); | 641 MediaStreamSourceExtraData* source_data = new MediaStreamSourceExtraData(); |
642 | 642 |
643 // Create a LocalAudioSource object which holds audio options. | 643 // Create a LocalAudioSource object which holds audio options. |
644 // SetLocalAudioSource() affects core audio parts in third_party/Libjingle. | 644 // SetLocalAudioSource() affects core audio parts in third_party/Libjingle. |
(...skipping 209 matching lines...)
854 | 854 |
855 // Add the capturer to the WebRtcAudioDeviceImpl if it is a new capturer. | 855 // Add the capturer to the WebRtcAudioDeviceImpl if it is a new capturer. |
856 if (is_new_capturer) | 856 if (is_new_capturer) |
857 GetWebRtcAudioDevice()->AddAudioCapturer(capturer); | 857 GetWebRtcAudioDevice()->AddAudioCapturer(capturer); |
858 | 858 |
859 return capturer; | 859 return capturer; |
860 } | 860 } |
861 | 861 |
862 void MediaStreamDependencyFactory::AddNativeTrackToBlinkTrack( | 862 void MediaStreamDependencyFactory::AddNativeTrackToBlinkTrack( |
863 webrtc::MediaStreamTrackInterface* native_track, | 863 webrtc::MediaStreamTrackInterface* native_track, |
864 const WebKit::WebMediaStreamTrack& webkit_track) { | 864 const blink::WebMediaStreamTrack& webkit_track) { |
865 DCHECK(!webkit_track.isNull() && !webkit_track.extraData()); | 865 DCHECK(!webkit_track.isNull() && !webkit_track.extraData()); |
866 WebKit::WebMediaStreamTrack track = webkit_track; | 866 blink::WebMediaStreamTrack track = webkit_track; |
867 track.setExtraData(new MediaStreamTrackExtraData(native_track)); | 867 track.setExtraData(new MediaStreamTrackExtraData(native_track)); |
868 } | 868 } |
869 | 869 |
870 webrtc::MediaStreamInterface* | 870 webrtc::MediaStreamInterface* |
871 MediaStreamDependencyFactory::GetNativeMediaStream( | 871 MediaStreamDependencyFactory::GetNativeMediaStream( |
872 const WebKit::WebMediaStream& stream) { | 872 const blink::WebMediaStream& stream) { |
873 if (stream.isNull()) | 873 if (stream.isNull()) |
874 return NULL; | 874 return NULL; |
875 MediaStreamExtraData* extra_data = | 875 MediaStreamExtraData* extra_data = |
876 static_cast<MediaStreamExtraData*>(stream.extraData()); | 876 static_cast<MediaStreamExtraData*>(stream.extraData()); |
877 return extra_data ? extra_data->stream().get() : NULL; | 877 return extra_data ? extra_data->stream().get() : NULL; |
878 } | 878 } |
879 | 879 |
880 webrtc::MediaStreamTrackInterface* | 880 webrtc::MediaStreamTrackInterface* |
881 MediaStreamDependencyFactory::GetNativeMediaStreamTrack( | 881 MediaStreamDependencyFactory::GetNativeMediaStreamTrack( |
882 const WebKit::WebMediaStreamTrack& track) { | 882 const blink::WebMediaStreamTrack& track) { |
883 if (track.isNull()) | 883 if (track.isNull()) |
884 return NULL; | 884 return NULL; |
885 MediaStreamTrackExtraData* extra_data = | 885 MediaStreamTrackExtraData* extra_data = |
886 static_cast<MediaStreamTrackExtraData*>(track.extraData()); | 886 static_cast<MediaStreamTrackExtraData*>(track.extraData()); |
887 return extra_data ? extra_data->track().get() : NULL; | 887 return extra_data ? extra_data->track().get() : NULL; |
888 } | 888 } |
889 | 889 |
890 } // namespace content | 890 } // namespace content |
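Finally, a short sketch of the lookup helpers defined at the end of the file, which map Blink-side objects back to the libjingle objects attached via setExtraData(); both return NULL when no native object has been attached:

  webrtc::MediaStreamInterface* native_stream =
      factory->GetNativeMediaStream(web_stream);
  webrtc::MediaStreamTrackInterface* native_track =
      factory->GetNativeMediaStreamTrack(web_track);
  if (native_stream && native_track) {
    // The underlying webrtc objects can now be used directly,
    // e.g. to toggle the track's enabled state.
    native_track->set_enabled(false);
  }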