| OLD | NEW |
| 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "content/renderer/media/media_stream_dependency_factory.h" | 5 #include "content/renderer/media/media_stream_dependency_factory.h" |
| 6 | 6 |
| 7 #include <vector> | 7 #include <vector> |
| 8 | 8 |
| 9 #include "base/command_line.h" | 9 #include "base/command_line.h" |
| 10 #include "base/strings/utf_string_conversions.h" | 10 #include "base/strings/utf_string_conversions.h" |
| 11 #include "base/synchronization/waitable_event.h" | 11 #include "base/synchronization/waitable_event.h" |
| 12 #include "content/common/media/media_stream_messages.h" | 12 #include "content/common/media/media_stream_messages.h" |
| 13 #include "content/public/common/content_switches.h" | 13 #include "content/public/common/content_switches.h" |
| 14 #include "content/renderer/media/media_stream_source_extra_data.h" | 14 #include "content/renderer/media/media_stream_source.h" |
| 15 #include "content/renderer/media/media_stream_video_source.h" |
| 15 #include "content/renderer/media/media_stream_track_extra_data.h" | 16 #include "content/renderer/media/media_stream_track_extra_data.h" |
| 16 #include "content/renderer/media/media_stream_video_track.h" | 17 #include "content/renderer/media/media_stream_video_track.h" |
| 17 #include "content/renderer/media/peer_connection_identity_service.h" | 18 #include "content/renderer/media/peer_connection_identity_service.h" |
| 18 #include "content/renderer/media/rtc_media_constraints.h" | 19 #include "content/renderer/media/rtc_media_constraints.h" |
| 19 #include "content/renderer/media/rtc_peer_connection_handler.h" | 20 #include "content/renderer/media/rtc_peer_connection_handler.h" |
| 20 #include "content/renderer/media/rtc_video_capturer.h" | 21 #include "content/renderer/media/rtc_video_capturer.h" |
| 21 #include "content/renderer/media/rtc_video_decoder_factory.h" | 22 #include "content/renderer/media/rtc_video_decoder_factory.h" |
| 22 #include "content/renderer/media/rtc_video_encoder_factory.h" | 23 #include "content/renderer/media/rtc_video_encoder_factory.h" |
| 23 #include "content/renderer/media/webaudio_capturer_source.h" | 24 #include "content/renderer/media/webaudio_capturer_source.h" |
| 24 #include "content/renderer/media/webrtc_audio_device_impl.h" | 25 #include "content/renderer/media/webrtc_audio_device_impl.h" |
| (...skipping 131 matching lines...) |
| 156 private: | 157 private: |
| 157 scoped_refptr<P2PSocketDispatcher> socket_dispatcher_; | 158 scoped_refptr<P2PSocketDispatcher> socket_dispatcher_; |
| 158 // |network_manager_| and |socket_factory_| are weak references, owned by | 159 // |network_manager_| and |socket_factory_| are weak references, owned by |
| 159 // MediaStreamDependencyFactory. | 160 // MediaStreamDependencyFactory. |
| 160 talk_base::NetworkManager* network_manager_; | 161 talk_base::NetworkManager* network_manager_; |
| 161 talk_base::PacketSocketFactory* socket_factory_; | 162 talk_base::PacketSocketFactory* socket_factory_; |
| 162 // Raw ptr to the WebFrame that created the P2PPortAllocatorFactory. | 163 // Raw ptr to the WebFrame that created the P2PPortAllocatorFactory. |
| 163 blink::WebFrame* web_frame_; | 164 blink::WebFrame* web_frame_; |
| 164 }; | 165 }; |
| 165 | 166 |
| 166 // SourceStateObserver is a helper class used for observing the startup state | |
| 167 // transition of webrtc media sources such as a camera or microphone. | |
| 168 // An instance of the object deletes itself after use. | |
| 169 // Usage: | |
| 170 // 1. Create an instance of the object with the blink::WebMediaStream | |
| 171 // the observed sources belong to and a callback. | |
| 172 // 2. Add the sources to the observer using AddSource. | |
| 173 // 3. Call StartObserving() | |
| 174 // 4. The callback will be triggered when all sources have transitioned from | |
| 175 // webrtc::MediaSourceInterface::kInitializing. | |
| 176 class SourceStateObserver : public webrtc::ObserverInterface, | |
| 177 public base::NonThreadSafe { | |
| 178 public: | |
| 179 SourceStateObserver( | |
| 180 blink::WebMediaStream* web_stream, | |
| 181 const MediaStreamDependencyFactory::MediaSourcesCreatedCallback& callback) | |
| 182 : web_stream_(web_stream), | |
| 183 ready_callback_(callback), | |
| 184 live_(true) { | |
| 185 } | |
| 186 | |
| 187 void AddSource(webrtc::MediaSourceInterface* source) { | |
| 188 DCHECK(CalledOnValidThread()); | |
| 189 switch (source->state()) { | |
| 190 case webrtc::MediaSourceInterface::kInitializing: | |
| 191 sources_.push_back(source); | |
| 192 source->RegisterObserver(this); | |
| 193 break; | |
| 194 case webrtc::MediaSourceInterface::kLive: | |
| 195 // The source is already live so we don't need to wait for it. | |
| 196 break; | |
| 197 case webrtc::MediaSourceInterface::kEnded: | |
| 198 // The source has already failed. | |
| 199 live_ = false; | |
| 200 break; | |
| 201 default: | |
| 202 NOTREACHED(); | |
| 203 } | |
| 204 } | |
| 205 | |
| 206 void StartObservering() { | |
| 207 DCHECK(CalledOnValidThread()); | |
| 208 CheckIfSourcesAreLive(); | |
| 209 } | |
| 210 | |
| 211 virtual void OnChanged() OVERRIDE { | |
| 212 DCHECK(CalledOnValidThread()); | |
| 213 CheckIfSourcesAreLive(); | |
| 214 } | |
| 215 | |
| 216 private: | |
| 217 void CheckIfSourcesAreLive() { | |
| 218 ObservedSources::iterator it = sources_.begin(); | |
| 219 while (it != sources_.end()) { | |
| 220 if ((*it)->state() != webrtc::MediaSourceInterface::kInitializing) { | |
| 221 live_ &= (*it)->state() == webrtc::MediaSourceInterface::kLive; | |
| 222 (*it)->UnregisterObserver(this); | |
| 223 it = sources_.erase(it); | |
| 224 } else { | |
| 225 ++it; | |
| 226 } | |
| 227 } | |
| 228 if (sources_.empty()) { | |
| 229 ready_callback_.Run(web_stream_, live_); | |
| 230 delete this; | |
| 231 } | |
| 232 } | |
| 233 | |
| 234 blink::WebMediaStream* web_stream_; | |
| 235 MediaStreamDependencyFactory::MediaSourcesCreatedCallback ready_callback_; | |
| 236 bool live_; | |
| 237 typedef std::vector<scoped_refptr<webrtc::MediaSourceInterface> > | |
| 238 ObservedSources; | |
| 239 ObservedSources sources_; | |
| 240 }; | |
| 241 | |
| 242 MediaStreamDependencyFactory::MediaStreamDependencyFactory( | 167 MediaStreamDependencyFactory::MediaStreamDependencyFactory( |
| 243 P2PSocketDispatcher* p2p_socket_dispatcher) | 168 P2PSocketDispatcher* p2p_socket_dispatcher) |
| 244 : network_manager_(NULL), | 169 : network_manager_(NULL), |
| 245 #if defined(GOOGLE_TV) | 170 #if defined(GOOGLE_TV) |
| 246 decoder_factory_tv_(NULL), | 171 decoder_factory_tv_(NULL), |
| 247 #endif | 172 #endif |
| 248 p2p_socket_dispatcher_(p2p_socket_dispatcher), | 173 p2p_socket_dispatcher_(p2p_socket_dispatcher), |
| 249 signaling_thread_(NULL), | 174 signaling_thread_(NULL), |
| 250 worker_thread_(NULL), | 175 worker_thread_(NULL), |
| 251 chrome_worker_thread_("Chrome_libJingle_WorkerThread"), | 176 chrome_worker_thread_("Chrome_libJingle_WorkerThread"), |
| (...skipping 13 matching lines...) |
| 265 // The histogram counts the number of calls to the JS API | 190 // The histogram counts the number of calls to the JS API |
| 266 // webKitRTCPeerConnection. | 191 // webKitRTCPeerConnection. |
| 267 UpdateWebRTCMethodCount(WEBKIT_RTC_PEER_CONNECTION); | 192 UpdateWebRTCMethodCount(WEBKIT_RTC_PEER_CONNECTION); |
| 268 | 193 |
| 269 if (!EnsurePeerConnectionFactory()) | 194 if (!EnsurePeerConnectionFactory()) |
| 270 return NULL; | 195 return NULL; |
| 271 | 196 |
| 272 return new RTCPeerConnectionHandler(client, this); | 197 return new RTCPeerConnectionHandler(client, this); |
| 273 } | 198 } |
| 274 | 199 |
| 275 void MediaStreamDependencyFactory::CreateNativeMediaSources( | 200 bool MediaStreamDependencyFactory::CreateNativeMediaStreamAudioSources( |
| 276 int render_view_id, | 201 int render_view_id, |
| 277 const blink::WebMediaConstraints& audio_constraints, | 202 const blink::WebMediaConstraints& audio_constraints, |
| 278 const blink::WebMediaConstraints& video_constraints, | 203 const blink::WebVector<blink::WebMediaStreamSource>& audio_sources) { |
| 279 blink::WebMediaStream* web_stream, | 204 DVLOG(1) << "CreateNativeMediaStreamAudioSources()"; |
| 280 const MediaSourcesCreatedCallback& sources_created) { | |
| 281 DVLOG(1) << "MediaStreamDependencyFactory::CreateNativeMediaSources()"; | |
| 282 if (!EnsurePeerConnectionFactory()) { | 205 if (!EnsurePeerConnectionFactory()) { |
| 283 sources_created.Run(web_stream, false); | 206 return false; |
| 284 return; | |
| 285 } | |
| 286 | |
| 287 // |source_observer| cleans itself up when it has completed | |
| 288 // source_observer->StartObservering. | |
| 289 SourceStateObserver* source_observer = | |
| 290 new SourceStateObserver(web_stream, sources_created); | |
| 291 | |
| 292 // Create local video sources. | |
| 293 RTCMediaConstraints native_video_constraints(video_constraints); | |
| 294 blink::WebVector<blink::WebMediaStreamTrack> video_tracks; | |
| 295 web_stream->videoTracks(video_tracks); | |
| 296 for (size_t i = 0; i < video_tracks.size(); ++i) { | |
| 297 const blink::WebMediaStreamSource& source = video_tracks[i].source(); | |
| 298 MediaStreamSourceExtraData* source_data = | |
| 299 static_cast<MediaStreamSourceExtraData*>(source.extraData()); | |
| 300 | |
| 301 // Check if the source has already been created. This happens when the same | |
| 302 // source is used in multiple MediaStreams as a result of calling | |
| 303 // getUserMedia. | |
| 304 if (source_data->video_source()) | |
| 305 continue; | |
| 306 | |
| 307 const bool is_screencast = | |
| 308 source_data->device_info().device.type == MEDIA_TAB_VIDEO_CAPTURE || | |
| 309 source_data->device_info().device.type == MEDIA_DESKTOP_VIDEO_CAPTURE; | |
| 310 source_data->SetVideoSource( | |
| 311 CreateLocalVideoSource(source_data->device_info().session_id, | |
| 312 is_screencast, | |
| 313 &native_video_constraints).get()); | |
| 314 source_observer->AddSource(source_data->video_source()); | |
| 315 } | 207 } |
| 316 | 208 |
| 317 // Do additional source initialization if the audio source is a valid | 209 // Do additional source initialization if the audio source is a valid |
| 318 // microphone or tab audio. | 210 // microphone or tab audio. |
| 319 RTCMediaConstraints native_audio_constraints(audio_constraints); | 211 RTCMediaConstraints native_audio_constraints(audio_constraints); |
| 320 ApplyFixedAudioConstraints(&native_audio_constraints); | 212 ApplyFixedAudioConstraints(&native_audio_constraints); |
| 321 blink::WebVector<blink::WebMediaStreamTrack> audio_tracks; | 213 for (size_t i = 0; i < audio_sources.size(); ++i) { |
| 322 web_stream->audioTracks(audio_tracks); | 214 const blink::WebMediaStreamSource& source = audio_sources[i]; |
| 323 for (size_t i = 0; i < audio_tracks.size(); ++i) { | 215 MediaStreamSource* source_data = |
| 324 const blink::WebMediaStreamSource& source = audio_tracks[i].source(); | 216 static_cast<MediaStreamSource*>(source.extraData()); |
| 325 MediaStreamSourceExtraData* source_data = | |
| 326 static_cast<MediaStreamSourceExtraData*>(source.extraData()); | |
| 327 | 217 |
| 328 // Check if the source has already been created. This happens when the same | 218 // Check if the source has already been created. This happens when the same |
| 329 // source is used in multiple MediaStreams as a result of calling | 219 // source is used in multiple MediaStreams as a result of calling |
| 330 // getUserMedia. | 220 // getUserMedia. |
| 331 if (source_data->local_audio_source()) | 221 if (source_data->local_audio_source()) |
| 332 continue; | 222 continue; |
| 333 | 223 |
| 334 // TODO(xians): Create a new capturer for different microphones when we | 224 // TODO(xians): Create a new capturer for different microphones when we |
| 335 // support multiple microphones. See issue crbug/262117. | 225 // support multiple microphones. See issue crbug/262117. |
| 336 StreamDeviceInfo device_info = source_data->device_info(); | 226 StreamDeviceInfo device_info = source_data->device_info(); |
| (...skipping 24 matching lines...) |
| 361 } | 251 } |
| 362 } | 252 } |
| 363 device_info.device.input.effects = effects; | 253 device_info.device.input.effects = effects; |
| 364 } | 254 } |
| 365 | 255 |
| 366 scoped_refptr<WebRtcAudioCapturer> capturer( | 256 scoped_refptr<WebRtcAudioCapturer> capturer( |
| 367 MaybeCreateAudioCapturer(render_view_id, device_info)); | 257 MaybeCreateAudioCapturer(render_view_id, device_info)); |
| 368 if (!capturer.get()) { | 258 if (!capturer.get()) { |
| 369 DLOG(WARNING) << "Failed to create the capturer for device " | 259 DLOG(WARNING) << "Failed to create the capturer for device " |
| 370 << device_info.device.id; | 260 << device_info.device.id; |
| 371 sources_created.Run(web_stream, false); | |
| 372 // TODO(xians): Don't we need to check if source_observer is observing | 261 // TODO(xians): Don't we need to check if source_observer is observing |
| 373 // something? If not, then it looks like we have a leak here. | 262 // something? If not, then it looks like we have a leak here. |
| 374 // OTOH, if it _is_ observing something, then the callback might | 263 // OTOH, if it _is_ observing something, then the callback might |
| 375 // be called multiple times which is likely also a bug. | 264 // be called multiple times which is likely also a bug. |
| 376 return; | 265 return false; |
| 377 } | 266 } |
| 378 source_data->SetAudioCapturer(capturer); | 267 source_data->SetAudioCapturer(capturer); |
| 379 | 268 |
| 380 // Creates a LocalAudioSource object which holds audio options. | 269 // Creates a LocalAudioSource object which holds audio options. |
| 381 // TODO(xians): The option should apply to the track instead of the source. | 270 // TODO(xians): The option should apply to the track instead of the source. |
| 382 source_data->SetLocalAudioSource( | 271 source_data->SetLocalAudioSource( |
| 383 CreateLocalAudioSource(&constraints).get()); | 272 CreateLocalAudioSource(&constraints).get()); |
| 384 source_observer->AddSource(source_data->local_audio_source()); | |
| 385 } | 273 } |
| 274 return true; |
| 275 } |
| 386 | 276 |
| 387 source_observer->StartObservering(); | 277 cricket::VideoCapturer* MediaStreamDependencyFactory::CreateVideoCapturer( |
| 278 const StreamDeviceInfo& info) { |
| 279 bool is_screeencast = |
| 280 info.device.type == MEDIA_TAB_VIDEO_CAPTURE || |
| 281 info.device.type == MEDIA_DESKTOP_VIDEO_CAPTURE; |
| 282 return new RtcVideoCapturer(info.session_id, is_screeencast); |
| 283 } |
| 284 |
| 285 scoped_refptr<webrtc::VideoSourceInterface> |
| 286 MediaStreamDependencyFactory::CreateLocalVideoSource( |
| 287 cricket::VideoCapturer* capturer, |
| 288 const blink::WebMediaConstraints& video_constraints) { |
| 289 RTCMediaConstraints webrtc_constraints(video_constraints); |
| 290 // The video source takes ownership of |capturer|. |
| 291 scoped_refptr<webrtc::VideoSourceInterface> source = |
| 292 pc_factory_->CreateVideoSource(capturer, &webrtc_constraints).get(); |
| 293 return source; |
| 388 } | 294 } |
| 389 | 295 |
| 390 void MediaStreamDependencyFactory::CreateNativeLocalMediaStream( | 296 void MediaStreamDependencyFactory::CreateNativeLocalMediaStream( |
| 391 blink::WebMediaStream* web_stream) { | 297 blink::WebMediaStream* web_stream) { |
| 392 DVLOG(1) << "MediaStreamDependencyFactory::CreateNativeLocalMediaStream()"; | 298 DVLOG(1) << "MediaStreamDependencyFactory::CreateNativeLocalMediaStream()"; |
| 393 if (!EnsurePeerConnectionFactory()) { | 299 if (!EnsurePeerConnectionFactory()) { |
| 394 DVLOG(1) << "EnsurePeerConnectionFactory() failed!"; | 300 DVLOG(1) << "EnsurePeerConnectionFactory() failed!"; |
| 395 return; | 301 return; |
| 396 } | 302 } |
| 397 | 303 |
| (...skipping 27 matching lines...) |
| 425 MediaStreamExtraData* extra_data = | 331 MediaStreamExtraData* extra_data = |
| 426 static_cast<MediaStreamExtraData*>(web_stream->extraData()); | 332 static_cast<MediaStreamExtraData*>(web_stream->extraData()); |
| 427 extra_data->SetLocalStreamStopCallback(stream_stop); | 333 extra_data->SetLocalStreamStopCallback(stream_stop); |
| 428 } | 334 } |
| 429 | 335 |
| 430 scoped_refptr<webrtc::AudioTrackInterface> | 336 scoped_refptr<webrtc::AudioTrackInterface> |
| 431 MediaStreamDependencyFactory::CreateNativeAudioMediaStreamTrack( | 337 MediaStreamDependencyFactory::CreateNativeAudioMediaStreamTrack( |
| 432 const blink::WebMediaStreamTrack& track) { | 338 const blink::WebMediaStreamTrack& track) { |
| 433 blink::WebMediaStreamSource source = track.source(); | 339 blink::WebMediaStreamSource source = track.source(); |
| 434 DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeAudio); | 340 DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeAudio); |
| 435 MediaStreamSourceExtraData* source_data = | 341 MediaStreamSource* source_data = |
| 436 static_cast<MediaStreamSourceExtraData*>(source.extraData()); | 342 static_cast<MediaStreamSource*>(source.extraData()); |
| 437 | 343 |
| 438 // In the future the constraints will belong to the track itself, but | 344 // In the future the constraints will belong to the track itself, but |
| 439 // right now they're on the source, so we fetch them from there. | 345 // right now they're on the source, so we fetch them from there. |
| 440 RTCMediaConstraints track_constraints(source.constraints()); | 346 RTCMediaConstraints track_constraints(source.constraints()); |
| 441 | 347 |
| 442 // Apply default audio constraints that enable echo cancellation, | 348 // Apply default audio constraints that enable echo cancellation, |
| 443 // automatic gain control, noise suppression and high-pass filter. | 349 // automatic gain control, noise suppression and high-pass filter. |
| 444 ApplyFixedAudioConstraints(&track_constraints); | 350 ApplyFixedAudioConstraints(&track_constraints); |
| 445 | 351 |
| 446 scoped_refptr<WebAudioCapturerSource> webaudio_source; | 352 scoped_refptr<WebAudioCapturerSource> webaudio_source; |
| 447 if (!source_data) { | 353 if (!source_data) { |
| 448 if (source.requiresAudioConsumer()) { | 354 if (source.requiresAudioConsumer()) { |
| 449 // We're adding a WebAudio MediaStream. | 355 // We're adding a WebAudio MediaStream. |
| 450 // Create a specific capturer for each WebAudio consumer. | 356 // Create a specific capturer for each WebAudio consumer. |
| 451 webaudio_source = CreateWebAudioSource(&source, &track_constraints); | 357 webaudio_source = CreateWebAudioSource(&source, &track_constraints); |
| 452 source_data = | 358 source_data = |
| 453 static_cast<MediaStreamSourceExtraData*>(source.extraData()); | 359 static_cast<MediaStreamSource*>(source.extraData()); |
| 454 } else { | 360 } else { |
| 455 // TODO(perkj): Implement support for sources from | 361 // TODO(perkj): Implement support for sources from |
| 456 // remote MediaStreams. | 362 // remote MediaStreams. |
| 457 NOTIMPLEMENTED(); | 363 NOTIMPLEMENTED(); |
| 458 return NULL; | 364 return NULL; |
| 459 } | 365 } |
| 460 } | 366 } |
| 461 | 367 |
| 462 std::string track_id = base::UTF16ToUTF8(track.id()); | 368 std::string track_id = base::UTF16ToUTF8(track.id()); |
| 463 scoped_refptr<WebRtcAudioCapturer> capturer; | 369 scoped_refptr<WebRtcAudioCapturer> capturer; |
| (...skipping 14 matching lines...) |
| 478 blink::WebMediaStreamTrack writable_track = track; | 384 blink::WebMediaStreamTrack writable_track = track; |
| 479 writable_track.setSourceProvider(static_cast<WebRtcLocalAudioTrack*>( | 385 writable_track.setSourceProvider(static_cast<WebRtcLocalAudioTrack*>( |
| 480 audio_track.get())->audio_source_provider()); | 386 audio_track.get())->audio_source_provider()); |
| 481 | 387 |
| 482 return audio_track; | 388 return audio_track; |
| 483 } | 389 } |
| 484 | 390 |
| 485 scoped_refptr<webrtc::VideoTrackInterface> | 391 scoped_refptr<webrtc::VideoTrackInterface> |
| 486 MediaStreamDependencyFactory::CreateNativeVideoMediaStreamTrack( | 392 MediaStreamDependencyFactory::CreateNativeVideoMediaStreamTrack( |
| 487 const blink::WebMediaStreamTrack& track) { | 393 const blink::WebMediaStreamTrack& track) { |
| 394 DCHECK(track.extraData() == NULL); |
| 488 blink::WebMediaStreamSource source = track.source(); | 395 blink::WebMediaStreamSource source = track.source(); |
| 489 DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeVideo); | 396 DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeVideo); |
| 490 MediaStreamSourceExtraData* source_data = | 397 |
| 491 static_cast<MediaStreamSourceExtraData*>(source.extraData()); | 398 MediaStreamVideoSource* source_data = |
| 399 static_cast<MediaStreamVideoSource*>(source.extraData()); |
| 492 | 400 |
| 493 if (!source_data) { | 401 if (!source_data) { |
| 494 // TODO(perkj): Implement support for sources from | 402 // TODO(perkj): Implement support for sources from |
| 495 // remote MediaStreams. | 403 // remote MediaStreams. |
| 496 NOTIMPLEMENTED(); | 404 NOTIMPLEMENTED(); |
| 497 return NULL; | 405 return NULL; |
| 498 } | 406 } |
| 499 | 407 |
| 500 std::string track_id = base::UTF16ToUTF8(track.id()); | 408 // Create native track from the source. |
| 501 scoped_refptr<webrtc::VideoTrackInterface> video_track( | 409 scoped_refptr<webrtc::VideoTrackInterface> webrtc_track = |
| 502 CreateLocalVideoTrack(track_id, source_data->video_source())); | 410 CreateLocalVideoTrack(track.id().utf8(), source_data->GetAdapter()); |
| 503 AddNativeTrackToBlinkTrack(video_track.get(), track, true); | |
| 504 | 411 |
| 505 video_track->set_enabled(track.isEnabled()); | 412 AddNativeTrackToBlinkTrack(webrtc_track, track, true); |
| 506 | 413 |
| 507 return video_track; | 414 webrtc_track->set_enabled(track.isEnabled()); |
| 415 |
| 416 return webrtc_track; |
| 508 } | 417 } |
| 509 | 418 |
| 510 void MediaStreamDependencyFactory::CreateNativeMediaStreamTrack( | 419 void MediaStreamDependencyFactory::CreateNativeMediaStreamTrack( |
| 511 const blink::WebMediaStreamTrack& track) { | 420 const blink::WebMediaStreamTrack& track) { |
| 512 DCHECK(!track.isNull() && !track.extraData()); | 421 DCHECK(!track.isNull() && !track.extraData()); |
| 513 DCHECK(!track.source().isNull()); | 422 DCHECK(!track.source().isNull()); |
| 514 | 423 |
| 515 switch (track.source().type()) { | 424 switch (track.source().type()) { |
| 516 case blink::WebMediaStreamSource::TypeAudio: | 425 case blink::WebMediaStreamSource::TypeAudio: |
| 517 CreateNativeAudioMediaStreamTrack(track); | 426 CreateNativeAudioMediaStreamTrack(track); |
| (...skipping 196 matching lines...) |
| 714 } | 623 } |
| 715 | 624 |
| 716 scoped_refptr<webrtc::AudioSourceInterface> | 625 scoped_refptr<webrtc::AudioSourceInterface> |
| 717 MediaStreamDependencyFactory::CreateLocalAudioSource( | 626 MediaStreamDependencyFactory::CreateLocalAudioSource( |
| 718 const webrtc::MediaConstraintsInterface* constraints) { | 627 const webrtc::MediaConstraintsInterface* constraints) { |
| 719 scoped_refptr<webrtc::AudioSourceInterface> source = | 628 scoped_refptr<webrtc::AudioSourceInterface> source = |
| 720 pc_factory_->CreateAudioSource(constraints).get(); | 629 pc_factory_->CreateAudioSource(constraints).get(); |
| 721 return source; | 630 return source; |
| 722 } | 631 } |
| 723 | 632 |
| 724 scoped_refptr<webrtc::VideoSourceInterface> | |
| 725 MediaStreamDependencyFactory::CreateLocalVideoSource( | |
| 726 int video_session_id, | |
| 727 bool is_screencast, | |
| 728 const webrtc::MediaConstraintsInterface* constraints) { | |
| 729 RtcVideoCapturer* capturer = new RtcVideoCapturer( | |
| 730 video_session_id, is_screencast); | |
| 731 | |
| 732 // The video source takes ownership of |capturer|. | |
| 733 scoped_refptr<webrtc::VideoSourceInterface> source = | |
| 734 pc_factory_->CreateVideoSource(capturer, constraints).get(); | |
| 735 return source; | |
| 736 } | |
| 737 | |
| 738 scoped_refptr<WebAudioCapturerSource> | 633 scoped_refptr<WebAudioCapturerSource> |
| 739 MediaStreamDependencyFactory::CreateWebAudioSource( | 634 MediaStreamDependencyFactory::CreateWebAudioSource( |
| 740 blink::WebMediaStreamSource* source, | 635 blink::WebMediaStreamSource* source, |
| 741 RTCMediaConstraints* constraints) { | 636 RTCMediaConstraints* constraints) { |
| 742 DVLOG(1) << "MediaStreamDependencyFactory::CreateWebAudioSource()"; | 637 DVLOG(1) << "MediaStreamDependencyFactory::CreateWebAudioSource()"; |
| 743 DCHECK(GetWebRtcAudioDevice()); | 638 DCHECK(GetWebRtcAudioDevice()); |
| 744 | 639 |
| 745 scoped_refptr<WebAudioCapturerSource> | 640 scoped_refptr<WebAudioCapturerSource> |
| 746 webaudio_capturer_source(new WebAudioCapturerSource()); | 641 webaudio_capturer_source(new WebAudioCapturerSource()); |
| 747 MediaStreamSourceExtraData* source_data = new MediaStreamSourceExtraData(); | 642 MediaStreamSource* source_data = new MediaStreamSource(); |
| 748 | 643 |
| 749 // Create a LocalAudioSource object which holds audio options. | 644 // Create a LocalAudioSource object which holds audio options. |
| 750 // SetLocalAudioSource() affects core audio parts in third_party/Libjingle. | 645 // SetLocalAudioSource() affects core audio parts in third_party/Libjingle. |
| 751 source_data->SetLocalAudioSource(CreateLocalAudioSource(constraints).get()); | 646 source_data->SetLocalAudioSource(CreateLocalAudioSource(constraints).get()); |
| 752 source->setExtraData(source_data); | 647 source->setExtraData(source_data); |
| 753 | 648 |
| 754 // Use WebAudio as the source instead of the default source. | 649 // Use WebAudio as the source instead of the default source. |
| 755 source->addAudioConsumer(webaudio_capturer_source.get()); | 650 source->addAudioConsumer(webaudio_capturer_source.get()); |
| 756 | 651 |
| 757 return webaudio_capturer_source; | 652 return webaudio_capturer_source; |
| (...skipping 282 matching lines...) |
| 1040 if (!aec_dump_file_stream) { | 935 if (!aec_dump_file_stream) { |
| 1041 VLOG(1) << "Could not open AEC dump file."; | 936 VLOG(1) << "Could not open AEC dump file."; |
| 1042 base::ClosePlatformFile(aec_dump_file); | 937 base::ClosePlatformFile(aec_dump_file); |
| 1043 } else { | 938 } else { |
| 1044 // |pc_factory_| takes ownership of |aec_dump_file_stream|. | 939 // |pc_factory_| takes ownership of |aec_dump_file_stream|. |
| 1045 pc_factory_->StartAecDump(aec_dump_file_stream); | 940 pc_factory_->StartAecDump(aec_dump_file_stream); |
| 1046 } | 941 } |
| 1047 } | 942 } |
| 1048 | 943 |
| 1049 } // namespace content | 944 } // namespace content |
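
For readers following the CL, here is a minimal caller-side sketch of how the two new factory methods in the right-hand column, CreateVideoCapturer() and CreateLocalVideoSource(), could be strung together to replace the old session-id based CreateLocalVideoSource(). The CreateVideoSourceForDevice() helper and its wiring are assumptions for illustration only, not code from this change:

```cpp
// Sketch only: assumes the MediaStreamDependencyFactory interface exactly as it
// appears in the new (right-hand) column of this diff.
// CreateVideoSourceForDevice() is a hypothetical helper, not part of the CL.
#include "content/renderer/media/media_stream_dependency_factory.h"

namespace content {

scoped_refptr<webrtc::VideoSourceInterface> CreateVideoSourceForDevice(
    MediaStreamDependencyFactory* factory,
    const StreamDeviceInfo& device_info,
    const blink::WebMediaConstraints& constraints) {
  // Wrap the capture session in a cricket::VideoCapturer; the factory decides
  // internally whether this is a screencast based on the device type.
  cricket::VideoCapturer* capturer = factory->CreateVideoCapturer(device_info);
  // The returned video source takes ownership of |capturer|, as noted in
  // CreateLocalVideoSource() above.
  return factory->CreateLocalVideoSource(capturer, constraints);
}

}  // namespace content
```

The split this sketch exercises appears to be the point of the change: capturer construction (device/session specifics) is now separate from source construction (constraint handling), so a caller can make the capturer decision itself rather than passing a session id into the factory.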