| OLD | NEW |
| 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "content/renderer/media/media_stream_dependency_factory.h" | 5 #include "content/renderer/media/media_stream_dependency_factory.h" |
| 6 | 6 |
| 7 #include <vector> | 7 #include <vector> |
| 8 | 8 |
| 9 #include "base/synchronization/waitable_event.h" | 9 #include "base/synchronization/waitable_event.h" |
| 10 #include "base/utf_string_conversions.h" | 10 #include "base/utf_string_conversions.h" |
| (...skipping 172 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 183 return NULL; | 183 return NULL; |
| 184 | 184 |
| 185 return new RTCPeerConnectionHandler(client, this); | 185 return new RTCPeerConnectionHandler(client, this); |
| 186 } | 186 } |
| 187 | 187 |
| 188 void MediaStreamDependencyFactory::CreateNativeMediaSources( | 188 void MediaStreamDependencyFactory::CreateNativeMediaSources( |
| 189 const WebKit::WebMediaConstraints& audio_constraints, | 189 const WebKit::WebMediaConstraints& audio_constraints, |
| 190 const WebKit::WebMediaConstraints& video_constraints, | 190 const WebKit::WebMediaConstraints& video_constraints, |
| 191 WebKit::WebMediaStreamDescriptor* description, | 191 WebKit::WebMediaStreamDescriptor* description, |
| 192 const MediaSourcesCreatedCallback& sources_created) { | 192 const MediaSourcesCreatedCallback& sources_created) { |
| 193 DVLOG(1) << "MediaStreamDependencyFactory::CreateNativeMediaSources()"; |
| 193 if (!EnsurePeerConnectionFactory()) { | 194 if (!EnsurePeerConnectionFactory()) { |
| 194 sources_created.Run(description, false); | 195 sources_created.Run(description, false); |
| 195 return; | 196 return; |
| 196 } | 197 } |
| 197 | 198 |
| 198 // |source_observer| cleans itself up when it has completed | 199 // |source_observer| cleans itself up when it has completed |
| 199 // source_observer->StartObservering. | 200 // source_observer->StartObservering. |
| 200 SourceStateObserver* source_observer = | 201 SourceStateObserver* source_observer = |
| 201 new SourceStateObserver(description, sources_created); | 202 new SourceStateObserver(description, sources_created); |
| 202 | 203 |
| 203 // TODO(perkj): Implement local audio sources. | |
| 204 | |
| 205 // Create local video sources. | 204 // Create local video sources. |
| 206 RTCMediaConstraints native_video_constraints(video_constraints); | 205 RTCMediaConstraints native_video_constraints(video_constraints); |
| 207 WebKit::WebVector<WebKit::WebMediaStreamComponent> video_components; | 206 WebKit::WebVector<WebKit::WebMediaStreamComponent> video_components; |
| 208 description->videoSources(video_components); | 207 description->videoSources(video_components); |
| 209 for (size_t i = 0; i < video_components.size(); ++i) { | 208 for (size_t i = 0; i < video_components.size(); ++i) { |
| 210 const WebKit::WebMediaStreamSource& source = video_components[i].source(); | 209 const WebKit::WebMediaStreamSource& source = video_components[i].source(); |
| 211 MediaStreamSourceExtraData* source_data = | 210 MediaStreamSourceExtraData* source_data = |
| 212 static_cast<MediaStreamSourceExtraData*>(source.extraData()); | 211 static_cast<MediaStreamSourceExtraData*>(source.extraData()); |
| 213 if (!source_data) { | 212 if (!source_data) { |
| 214 // TODO(perkj): Implement support for sources from remote MediaStreams. | 213 // TODO(perkj): Implement support for sources from remote MediaStreams. |
| 215 NOTIMPLEMENTED(); | 214 NOTIMPLEMENTED(); |
| 216 continue; | 215 continue; |
| 217 } | 216 } |
| 218 const bool is_screencast = (source_data->device_info().device.type == | 217 const bool is_screencast = (source_data->device_info().device.type == |
| 219 content::MEDIA_TAB_VIDEO_CAPTURE); | 218 content::MEDIA_TAB_VIDEO_CAPTURE); |
| 220 source_data->SetVideoSource( | 219 source_data->SetVideoSource( |
| 221 CreateVideoSource(source_data->device_info().session_id, | 220 CreateVideoSource(source_data->device_info().session_id, |
| 222 is_screencast, | 221 is_screencast, |
| 223 &native_video_constraints)); | 222 &native_video_constraints)); |
| 224 source_observer->AddSource(source_data->video_source()); | 223 source_observer->AddSource(source_data->video_source()); |
| 225 } | 224 } |
| 225 |
| 226 // Do additional source initialization if the audio source is a valid |
| 227 // microphone. |
| 228 WebKit::WebVector<WebKit::WebMediaStreamComponent> audio_components; |
| 229 description->audioSources(audio_components); |
| 230 for (size_t i = 0; i < audio_components.size(); ++i) { |
| 231 const WebKit::WebMediaStreamSource& source = audio_components[i].source(); |
| 232 MediaStreamSourceExtraData* source_data = |
| 233 static_cast<MediaStreamSourceExtraData*>(source.extraData()); |
| 234 if (!source_data) { |
| 235 // TODO(henrika): Implement support for sources from remote MediaStreams. |
| 236 NOTIMPLEMENTED(); |
| 237 continue; |
| 238 } |
| 239 |
| 240 const StreamDeviceInfo device_info = source_data->device_info(); |
| 241 if (device_info.device.type == content::MEDIA_DEVICE_AUDIO_CAPTURE) { |
| 242 if (!InitializeAudioSource(device_info)) { |
| 243 DLOG(WARNING) << "Unsupported audio source"; |
| 244 sources_created.Run(description, false); |
| 245 return; |
| 246 } |
| 247 } |
| 248 } |
| 249 |
| 226 source_observer->StartObservering(); | 250 source_observer->StartObservering(); |
| 227 } | 251 } |
| 228 | 252 |
| 229 void MediaStreamDependencyFactory::CreateNativeLocalMediaStream( | 253 void MediaStreamDependencyFactory::CreateNativeLocalMediaStream( |
| 230 WebKit::WebMediaStreamDescriptor* description) { | 254 WebKit::WebMediaStreamDescriptor* description) { |
| 255 DVLOG(1) << "MediaStreamDependencyFactory::CreateNativeLocalMediaStream()"; |
| 231 DCHECK(PeerConnectionFactoryCreated()); | 256 DCHECK(PeerConnectionFactoryCreated()); |
| 232 | 257 |
| 233 std::string label = UTF16ToUTF8(description->label()); | 258 std::string label = UTF16ToUTF8(description->label()); |
| 234 scoped_refptr<webrtc::LocalMediaStreamInterface> native_stream = | 259 scoped_refptr<webrtc::LocalMediaStreamInterface> native_stream = |
| 235 CreateLocalMediaStream(label); | 260 CreateLocalMediaStream(label); |
| 236 | 261 |
| 237 // Add audio tracks. | 262 // Add audio tracks. |
| 238 WebKit::WebVector<WebKit::WebMediaStreamComponent> audio_components; | 263 WebKit::WebVector<WebKit::WebMediaStreamComponent> audio_components; |
| 239 description->audioSources(audio_components); | 264 description->audioSources(audio_components); |
| 240 for (size_t i = 0; i < audio_components.size(); ++i) { | 265 for (size_t i = 0; i < audio_components.size(); ++i) { |
| 241 const WebKit::WebMediaStreamSource& source = audio_components[i].source(); | 266 const WebKit::WebMediaStreamSource& source = audio_components[i].source(); |
| 242 MediaStreamSourceExtraData* source_data = | 267 MediaStreamSourceExtraData* source_data = |
| 243 static_cast<MediaStreamSourceExtraData*>(source.extraData()); | 268 static_cast<MediaStreamSourceExtraData*>(source.extraData()); |
| 244 if (!source_data) { | 269 if (!source_data) { |
| 245 // TODO(perkj): Implement support for sources from remote MediaStreams. | 270 // TODO(perkj): Implement support for sources from remote MediaStreams. |
| 246 NOTIMPLEMENTED(); | 271 NOTIMPLEMENTED(); |
| 247 continue; | 272 continue; |
| 248 } | 273 } |
| 249 // TODO(perkj): Refactor the creation of audio tracks to use a proper | 274 // TODO(perkj): Refactor the creation of audio tracks to use a proper |
| 250 // interface for receiving audio input data. Currently NULL is passed since | 275 // interface for receiving audio input data. Currently NULL is passed since |
| 251 // the |audio_device| is the wrong class and is unused. | 276 // the |audio_device| is the wrong class and is unused. |
| 252 scoped_refptr<webrtc::LocalAudioTrackInterface> audio_track( | 277 scoped_refptr<webrtc::LocalAudioTrackInterface> audio_track( |
| 253 CreateLocalAudioTrack(UTF16ToUTF8(source.id()), NULL)); | 278 CreateLocalAudioTrack(UTF16ToUTF8(source.id()), NULL)); |
| 254 native_stream->AddTrack(audio_track); | 279 native_stream->AddTrack(audio_track); |
| 255 audio_track->set_enabled(audio_components[i].isEnabled()); | 280 audio_track->set_enabled(audio_components[i].isEnabled()); |
| 256 // TODO(xians): This set the source of all audio tracks to the same | |
| 257 // microphone. Implement support for setting the source per audio track | |
| 258 // instead. | |
| 259 SetAudioDeviceSessionId(source_data->device_info().session_id); | |
| 260 } | 281 } |
| 261 | 282 |
| 262 // Add video tracks. | 283 // Add video tracks. |
| 263 WebKit::WebVector<WebKit::WebMediaStreamComponent> video_components; | 284 WebKit::WebVector<WebKit::WebMediaStreamComponent> video_components; |
| 264 description->videoSources(video_components); | 285 description->videoSources(video_components); |
| 265 for (size_t i = 0; i < video_components.size(); ++i) { | 286 for (size_t i = 0; i < video_components.size(); ++i) { |
| 266 const WebKit::WebMediaStreamSource& source = video_components[i].source(); | 287 const WebKit::WebMediaStreamSource& source = video_components[i].source(); |
| 267 MediaStreamSourceExtraData* source_data = | 288 MediaStreamSourceExtraData* source_data = |
| 268 static_cast<MediaStreamSourceExtraData*>(source.extraData()); | 289 static_cast<MediaStreamSourceExtraData*>(source.extraData()); |
| 269 if (!source_data || !source_data->video_source()) { | 290 if (!source_data || !source_data->video_source()) { |
| (...skipping 18 matching lines...) Expand all Loading... |
| 288 WebKit::WebMediaStreamDescriptor* description, | 309 WebKit::WebMediaStreamDescriptor* description, |
| 289 const MediaStreamExtraData::StreamStopCallback& stream_stop) { | 310 const MediaStreamExtraData::StreamStopCallback& stream_stop) { |
| 290 CreateNativeLocalMediaStream(description); | 311 CreateNativeLocalMediaStream(description); |
| 291 | 312 |
| 292 MediaStreamExtraData* extra_data = | 313 MediaStreamExtraData* extra_data = |
| 293 static_cast<MediaStreamExtraData*>(description->extraData()); | 314 static_cast<MediaStreamExtraData*>(description->extraData()); |
| 294 extra_data->SetLocalStreamStopCallback(stream_stop); | 315 extra_data->SetLocalStreamStopCallback(stream_stop); |
| 295 } | 316 } |
| 296 | 317 |
| 297 bool MediaStreamDependencyFactory::CreatePeerConnectionFactory() { | 318 bool MediaStreamDependencyFactory::CreatePeerConnectionFactory() { |
| 319 DVLOG(1) << "MediaStreamDependencyFactory::CreatePeerConnectionFactory()"; |
| 298 if (!pc_factory_.get()) { | 320 if (!pc_factory_.get()) { |
| 299 DCHECK(!audio_device_); | 321 DCHECK(!audio_device_); |
| 300 audio_device_ = new WebRtcAudioDeviceImpl(); | 322 audio_device_ = new WebRtcAudioDeviceImpl(); |
| 301 scoped_refptr<webrtc::PeerConnectionFactoryInterface> factory( | 323 scoped_refptr<webrtc::PeerConnectionFactoryInterface> factory( |
| 302 webrtc::CreatePeerConnectionFactory(worker_thread_, | 324 webrtc::CreatePeerConnectionFactory(worker_thread_, |
| 303 signaling_thread_, | 325 signaling_thread_, |
| 304 audio_device_)); | 326 audio_device_)); |
| 305 if (factory.get()) | 327 if (factory.get()) |
| 306 pc_factory_ = factory; | 328 pc_factory_ = factory; |
| 307 else | 329 else |
| (...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 345 const webrtc::MediaConstraintsInterface* constraints) { | 367 const webrtc::MediaConstraintsInterface* constraints) { |
| 346 RtcVideoCapturer* capturer = new RtcVideoCapturer( | 368 RtcVideoCapturer* capturer = new RtcVideoCapturer( |
| 347 video_session_id, vc_manager_.get(), is_screencast); | 369 video_session_id, vc_manager_.get(), is_screencast); |
| 348 | 370 |
| 349 // The video source takes ownership of |capturer|. | 371 // The video source takes ownership of |capturer|. |
| 350 scoped_refptr<webrtc::VideoSourceInterface> source = | 372 scoped_refptr<webrtc::VideoSourceInterface> source = |
| 351 pc_factory_->CreateVideoSource(capturer, constraints).get(); | 373 pc_factory_->CreateVideoSource(capturer, constraints).get(); |
| 352 return source; | 374 return source; |
| 353 } | 375 } |
| 354 | 376 |
| 377 bool MediaStreamDependencyFactory::InitializeAudioSource( |
| 378 const StreamDeviceInfo& device_info) { |
| 379 DVLOG(1) << "MediaStreamDependencyFactory::InitializeAudioSource()"; |
| 380 const MediaStreamDevice device = device_info.device; |
| 381 |
| 382 // Initialize the source using audio parameters for the selected |
| 383 // capture device. |
| 384 WebRtcAudioCapturer* capturer = GetWebRtcAudioDevice()->capturer(); |
| 385 // TODO(henrika): refactor \content\public\common\media_stream_request.h |
| 386 // to allow dependency of media::ChannelLayout and avoid static_cast. |
| 387 if (!capturer->Initialize( |
| 388 static_cast<media::ChannelLayout>(device.channel_layout), |
| 389 device.sample_rate)) |
| 390 return false; |
| 391 |
| 392 // Specify which capture device to use. The acquired session id is used |
| 393 // for identification. |
| 394 // TODO(henrika): the current design does not support a unique |
| 395 // for each audio track. |
| 396 if (device_info.session_id <= 0) |
| 397 return false; |
| 398 |
| 399 capturer->SetDevice(device_info.session_id); |
| 400 return true; |
| 401 } |
| 402 |
| 355 scoped_refptr<webrtc::VideoTrackInterface> | 403 scoped_refptr<webrtc::VideoTrackInterface> |
| 356 MediaStreamDependencyFactory::CreateLocalVideoTrack( | 404 MediaStreamDependencyFactory::CreateLocalVideoTrack( |
| 357 const std::string& label, | 405 const std::string& label, |
| 358 webrtc::VideoSourceInterface* source) { | 406 webrtc::VideoSourceInterface* source) { |
| 359 return pc_factory_->CreateVideoTrack(label, source).get(); | 407 return pc_factory_->CreateVideoTrack(label, source).get(); |
| 360 } | 408 } |
| 361 | 409 |
| 362 scoped_refptr<webrtc::LocalAudioTrackInterface> | 410 scoped_refptr<webrtc::LocalAudioTrackInterface> |
| 363 MediaStreamDependencyFactory::CreateLocalAudioTrack( | 411 MediaStreamDependencyFactory::CreateLocalAudioTrack( |
| 364 const std::string& label, | 412 const std::string& label, |
| (...skipping 12 matching lines...) Expand all Loading... |
| 377 int sdp_mline_index, | 425 int sdp_mline_index, |
| 378 const std::string& sdp) { | 426 const std::string& sdp) { |
| 379 return webrtc::CreateIceCandidate(sdp_mid, sdp_mline_index, sdp); | 427 return webrtc::CreateIceCandidate(sdp_mid, sdp_mline_index, sdp); |
| 380 } | 428 } |
| 381 | 429 |
| 382 WebRtcAudioDeviceImpl* | 430 WebRtcAudioDeviceImpl* |
| 383 MediaStreamDependencyFactory::GetWebRtcAudioDevice() { | 431 MediaStreamDependencyFactory::GetWebRtcAudioDevice() { |
| 384 return audio_device_; | 432 return audio_device_; |
| 385 } | 433 } |
| 386 | 434 |
| 387 void MediaStreamDependencyFactory::SetAudioDeviceSessionId(int session_id) { | |
| 388 audio_device_->SetSessionId(session_id); | |
| 389 } | |
| 390 | |
| 391 void MediaStreamDependencyFactory::InitializeWorkerThread( | 435 void MediaStreamDependencyFactory::InitializeWorkerThread( |
| 392 talk_base::Thread** thread, | 436 talk_base::Thread** thread, |
| 393 base::WaitableEvent* event) { | 437 base::WaitableEvent* event) { |
| 394 jingle_glue::JingleThreadWrapper::EnsureForCurrentMessageLoop(); | 438 jingle_glue::JingleThreadWrapper::EnsureForCurrentMessageLoop(); |
| 395 jingle_glue::JingleThreadWrapper::current()->set_send_allowed(true); | 439 jingle_glue::JingleThreadWrapper::current()->set_send_allowed(true); |
| 396 *thread = jingle_glue::JingleThreadWrapper::current(); | 440 *thread = jingle_glue::JingleThreadWrapper::current(); |
| 397 event->Signal(); | 441 event->Signal(); |
| 398 } | 442 } |
| 399 | 443 |
| 400 void MediaStreamDependencyFactory::CreateIpcNetworkManagerOnWorkerThread( | 444 void MediaStreamDependencyFactory::CreateIpcNetworkManagerOnWorkerThread( |
| (...skipping 78 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 479 // processed before returning. We wait for the above task to finish before | 523 // processed before returning. We wait for the above task to finish before |
| 480 // letting the function continue to avoid any potential race issues. | 524 // letting the function continue to avoid any potential race issues. |
| 481 chrome_worker_thread_.Stop(); | 525 chrome_worker_thread_.Stop(); |
| 482 } else { | 526 } else { |
| 483 NOTREACHED() << "Worker thread not running."; | 527 NOTREACHED() << "Worker thread not running."; |
| 484 } | 528 } |
| 485 } | 529 } |
| 486 } | 530 } |
| 487 | 531 |
| 488 } // namespace content | 532 } // namespace content |
| OLD | NEW |