Chromium Code Reviews| Index: content/renderer/media/media_stream_dependency_factory.cc |
| diff --git a/content/renderer/media/media_stream_dependency_factory.cc b/content/renderer/media/media_stream_dependency_factory.cc |
| index ab436ee4555ad42bc8b1079aa205e039e33df64e..c308248cecdaac4a284cb9fc8aebcf13e6821d42 100644 |
| --- a/content/renderer/media/media_stream_dependency_factory.cc |
| +++ b/content/renderer/media/media_stream_dependency_factory.cc |
| @@ -12,8 +12,9 @@ |
| #include "content/common/media/media_stream_messages.h" |
| #include "content/public/common/content_switches.h" |
| #include "content/renderer/media/media_stream_audio_processor_options.h" |
| -#include "content/renderer/media/media_stream_source_extra_data.h" |
| +#include "content/renderer/media/media_stream_audio_source.h" |
| #include "content/renderer/media/media_stream_track_extra_data.h" |
| +#include "content/renderer/media/media_stream_video_source.h" |
| #include "content/renderer/media/media_stream_video_track.h" |
| #include "content/renderer/media/peer_connection_identity_service.h" |
| #include "content/renderer/media/rtc_media_constraints.h" |
| @@ -119,82 +120,6 @@ class P2PPortAllocatorFactory : public webrtc::PortAllocatorFactoryInterface { |
| blink::WebFrame* web_frame_; |
| }; |
| -// SourceStateObserver is a help class used for observing the startup state |
| -// transition of webrtc media sources such as a camera or microphone. |
| -// An instance of the object deletes itself after use. |
| -// Usage: |
| -// 1. Create an instance of the object with the blink::WebMediaStream |
| -// the observed sources belongs to a callback. |
| -// 2. Add the sources to the observer using AddSource. |
| -// 3. Call StartObserving() |
| -// 4. The callback will be triggered when all sources have transitioned from |
| -// webrtc::MediaSourceInterface::kInitializing. |
| -class SourceStateObserver : public webrtc::ObserverInterface, |
| - public base::NonThreadSafe { |
| - public: |
| - SourceStateObserver( |
| - blink::WebMediaStream* web_stream, |
| - const MediaStreamDependencyFactory::MediaSourcesCreatedCallback& callback) |
| - : web_stream_(web_stream), |
| - ready_callback_(callback), |
| - live_(true) { |
| - } |
| - |
| - void AddSource(webrtc::MediaSourceInterface* source) { |
| - DCHECK(CalledOnValidThread()); |
| - switch (source->state()) { |
| - case webrtc::MediaSourceInterface::kInitializing: |
| - sources_.push_back(source); |
| - source->RegisterObserver(this); |
| - break; |
| - case webrtc::MediaSourceInterface::kLive: |
| - // The source is already live so we don't need to wait for it. |
| - break; |
| - case webrtc::MediaSourceInterface::kEnded: |
| - // The source have already failed. |
| - live_ = false; |
| - break; |
| - default: |
| - NOTREACHED(); |
| - } |
| - } |
| - |
| - void StartObservering() { |
| - DCHECK(CalledOnValidThread()); |
| - CheckIfSourcesAreLive(); |
| - } |
| - |
| - virtual void OnChanged() OVERRIDE { |
| - DCHECK(CalledOnValidThread()); |
| - CheckIfSourcesAreLive(); |
| - } |
| - |
| - private: |
| - void CheckIfSourcesAreLive() { |
| - ObservedSources::iterator it = sources_.begin(); |
| - while (it != sources_.end()) { |
| - if ((*it)->state() != webrtc::MediaSourceInterface::kInitializing) { |
| - live_ &= (*it)->state() == webrtc::MediaSourceInterface::kLive; |
| - (*it)->UnregisterObserver(this); |
| - it = sources_.erase(it); |
| - } else { |
| - ++it; |
| - } |
| - } |
| - if (sources_.empty()) { |
| - ready_callback_.Run(web_stream_, live_); |
| - delete this; |
| - } |
| - } |
| - |
| - blink::WebMediaStream* web_stream_; |
| - MediaStreamDependencyFactory::MediaSourcesCreatedCallback ready_callback_; |
| - bool live_; |
| - typedef std::vector<scoped_refptr<webrtc::MediaSourceInterface> > |
| - ObservedSources; |
| - ObservedSources sources_; |
| -}; |
| - |
| MediaStreamDependencyFactory::MediaStreamDependencyFactory( |
| P2PSocketDispatcher* p2p_socket_dispatcher) |
| : network_manager_(NULL), |
| @@ -219,134 +144,89 @@ MediaStreamDependencyFactory::CreateRTCPeerConnectionHandler( |
| // webKitRTCPeerConnection. |
| UpdateWebRTCMethodCount(WEBKIT_RTC_PEER_CONNECTION); |
| - if (!EnsurePeerConnectionFactory()) |
| - return NULL; |
| - |
| return new RTCPeerConnectionHandler(client, this); |
| } |
| -void MediaStreamDependencyFactory::CreateNativeMediaSources( |
| +bool MediaStreamDependencyFactory::InitializeMediaStreamAudioSource( |
| int render_view_id, |
| const blink::WebMediaConstraints& audio_constraints, |
| - const blink::WebMediaConstraints& video_constraints, |
| - blink::WebMediaStream* web_stream, |
| - const MediaSourcesCreatedCallback& sources_created) { |
| - DVLOG(1) << "MediaStreamDependencyFactory::CreateNativeMediaSources()"; |
| - if (!EnsurePeerConnectionFactory()) { |
| - sources_created.Run(web_stream, false); |
| - return; |
| - } |
| - |
| - // |source_observer| clean up itself when it has completed |
| - // source_observer->StartObservering. |
| - SourceStateObserver* source_observer = |
| - new SourceStateObserver(web_stream, sources_created); |
| - |
| - // Create local video sources. |
| - RTCMediaConstraints native_video_constraints(video_constraints); |
| - blink::WebVector<blink::WebMediaStreamTrack> video_tracks; |
| - web_stream->videoTracks(video_tracks); |
| - for (size_t i = 0; i < video_tracks.size(); ++i) { |
| - const blink::WebMediaStreamSource& source = video_tracks[i].source(); |
| - MediaStreamSourceExtraData* source_data = |
| - static_cast<MediaStreamSourceExtraData*>(source.extraData()); |
| - |
| - // Check if the source has already been created. This happens when the same |
| - // source is used in multiple MediaStreams as a result of calling |
| - // getUserMedia. |
| - if (source_data->video_source()) |
| - continue; |
| - |
| - const bool is_screencast = |
| - source_data->device_info().device.type == MEDIA_TAB_VIDEO_CAPTURE || |
| - source_data->device_info().device.type == MEDIA_DESKTOP_VIDEO_CAPTURE; |
| - source_data->SetVideoSource( |
| - CreateLocalVideoSource(source_data->device_info().session_id, |
| - is_screencast, |
| - &native_video_constraints).get()); |
| - source_observer->AddSource(source_data->video_source()); |
| - } |
| + const blink::WebMediaStreamSource& audio_source) { |
| + DVLOG(1) << "InitializeMediaStreamAudioSource()"; |
| // Do additional source initialization if the audio source is a valid |
| // microphone or tab audio. |
| RTCMediaConstraints native_audio_constraints(audio_constraints); |
| ApplyFixedAudioConstraints(&native_audio_constraints); |
| - blink::WebVector<blink::WebMediaStreamTrack> audio_tracks; |
| - web_stream->audioTracks(audio_tracks); |
| - for (size_t i = 0; i < audio_tracks.size(); ++i) { |
| - const blink::WebMediaStreamSource& source = audio_tracks[i].source(); |
| - MediaStreamSourceExtraData* source_data = |
| - static_cast<MediaStreamSourceExtraData*>(source.extraData()); |
| - |
| - // Check if the source has already been created. This happens when the same |
| - // source is used in multiple MediaStreams as a result of calling |
| - // getUserMedia. |
| - if (source_data->local_audio_source()) |
| - continue; |
| - |
| - // TODO(xians): Create a new capturer for difference microphones when we |
| - // support multiple microphones. See issue crbug/262117 . |
| - StreamDeviceInfo device_info = source_data->device_info(); |
| - RTCMediaConstraints constraints = native_audio_constraints; |
| - |
| - // If any platform effects are available, check them against the |
| - // constraints. Disable effects to match false constraints, but if a |
| - // constraint is true, set the constraint to false to later disable the |
| - // software effect. |
| - int effects = device_info.device.input.effects; |
| - if (effects != media::AudioParameters::NO_EFFECTS) { |
| - for (size_t i = 0; i < ARRAYSIZE_UNSAFE(kConstraintEffectMap); ++i) { |
| - bool value; |
| - if (!webrtc::FindConstraint(&constraints, |
| - kConstraintEffectMap[i].constraint, &value, NULL) || !value) { |
| - // If the constraint is false, or does not exist, disable the platform |
| - // effect. |
| - effects &= ~kConstraintEffectMap[i].effect; |
| - DVLOG(1) << "Disabling constraint: " |
| - << kConstraintEffectMap[i].constraint; |
| - } else if (effects & kConstraintEffectMap[i].effect) { |
| - // If the constraint is true, leave the platform effect enabled, and |
| - // set the constraint to false to later disable the software effect. |
| - constraints.AddMandatory(kConstraintEffectMap[i].constraint, |
| - webrtc::MediaConstraintsInterface::kValueFalse, true); |
| - DVLOG(1) << "Disabling platform effect: " |
| - << kConstraintEffectMap[i].constraint; |
| - } |
| + |
| + MediaStreamAudioSource* source_data = |
| + static_cast<MediaStreamAudioSource*>(audio_source.extraData()); |
| + |
| + // TODO(xians): Create a new capturer for different microphones when we |
| + // support multiple microphones. See issue crbug/262117. |
| + StreamDeviceInfo device_info = source_data->device_info(); |
| + RTCMediaConstraints constraints = native_audio_constraints; |
| + |
| + // If any platform effects are available, check them against the |
| + // constraints. Disable effects to match false constraints, but if a |
| + // constraint is true, set the constraint to false to later disable the |
| + // software effect. |
| + int effects = device_info.device.input.effects; |
| + if (effects != media::AudioParameters::NO_EFFECTS) { |
| + for (size_t i = 0; i < ARRAYSIZE_UNSAFE(kConstraintEffectMap); ++i) { |
| + bool value; |
| + if (!webrtc::FindConstraint(&constraints, |
| + kConstraintEffectMap[i].constraint, &value, |
| + NULL) || !value) { |
| + // If the constraint is false, or does not exist, disable the platform |
| + // effect. |
| + effects &= ~kConstraintEffectMap[i].effect; |
| + DVLOG(1) << "Disabling constraint: " |
| + << kConstraintEffectMap[i].constraint; |
| + } else if (effects & kConstraintEffectMap[i].effect) { |
| + // If the constraint is true, leave the platform effect enabled, and |
| + // set the constraint to false to later disable the software effect. |
| + constraints.AddMandatory(kConstraintEffectMap[i].constraint, |
| + webrtc::MediaConstraintsInterface::kValueFalse, |
| + true); |
| + DVLOG(1) << "Disabling platform effect: " |
| + << kConstraintEffectMap[i].constraint; |
| } |
| - device_info.device.input.effects = effects; |
| } |
| + device_info.device.input.effects = effects; |
| + } |
| + |
| + scoped_refptr<WebRtcAudioCapturer> capturer( |
| + CreateAudioCapturer(render_view_id, device_info, audio_constraints)); |
| + if (!capturer.get()) { |
| + DLOG(WARNING) << "Failed to create the capturer for device " |
| + << device_info.device.id; |
| + // NOTE(review): the old TODO here referred to |source_observer|, but |
| + // SourceStateObserver is removed by this change; failure is now |
| + // reported synchronously via the false return value, so the |
| + // multiple-callback concern no longer applies. |
| + return false; |
| + } |
| + source_data->SetAudioCapturer(capturer); |
| - scoped_refptr<WebRtcAudioCapturer> capturer( |
| - CreateAudioCapturer(render_view_id, device_info, audio_constraints)); |
| - if (!capturer.get()) { |
| - DLOG(WARNING) << "Failed to create the capturer for device " |
| - << device_info.device.id; |
| - sources_created.Run(web_stream, false); |
| - // TODO(xians): Don't we need to check if source_observer is observing |
| - // something? If not, then it looks like we have a leak here. |
| - // OTOH, if it _is_ observing something, then the callback might |
| - // be called multiple times which is likely also a bug. |
| - return; |
| - } |
| - source_data->SetAudioCapturer(capturer); |
| + // Creates a LocalAudioSource object which holds audio options. |
| + // TODO(xians): The option should apply to the track instead of the source. |
| + source_data->SetLocalAudioSource( |
| + CreateLocalAudioSource(&constraints).get()); |
| - // Creates a LocalAudioSource object which holds audio options. |
| - // TODO(xians): The option should apply to the track instead of the source. |
| - source_data->SetLocalAudioSource( |
| - CreateLocalAudioSource(&constraints).get()); |
| - source_observer->AddSource(source_data->local_audio_source()); |
| - } |
| + return true; |
| +} |
| - source_observer->StartObservering(); |
| +cricket::VideoCapturer* MediaStreamDependencyFactory::CreateVideoCapturer( |
| + const StreamDeviceInfo& info) { |
| + bool is_screencast = |
| + info.device.type == MEDIA_TAB_VIDEO_CAPTURE || |
| + info.device.type == MEDIA_DESKTOP_VIDEO_CAPTURE; |
| + return new RtcVideoCapturer(info.session_id, is_screencast); |
| } |
| void MediaStreamDependencyFactory::CreateNativeLocalMediaStream( |
| blink::WebMediaStream* web_stream) { |
| DVLOG(1) << "MediaStreamDependencyFactory::CreateNativeLocalMediaStream()"; |
| - if (!EnsurePeerConnectionFactory()) { |
| - DVLOG(1) << "EnsurePeerConnectionFactory() failed!"; |
| - return; |
| - } |
| std::string label = base::UTF16ToUTF8(web_stream->id()); |
| scoped_refptr<webrtc::MediaStreamInterface> native_stream = |
| @@ -385,8 +265,8 @@ MediaStreamDependencyFactory::CreateNativeAudioMediaStreamTrack( |
| const blink::WebMediaStreamTrack& track) { |
| blink::WebMediaStreamSource source = track.source(); |
| DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeAudio); |
| - MediaStreamSourceExtraData* source_data = |
| - static_cast<MediaStreamSourceExtraData*>(source.extraData()); |
| + MediaStreamAudioSource* source_data = |
| + static_cast<MediaStreamAudioSource*>(source.extraData()); |
| // In the future the constraints will belong to the track itself, but |
| // right now they're on the source, so we fetch them from there. |
| @@ -403,7 +283,7 @@ MediaStreamDependencyFactory::CreateNativeAudioMediaStreamTrack( |
| // Create a specific capturer for each WebAudio consumer. |
| webaudio_source = CreateWebAudioSource(&source, track_constraints); |
| source_data = |
| - static_cast<MediaStreamSourceExtraData*>(source.extraData()); |
| + static_cast<MediaStreamAudioSource*>(source.extraData()); |
| } else { |
| // TODO(perkj): Implement support for sources from |
| // remote MediaStreams. |
| @@ -432,10 +312,12 @@ MediaStreamDependencyFactory::CreateNativeAudioMediaStreamTrack( |
| scoped_refptr<webrtc::VideoTrackInterface> |
| MediaStreamDependencyFactory::CreateNativeVideoMediaStreamTrack( |
| const blink::WebMediaStreamTrack& track) { |
| + DCHECK(track.extraData() == NULL); |
| blink::WebMediaStreamSource source = track.source(); |
| DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeVideo); |
| - MediaStreamSourceExtraData* source_data = |
| - static_cast<MediaStreamSourceExtraData*>(source.extraData()); |
| + |
| + MediaStreamVideoSource* source_data = |
| + static_cast<MediaStreamVideoSource*>(source.extraData()); |
| if (!source_data) { |
| // TODO(perkj): Implement support for sources from |
| @@ -444,14 +326,16 @@ MediaStreamDependencyFactory::CreateNativeVideoMediaStreamTrack( |
| return NULL; |
| } |
| - std::string track_id = base::UTF16ToUTF8(track.id()); |
| - scoped_refptr<webrtc::VideoTrackInterface> video_track( |
| - CreateLocalVideoTrack(track_id, source_data->video_source())); |
| - AddNativeTrackToBlinkTrack(video_track.get(), track, true); |
| + // Create native track from the source. |
| + scoped_refptr<webrtc::VideoTrackInterface> webrtc_track = |
| + CreateLocalVideoTrack(track.id().utf8(), source_data->GetAdapter()); |
| - video_track->set_enabled(track.isEnabled()); |
| + bool local_track = true; |
| + AddNativeTrackToBlinkTrack(webrtc_track, track, local_track); |
| - return video_track; |
| + webrtc_track->set_enabled(track.isEnabled()); |
| + |
| + return webrtc_track; |
| } |
| void MediaStreamDependencyFactory::CreateNativeMediaStreamTrack( |
| @@ -563,15 +447,70 @@ scoped_refptr<webrtc::VideoSourceInterface> |
| cricket::VideoCapturer* capturer, |
| const webrtc::MediaConstraintsInterface* constraints) { |
| scoped_refptr<webrtc::VideoSourceInterface> source = |
| - pc_factory_->CreateVideoSource(capturer, constraints).get(); |
| + pc_factory()->CreateVideoSource(capturer, constraints).get(); |
| return source; |
| } |
| +const scoped_refptr<webrtc::PeerConnectionFactoryInterface>& |
| +MediaStreamDependencyFactory::pc_factory() { |
| + if (!pc_factory_) |
| + CreatePeerConnectionFactory(); |
|
Ronghua Wu (Left Chromium)
2014/01/23 00:42:43
lazy initialization lg. but how many case do we us
perkj_chrome
2014/01/27 18:47:17
The MediaStreamDependencyFactory is created when t
|
| + CHECK(pc_factory_); |
| + return pc_factory_; |
| +} |
| + |
| bool MediaStreamDependencyFactory::CreatePeerConnectionFactory() { |
| DCHECK(!pc_factory_.get()); |
| DCHECK(!audio_device_.get()); |
| + DCHECK(!signaling_thread_); |
| + DCHECK(!worker_thread_); |
| + DCHECK(!network_manager_); |
| + DCHECK(!socket_factory_); |
| + |
| DVLOG(1) << "MediaStreamDependencyFactory::CreatePeerConnectionFactory()"; |
| + jingle_glue::JingleThreadWrapper::EnsureForCurrentMessageLoop(); |
| + jingle_glue::JingleThreadWrapper::current()->set_send_allowed(true); |
| + signaling_thread_ = jingle_glue::JingleThreadWrapper::current(); |
| + CHECK(signaling_thread_); |
| + |
| + if (!chrome_worker_thread_.IsRunning()) { |
| + if (!chrome_worker_thread_.Start()) { |
| + LOG(ERROR) << "Could not start worker thread"; |
| + signaling_thread_ = NULL; |
| + return false; |
| + } |
| + } |
| + base::WaitableEvent start_worker_event(true, false); |
| + chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind( |
| + &MediaStreamDependencyFactory::InitializeWorkerThread, |
| + base::Unretained(this), |
| + &worker_thread_, |
| + &start_worker_event)); |
| + start_worker_event.Wait(); |
| + CHECK(worker_thread_); |
| + |
| + base::WaitableEvent create_network_manager_event(true, false); |
| + chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind( |
| + &MediaStreamDependencyFactory::CreateIpcNetworkManagerOnWorkerThread, |
| + base::Unretained(this), |
| + &create_network_manager_event)); |
| + create_network_manager_event.Wait(); |
| + |
| + socket_factory_.reset( |
| + new IpcPacketSocketFactory(p2p_socket_dispatcher_.get())); |
| + |
| + // Init SSL, which will be needed by PeerConnection. |
| +#if defined(USE_OPENSSL) |
| + if (!talk_base::InitializeSSL()) { |
| + LOG(ERROR) << "Failed on InitializeSSL."; |
| + return false; |
| + } |
| +#else |
| + // TODO(ronghuawu): Replace this call with InitializeSSL. |
| + net::EnsureNSSSSLInit(); |
| +#endif |
| + |
| scoped_ptr<cricket::WebRtcVideoDecoderFactory> decoder_factory; |
| scoped_ptr<cricket::WebRtcVideoEncoderFactory> encoder_factory; |
| @@ -650,7 +589,7 @@ MediaStreamDependencyFactory::CreatePeerConnection( |
| new PeerConnectionIdentityService( |
| GURL(web_frame->document().url().spec()).GetOrigin()); |
| - return pc_factory_->CreatePeerConnection(ice_servers, |
| + return pc_factory()->CreatePeerConnection(ice_servers, |
| constraints, |
| pa_factory.get(), |
| identity_service, |
| @@ -660,28 +599,14 @@ MediaStreamDependencyFactory::CreatePeerConnection( |
| scoped_refptr<webrtc::MediaStreamInterface> |
| MediaStreamDependencyFactory::CreateLocalMediaStream( |
| const std::string& label) { |
| - return pc_factory_->CreateLocalMediaStream(label).get(); |
| + return pc_factory()->CreateLocalMediaStream(label).get(); |
| } |
| scoped_refptr<webrtc::AudioSourceInterface> |
| MediaStreamDependencyFactory::CreateLocalAudioSource( |
| const webrtc::MediaConstraintsInterface* constraints) { |
| scoped_refptr<webrtc::AudioSourceInterface> source = |
| - pc_factory_->CreateAudioSource(constraints).get(); |
| - return source; |
| -} |
| - |
| -scoped_refptr<webrtc::VideoSourceInterface> |
| -MediaStreamDependencyFactory::CreateLocalVideoSource( |
| - int video_session_id, |
| - bool is_screencast, |
| - const webrtc::MediaConstraintsInterface* constraints) { |
| - RtcVideoCapturer* capturer = new RtcVideoCapturer( |
| - video_session_id, is_screencast); |
| - |
| - // The video source takes ownership of |capturer|. |
| - scoped_refptr<webrtc::VideoSourceInterface> source = |
| - CreateVideoSource(capturer, constraints); |
| + pc_factory()->CreateAudioSource(constraints).get(); |
| return source; |
| } |
| @@ -694,7 +619,7 @@ MediaStreamDependencyFactory::CreateWebAudioSource( |
| scoped_refptr<WebAudioCapturerSource> |
| webaudio_capturer_source(new WebAudioCapturerSource()); |
| - MediaStreamSourceExtraData* source_data = new MediaStreamSourceExtraData(); |
| + MediaStreamAudioSource* source_data = new MediaStreamAudioSource(); |
| // Create a LocalAudioSource object which holds audio options. |
| // SetLocalAudioSource() affects core audio parts in third_party/Libjingle. |
| @@ -711,7 +636,7 @@ scoped_refptr<webrtc::VideoTrackInterface> |
| MediaStreamDependencyFactory::CreateLocalVideoTrack( |
| const std::string& id, |
| webrtc::VideoSourceInterface* source) { |
| - return pc_factory_->CreateVideoTrack(id, source).get(); |
| + return pc_factory()->CreateVideoTrack(id, source).get(); |
| } |
| scoped_refptr<webrtc::VideoTrackInterface> |
| @@ -727,7 +652,7 @@ MediaStreamDependencyFactory::CreateLocalVideoTrack( |
| CreateVideoSource(capturer, NULL); |
| // Create native track from the source. |
| - return pc_factory_->CreateVideoTrack(id, source.get()).get(); |
| + return pc_factory()->CreateVideoTrack(id, source.get()).get(); |
| } |
| scoped_refptr<webrtc::AudioTrackInterface> |
| @@ -793,68 +718,6 @@ void MediaStreamDependencyFactory::DeleteIpcNetworkManager() { |
| network_manager_ = NULL; |
| } |
| -bool MediaStreamDependencyFactory::EnsurePeerConnectionFactory() { |
| - DCHECK(CalledOnValidThread()); |
| - if (PeerConnectionFactoryCreated()) |
| - return true; |
| - |
| - if (!signaling_thread_) { |
| - jingle_glue::JingleThreadWrapper::EnsureForCurrentMessageLoop(); |
| - jingle_glue::JingleThreadWrapper::current()->set_send_allowed(true); |
| - signaling_thread_ = jingle_glue::JingleThreadWrapper::current(); |
| - CHECK(signaling_thread_); |
| - } |
| - |
| - if (!worker_thread_) { |
| - if (!chrome_worker_thread_.IsRunning()) { |
| - if (!chrome_worker_thread_.Start()) { |
| - LOG(ERROR) << "Could not start worker thread"; |
| - signaling_thread_ = NULL; |
| - return false; |
| - } |
| - } |
| - base::WaitableEvent event(true, false); |
| - chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind( |
| - &MediaStreamDependencyFactory::InitializeWorkerThread, |
| - base::Unretained(this), |
| - &worker_thread_, |
| - &event)); |
| - event.Wait(); |
| - DCHECK(worker_thread_); |
| - } |
| - |
| - if (!network_manager_) { |
| - base::WaitableEvent event(true, false); |
| - chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind( |
| - &MediaStreamDependencyFactory::CreateIpcNetworkManagerOnWorkerThread, |
| - base::Unretained(this), |
| - &event)); |
| - event.Wait(); |
| - } |
| - |
| - if (!socket_factory_) { |
| - socket_factory_.reset( |
| - new IpcPacketSocketFactory(p2p_socket_dispatcher_.get())); |
| - } |
| - |
| - // Init SSL, which will be needed by PeerConnection. |
| -#if defined(USE_OPENSSL) |
| - if (!talk_base::InitializeSSL()) { |
| - LOG(ERROR) << "Failed on InitializeSSL."; |
| - return false; |
| - } |
| -#else |
| - // TODO(ronghuawu): Replace this call with InitializeSSL. |
| - net::EnsureNSSSSLInit(); |
| -#endif |
| - |
| - if (!CreatePeerConnectionFactory()) { |
| - LOG(ERROR) << "Could not create PeerConnection factory"; |
| - return false; |
| - } |
| - return true; |
| -} |
| - |
| void MediaStreamDependencyFactory::CleanupPeerConnectionFactory() { |
| pc_factory_ = NULL; |
| if (network_manager_) { |
| @@ -964,7 +827,7 @@ void MediaStreamDependencyFactory::StartAecDump( |
| base::ClosePlatformFile(aec_dump_file); |
| } else { |
| // |pc_factory_| takes ownership of |aec_dump_file_stream|. |
| - pc_factory_->StartAecDump(aec_dump_file_stream); |
| + pc_factory()->StartAecDump(aec_dump_file_stream); |
| } |
| } |