Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "content/renderer/media/media_stream_dependency_factory.h" | 5 #include "content/renderer/media/media_stream_dependency_factory.h" |
| 6 | 6 |
| 7 #include <vector> | 7 #include <vector> |
| 8 | 8 |
| 9 #include "base/command_line.h" | 9 #include "base/command_line.h" |
| 10 #include "base/strings/utf_string_conversions.h" | 10 #include "base/strings/utf_string_conversions.h" |
| 11 #include "base/synchronization/waitable_event.h" | 11 #include "base/synchronization/waitable_event.h" |
| 12 #include "content/common/media/media_stream_messages.h" | 12 #include "content/common/media/media_stream_messages.h" |
| 13 #include "content/public/common/content_switches.h" | 13 #include "content/public/common/content_switches.h" |
| 14 #include "content/renderer/media/media_stream_audio_processor_options.h" | 14 #include "content/renderer/media/media_stream_audio_processor_options.h" |
| 15 #include "content/renderer/media/media_stream_source_extra_data.h" | 15 #include "content/renderer/media/media_stream_audio_source.h" |
| 16 #include "content/renderer/media/media_stream_track_extra_data.h" | 16 #include "content/renderer/media/media_stream_track_extra_data.h" |
| 17 #include "content/renderer/media/media_stream_video_source.h" | |
| 17 #include "content/renderer/media/media_stream_video_track.h" | 18 #include "content/renderer/media/media_stream_video_track.h" |
| 18 #include "content/renderer/media/peer_connection_identity_service.h" | 19 #include "content/renderer/media/peer_connection_identity_service.h" |
| 19 #include "content/renderer/media/rtc_media_constraints.h" | 20 #include "content/renderer/media/rtc_media_constraints.h" |
| 20 #include "content/renderer/media/rtc_peer_connection_handler.h" | 21 #include "content/renderer/media/rtc_peer_connection_handler.h" |
| 21 #include "content/renderer/media/rtc_video_capturer.h" | 22 #include "content/renderer/media/rtc_video_capturer.h" |
| 22 #include "content/renderer/media/rtc_video_decoder_factory.h" | 23 #include "content/renderer/media/rtc_video_decoder_factory.h" |
| 23 #include "content/renderer/media/rtc_video_encoder_factory.h" | 24 #include "content/renderer/media/rtc_video_encoder_factory.h" |
| 24 #include "content/renderer/media/webaudio_capturer_source.h" | 25 #include "content/renderer/media/webaudio_capturer_source.h" |
| 25 #include "content/renderer/media/webrtc_audio_device_impl.h" | 26 #include "content/renderer/media/webrtc_audio_device_impl.h" |
| 26 #include "content/renderer/media/webrtc_local_audio_track.h" | 27 #include "content/renderer/media/webrtc_local_audio_track.h" |
| (...skipping 85 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 112 private: | 113 private: |
| 113 scoped_refptr<P2PSocketDispatcher> socket_dispatcher_; | 114 scoped_refptr<P2PSocketDispatcher> socket_dispatcher_; |
| 114 // |network_manager_| and |socket_factory_| are a weak references, owned by | 115 // |network_manager_| and |socket_factory_| are a weak references, owned by |
| 115 // MediaStreamDependencyFactory. | 116 // MediaStreamDependencyFactory. |
| 116 talk_base::NetworkManager* network_manager_; | 117 talk_base::NetworkManager* network_manager_; |
| 117 talk_base::PacketSocketFactory* socket_factory_; | 118 talk_base::PacketSocketFactory* socket_factory_; |
| 118 // Raw ptr to the WebFrame that created the P2PPortAllocatorFactory. | 119 // Raw ptr to the WebFrame that created the P2PPortAllocatorFactory. |
| 119 blink::WebFrame* web_frame_; | 120 blink::WebFrame* web_frame_; |
| 120 }; | 121 }; |
| 121 | 122 |
| 122 // SourceStateObserver is a help class used for observing the startup state | |
| 123 // transition of webrtc media sources such as a camera or microphone. | |
| 124 // An instance of the object deletes itself after use. | |
| 125 // Usage: | |
| 126 // 1. Create an instance of the object with the blink::WebMediaStream | |
| 127 // the observed sources belongs to a callback. | |
| 128 // 2. Add the sources to the observer using AddSource. | |
| 129 // 3. Call StartObserving() | |
| 130 // 4. The callback will be triggered when all sources have transitioned from | |
| 131 // webrtc::MediaSourceInterface::kInitializing. | |
| 132 class SourceStateObserver : public webrtc::ObserverInterface, | |
| 133 public base::NonThreadSafe { | |
| 134 public: | |
| 135 SourceStateObserver( | |
| 136 blink::WebMediaStream* web_stream, | |
| 137 const MediaStreamDependencyFactory::MediaSourcesCreatedCallback& callback) | |
| 138 : web_stream_(web_stream), | |
| 139 ready_callback_(callback), | |
| 140 live_(true) { | |
| 141 } | |
| 142 | |
| 143 void AddSource(webrtc::MediaSourceInterface* source) { | |
| 144 DCHECK(CalledOnValidThread()); | |
| 145 switch (source->state()) { | |
| 146 case webrtc::MediaSourceInterface::kInitializing: | |
| 147 sources_.push_back(source); | |
| 148 source->RegisterObserver(this); | |
| 149 break; | |
| 150 case webrtc::MediaSourceInterface::kLive: | |
| 151 // The source is already live so we don't need to wait for it. | |
| 152 break; | |
| 153 case webrtc::MediaSourceInterface::kEnded: | |
| 154 // The source have already failed. | |
| 155 live_ = false; | |
| 156 break; | |
| 157 default: | |
| 158 NOTREACHED(); | |
| 159 } | |
| 160 } | |
| 161 | |
| 162 void StartObservering() { | |
| 163 DCHECK(CalledOnValidThread()); | |
| 164 CheckIfSourcesAreLive(); | |
| 165 } | |
| 166 | |
| 167 virtual void OnChanged() OVERRIDE { | |
| 168 DCHECK(CalledOnValidThread()); | |
| 169 CheckIfSourcesAreLive(); | |
| 170 } | |
| 171 | |
| 172 private: | |
| 173 void CheckIfSourcesAreLive() { | |
| 174 ObservedSources::iterator it = sources_.begin(); | |
| 175 while (it != sources_.end()) { | |
| 176 if ((*it)->state() != webrtc::MediaSourceInterface::kInitializing) { | |
| 177 live_ &= (*it)->state() == webrtc::MediaSourceInterface::kLive; | |
| 178 (*it)->UnregisterObserver(this); | |
| 179 it = sources_.erase(it); | |
| 180 } else { | |
| 181 ++it; | |
| 182 } | |
| 183 } | |
| 184 if (sources_.empty()) { | |
| 185 ready_callback_.Run(web_stream_, live_); | |
| 186 delete this; | |
| 187 } | |
| 188 } | |
| 189 | |
| 190 blink::WebMediaStream* web_stream_; | |
| 191 MediaStreamDependencyFactory::MediaSourcesCreatedCallback ready_callback_; | |
| 192 bool live_; | |
| 193 typedef std::vector<scoped_refptr<webrtc::MediaSourceInterface> > | |
| 194 ObservedSources; | |
| 195 ObservedSources sources_; | |
| 196 }; | |
| 197 | |
| 198 MediaStreamDependencyFactory::MediaStreamDependencyFactory( | 123 MediaStreamDependencyFactory::MediaStreamDependencyFactory( |
| 199 P2PSocketDispatcher* p2p_socket_dispatcher) | 124 P2PSocketDispatcher* p2p_socket_dispatcher) |
| 200 : network_manager_(NULL), | 125 : network_manager_(NULL), |
| 201 p2p_socket_dispatcher_(p2p_socket_dispatcher), | 126 p2p_socket_dispatcher_(p2p_socket_dispatcher), |
| 202 signaling_thread_(NULL), | 127 signaling_thread_(NULL), |
| 203 worker_thread_(NULL), | 128 worker_thread_(NULL), |
| 204 chrome_worker_thread_("Chrome_libJingle_WorkerThread"), | 129 chrome_worker_thread_("Chrome_libJingle_WorkerThread"), |
| 205 aec_dump_file_(base::kInvalidPlatformFileValue) { | 130 aec_dump_file_(base::kInvalidPlatformFileValue) { |
| 206 } | 131 } |
| 207 | 132 |
| 208 MediaStreamDependencyFactory::~MediaStreamDependencyFactory() { | 133 MediaStreamDependencyFactory::~MediaStreamDependencyFactory() { |
| 209 CleanupPeerConnectionFactory(); | 134 CleanupPeerConnectionFactory(); |
| 210 if (aec_dump_file_ != base::kInvalidPlatformFileValue) | 135 if (aec_dump_file_ != base::kInvalidPlatformFileValue) |
| 211 base::ClosePlatformFile(aec_dump_file_); | 136 base::ClosePlatformFile(aec_dump_file_); |
| 212 } | 137 } |
| 213 | 138 |
| 214 blink::WebRTCPeerConnectionHandler* | 139 blink::WebRTCPeerConnectionHandler* |
| 215 MediaStreamDependencyFactory::CreateRTCPeerConnectionHandler( | 140 MediaStreamDependencyFactory::CreateRTCPeerConnectionHandler( |
| 216 blink::WebRTCPeerConnectionHandlerClient* client) { | 141 blink::WebRTCPeerConnectionHandlerClient* client) { |
| 217 // Save histogram data so we can see how much PeerConnetion is used. | 142 // Save histogram data so we can see how much PeerConnetion is used. |
| 218 // The histogram counts the number of calls to the JS API | 143 // The histogram counts the number of calls to the JS API |
| 219 // webKitRTCPeerConnection. | 144 // webKitRTCPeerConnection. |
| 220 UpdateWebRTCMethodCount(WEBKIT_RTC_PEER_CONNECTION); | 145 UpdateWebRTCMethodCount(WEBKIT_RTC_PEER_CONNECTION); |
| 221 | 146 |
| 222 if (!EnsurePeerConnectionFactory()) | |
| 223 return NULL; | |
| 224 | |
| 225 return new RTCPeerConnectionHandler(client, this); | 147 return new RTCPeerConnectionHandler(client, this); |
| 226 } | 148 } |
| 227 | 149 |
| 228 void MediaStreamDependencyFactory::CreateNativeMediaSources( | 150 bool MediaStreamDependencyFactory::InitializeMediaStreamAudioSource( |
| 229 int render_view_id, | 151 int render_view_id, |
| 230 const blink::WebMediaConstraints& audio_constraints, | 152 const blink::WebMediaConstraints& audio_constraints, |
| 231 const blink::WebMediaConstraints& video_constraints, | 153 const blink::WebMediaStreamSource& audio_source) { |
| 232 blink::WebMediaStream* web_stream, | 154 DVLOG(1) << "InitializeMediaStreamAudioSources()"; |
| 233 const MediaSourcesCreatedCallback& sources_created) { | |
| 234 DVLOG(1) << "MediaStreamDependencyFactory::CreateNativeMediaSources()"; | |
| 235 if (!EnsurePeerConnectionFactory()) { | |
| 236 sources_created.Run(web_stream, false); | |
| 237 return; | |
| 238 } | |
| 239 | |
| 240 // |source_observer| clean up itself when it has completed | |
| 241 // source_observer->StartObservering. | |
| 242 SourceStateObserver* source_observer = | |
| 243 new SourceStateObserver(web_stream, sources_created); | |
| 244 | |
| 245 // Create local video sources. | |
| 246 RTCMediaConstraints native_video_constraints(video_constraints); | |
| 247 blink::WebVector<blink::WebMediaStreamTrack> video_tracks; | |
| 248 web_stream->videoTracks(video_tracks); | |
| 249 for (size_t i = 0; i < video_tracks.size(); ++i) { | |
| 250 const blink::WebMediaStreamSource& source = video_tracks[i].source(); | |
| 251 MediaStreamSourceExtraData* source_data = | |
| 252 static_cast<MediaStreamSourceExtraData*>(source.extraData()); | |
| 253 | |
| 254 // Check if the source has already been created. This happens when the same | |
| 255 // source is used in multiple MediaStreams as a result of calling | |
| 256 // getUserMedia. | |
| 257 if (source_data->video_source()) | |
| 258 continue; | |
| 259 | |
| 260 const bool is_screencast = | |
| 261 source_data->device_info().device.type == MEDIA_TAB_VIDEO_CAPTURE || | |
| 262 source_data->device_info().device.type == MEDIA_DESKTOP_VIDEO_CAPTURE; | |
| 263 source_data->SetVideoSource( | |
| 264 CreateLocalVideoSource(source_data->device_info().session_id, | |
| 265 is_screencast, | |
| 266 &native_video_constraints).get()); | |
| 267 source_observer->AddSource(source_data->video_source()); | |
| 268 } | |
| 269 | 155 |
| 270 // Do additional source initialization if the audio source is a valid | 156 // Do additional source initialization if the audio source is a valid |
| 271 // microphone or tab audio. | 157 // microphone or tab audio. |
| 272 RTCMediaConstraints native_audio_constraints(audio_constraints); | 158 RTCMediaConstraints native_audio_constraints(audio_constraints); |
| 273 ApplyFixedAudioConstraints(&native_audio_constraints); | 159 ApplyFixedAudioConstraints(&native_audio_constraints); |
| 274 blink::WebVector<blink::WebMediaStreamTrack> audio_tracks; | |
| 275 web_stream->audioTracks(audio_tracks); | |
| 276 for (size_t i = 0; i < audio_tracks.size(); ++i) { | |
| 277 const blink::WebMediaStreamSource& source = audio_tracks[i].source(); | |
| 278 MediaStreamSourceExtraData* source_data = | |
| 279 static_cast<MediaStreamSourceExtraData*>(source.extraData()); | |
| 280 | 160 |
| 281 // Check if the source has already been created. This happens when the same | 161 MediaStreamAudioSource* source_data = |
| 282 // source is used in multiple MediaStreams as a result of calling | 162 static_cast<MediaStreamAudioSource*>(audio_source.extraData()); |
| 283 // getUserMedia. | |
| 284 if (source_data->local_audio_source()) | |
| 285 continue; | |
| 286 | 163 |
| 287 // TODO(xians): Create a new capturer for difference microphones when we | 164 // TODO(xians): Create a new capturer for difference microphones when we |
| 288 // support multiple microphones. See issue crbug/262117 . | 165 // support multiple microphones. See issue crbug/262117 . |
| 289 StreamDeviceInfo device_info = source_data->device_info(); | 166 StreamDeviceInfo device_info = source_data->device_info(); |
| 290 RTCMediaConstraints constraints = native_audio_constraints; | 167 RTCMediaConstraints constraints = native_audio_constraints; |
| 291 | 168 |
| 292 // If any platform effects are available, check them against the | 169 // If any platform effects are available, check them against the |
| 293 // constraints. Disable effects to match false constraints, but if a | 170 // constraints. Disable effects to match false constraints, but if a |
| 294 // constraint is true, set the constraint to false to later disable the | 171 // constraint is true, set the constraint to false to later disable the |
| 295 // software effect. | 172 // software effect. |
| 296 int effects = device_info.device.input.effects; | 173 int effects = device_info.device.input.effects; |
| 297 if (effects != media::AudioParameters::NO_EFFECTS) { | 174 if (effects != media::AudioParameters::NO_EFFECTS) { |
| 298 for (size_t i = 0; i < ARRAYSIZE_UNSAFE(kConstraintEffectMap); ++i) { | 175 for (size_t i = 0; i < ARRAYSIZE_UNSAFE(kConstraintEffectMap); ++i) { |
| 299 bool value; | 176 bool value; |
| 300 if (!webrtc::FindConstraint(&constraints, | 177 if (!webrtc::FindConstraint(&constraints, |
| 301 kConstraintEffectMap[i].constraint, &value, NULL) || !value) { | 178 kConstraintEffectMap[i].constraint, &value, |
| 302 // If the constraint is false, or does not exist, disable the platform | 179 NULL) || !value) { |
| 303 // effect. | 180 // If the constraint is false, or does not exist, disable the platform |
| 304 effects &= ~kConstraintEffectMap[i].effect; | 181 // effect. |
| 305 DVLOG(1) << "Disabling constraint: " | 182 effects &= ~kConstraintEffectMap[i].effect; |
| 306 << kConstraintEffectMap[i].constraint; | 183 DVLOG(1) << "Disabling constraint: " |
| 307 } else if (effects & kConstraintEffectMap[i].effect) { | 184 << kConstraintEffectMap[i].constraint; |
| 308 // If the constraint is true, leave the platform effect enabled, and | 185 } else if (effects & kConstraintEffectMap[i].effect) { |
| 309 // set the constraint to false to later disable the software effect. | 186 // If the constraint is true, leave the platform effect enabled, and |
| 310 constraints.AddMandatory(kConstraintEffectMap[i].constraint, | 187 // set the constraint to false to later disable the software effect. |
| 311 webrtc::MediaConstraintsInterface::kValueFalse, true); | 188 constraints.AddMandatory(kConstraintEffectMap[i].constraint, |
| 312 DVLOG(1) << "Disabling platform effect: " | 189 webrtc::MediaConstraintsInterface::kValueFalse, |
| 313 << kConstraintEffectMap[i].constraint; | 190 true); |
| 314 } | 191 DVLOG(1) << "Disabling platform effect: " |
| 192 << kConstraintEffectMap[i].constraint; | |
| 315 } | 193 } |
| 316 device_info.device.input.effects = effects; | |
| 317 } | 194 } |
| 318 | 195 device_info.device.input.effects = effects; |
| 319 scoped_refptr<WebRtcAudioCapturer> capturer( | |
| 320 CreateAudioCapturer(render_view_id, device_info, audio_constraints)); | |
| 321 if (!capturer.get()) { | |
| 322 DLOG(WARNING) << "Failed to create the capturer for device " | |
| 323 << device_info.device.id; | |
| 324 sources_created.Run(web_stream, false); | |
| 325 // TODO(xians): Don't we need to check if source_observer is observing | |
| 326 // something? If not, then it looks like we have a leak here. | |
| 327 // OTOH, if it _is_ observing something, then the callback might | |
| 328 // be called multiple times which is likely also a bug. | |
| 329 return; | |
| 330 } | |
| 331 source_data->SetAudioCapturer(capturer); | |
| 332 | |
| 333 // Creates a LocalAudioSource object which holds audio options. | |
| 334 // TODO(xians): The option should apply to the track instead of the source. | |
| 335 source_data->SetLocalAudioSource( | |
| 336 CreateLocalAudioSource(&constraints).get()); | |
| 337 source_observer->AddSource(source_data->local_audio_source()); | |
| 338 } | 196 } |
| 339 | 197 |
| 340 source_observer->StartObservering(); | 198 scoped_refptr<WebRtcAudioCapturer> capturer( |
| 199 CreateAudioCapturer(render_view_id, device_info, audio_constraints)); | |
| 200 if (!capturer.get()) { | |
| 201 DLOG(WARNING) << "Failed to create the capturer for device " | |
| 202 << device_info.device.id; | |
| 203 // TODO(xians): Don't we need to check if source_observer is observing | |
| 204 // something? If not, then it looks like we have a leak here. | |
| 205 // OTOH, if it _is_ observing something, then the callback might | |
| 206 // be called multiple times which is likely also a bug. | |
| 207 return false; | |
| 208 } | |
| 209 source_data->SetAudioCapturer(capturer); | |
| 210 | |
| 211 // Creates a LocalAudioSource object which holds audio options. | |
| 212 // TODO(xians): The option should apply to the track instead of the source. | |
| 213 source_data->SetLocalAudioSource( | |
| 214 CreateLocalAudioSource(&constraints).get()); | |
| 215 | |
| 216 return true; | |
| 217 } | |
| 218 | |
| 219 cricket::VideoCapturer* MediaStreamDependencyFactory::CreateVideoCapturer( | |
| 220 const StreamDeviceInfo& info) { | |
| 221 bool is_screeencast = | |
| 222 info.device.type == MEDIA_TAB_VIDEO_CAPTURE || | |
| 223 info.device.type == MEDIA_DESKTOP_VIDEO_CAPTURE; | |
| 224 return new RtcVideoCapturer(info.session_id, is_screeencast); | |
| 341 } | 225 } |
| 342 | 226 |
| 343 void MediaStreamDependencyFactory::CreateNativeLocalMediaStream( | 227 void MediaStreamDependencyFactory::CreateNativeLocalMediaStream( |
| 344 blink::WebMediaStream* web_stream) { | 228 blink::WebMediaStream* web_stream) { |
| 345 DVLOG(1) << "MediaStreamDependencyFactory::CreateNativeLocalMediaStream()"; | 229 DVLOG(1) << "MediaStreamDependencyFactory::CreateNativeLocalMediaStream()"; |
| 346 if (!EnsurePeerConnectionFactory()) { | |
| 347 DVLOG(1) << "EnsurePeerConnectionFactory() failed!"; | |
| 348 return; | |
| 349 } | |
| 350 | 230 |
| 351 std::string label = base::UTF16ToUTF8(web_stream->id()); | 231 std::string label = base::UTF16ToUTF8(web_stream->id()); |
| 352 scoped_refptr<webrtc::MediaStreamInterface> native_stream = | 232 scoped_refptr<webrtc::MediaStreamInterface> native_stream = |
| 353 CreateLocalMediaStream(label); | 233 CreateLocalMediaStream(label); |
| 354 MediaStreamExtraData* extra_data = | 234 MediaStreamExtraData* extra_data = |
| 355 new MediaStreamExtraData(native_stream.get(), true); | 235 new MediaStreamExtraData(native_stream.get(), true); |
| 356 web_stream->setExtraData(extra_data); | 236 web_stream->setExtraData(extra_data); |
| 357 | 237 |
| 358 // Add audio tracks. | 238 // Add audio tracks. |
| 359 blink::WebVector<blink::WebMediaStreamTrack> audio_tracks; | 239 blink::WebVector<blink::WebMediaStreamTrack> audio_tracks; |
| (...skipping 18 matching lines...) Expand all Loading... | |
| 378 MediaStreamExtraData* extra_data = | 258 MediaStreamExtraData* extra_data = |
| 379 static_cast<MediaStreamExtraData*>(web_stream->extraData()); | 259 static_cast<MediaStreamExtraData*>(web_stream->extraData()); |
| 380 extra_data->SetLocalStreamStopCallback(stream_stop); | 260 extra_data->SetLocalStreamStopCallback(stream_stop); |
| 381 } | 261 } |
| 382 | 262 |
| 383 scoped_refptr<webrtc::AudioTrackInterface> | 263 scoped_refptr<webrtc::AudioTrackInterface> |
| 384 MediaStreamDependencyFactory::CreateNativeAudioMediaStreamTrack( | 264 MediaStreamDependencyFactory::CreateNativeAudioMediaStreamTrack( |
| 385 const blink::WebMediaStreamTrack& track) { | 265 const blink::WebMediaStreamTrack& track) { |
| 386 blink::WebMediaStreamSource source = track.source(); | 266 blink::WebMediaStreamSource source = track.source(); |
| 387 DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeAudio); | 267 DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeAudio); |
| 388 MediaStreamSourceExtraData* source_data = | 268 MediaStreamAudioSource* source_data = |
| 389 static_cast<MediaStreamSourceExtraData*>(source.extraData()); | 269 static_cast<MediaStreamAudioSource*>(source.extraData()); |
| 390 | 270 |
| 391 // In the future the constraints will belong to the track itself, but | 271 // In the future the constraints will belong to the track itself, but |
| 392 // right now they're on the source, so we fetch them from there. | 272 // right now they're on the source, so we fetch them from there. |
| 393 RTCMediaConstraints track_constraints(source.constraints()); | 273 RTCMediaConstraints track_constraints(source.constraints()); |
| 394 | 274 |
| 395 // Apply default audio constraints that enable echo cancellation, | 275 // Apply default audio constraints that enable echo cancellation, |
| 396 // automatic gain control, noise suppression and high-pass filter. | 276 // automatic gain control, noise suppression and high-pass filter. |
| 397 ApplyFixedAudioConstraints(&track_constraints); | 277 ApplyFixedAudioConstraints(&track_constraints); |
| 398 | 278 |
| 399 scoped_refptr<WebAudioCapturerSource> webaudio_source; | 279 scoped_refptr<WebAudioCapturerSource> webaudio_source; |
| 400 if (!source_data) { | 280 if (!source_data) { |
| 401 if (source.requiresAudioConsumer()) { | 281 if (source.requiresAudioConsumer()) { |
| 402 // We're adding a WebAudio MediaStream. | 282 // We're adding a WebAudio MediaStream. |
| 403 // Create a specific capturer for each WebAudio consumer. | 283 // Create a specific capturer for each WebAudio consumer. |
| 404 webaudio_source = CreateWebAudioSource(&source, track_constraints); | 284 webaudio_source = CreateWebAudioSource(&source, track_constraints); |
| 405 source_data = | 285 source_data = |
| 406 static_cast<MediaStreamSourceExtraData*>(source.extraData()); | 286 static_cast<MediaStreamAudioSource*>(source.extraData()); |
| 407 } else { | 287 } else { |
| 408 // TODO(perkj): Implement support for sources from | 288 // TODO(perkj): Implement support for sources from |
| 409 // remote MediaStreams. | 289 // remote MediaStreams. |
| 410 NOTIMPLEMENTED(); | 290 NOTIMPLEMENTED(); |
| 411 return NULL; | 291 return NULL; |
| 412 } | 292 } |
| 413 } | 293 } |
| 414 | 294 |
| 415 scoped_refptr<webrtc::AudioTrackInterface> audio_track( | 295 scoped_refptr<webrtc::AudioTrackInterface> audio_track( |
| 416 CreateLocalAudioTrack(track.id().utf8(), | 296 CreateLocalAudioTrack(track.id().utf8(), |
| 417 source_data->GetAudioCapturer(), | 297 source_data->GetAudioCapturer(), |
| 418 webaudio_source.get(), | 298 webaudio_source.get(), |
| 419 source_data->local_audio_source())); | 299 source_data->local_audio_source())); |
| 420 AddNativeTrackToBlinkTrack(audio_track.get(), track, true); | 300 AddNativeTrackToBlinkTrack(audio_track.get(), track, true); |
| 421 | 301 |
| 422 audio_track->set_enabled(track.isEnabled()); | 302 audio_track->set_enabled(track.isEnabled()); |
| 423 | 303 |
| 424 // Pass the pointer of the source provider to the blink audio track. | 304 // Pass the pointer of the source provider to the blink audio track. |
| 425 blink::WebMediaStreamTrack writable_track = track; | 305 blink::WebMediaStreamTrack writable_track = track; |
| 426 writable_track.setSourceProvider(static_cast<WebRtcLocalAudioTrack*>( | 306 writable_track.setSourceProvider(static_cast<WebRtcLocalAudioTrack*>( |
| 427 audio_track.get())->audio_source_provider()); | 307 audio_track.get())->audio_source_provider()); |
| 428 | 308 |
| 429 return audio_track; | 309 return audio_track; |
| 430 } | 310 } |
| 431 | 311 |
| 432 scoped_refptr<webrtc::VideoTrackInterface> | 312 scoped_refptr<webrtc::VideoTrackInterface> |
| 433 MediaStreamDependencyFactory::CreateNativeVideoMediaStreamTrack( | 313 MediaStreamDependencyFactory::CreateNativeVideoMediaStreamTrack( |
| 434 const blink::WebMediaStreamTrack& track) { | 314 const blink::WebMediaStreamTrack& track) { |
| 315 DCHECK(track.extraData() == NULL); | |
| 435 blink::WebMediaStreamSource source = track.source(); | 316 blink::WebMediaStreamSource source = track.source(); |
| 436 DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeVideo); | 317 DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeVideo); |
| 437 MediaStreamSourceExtraData* source_data = | 318 |
| 438 static_cast<MediaStreamSourceExtraData*>(source.extraData()); | 319 MediaStreamVideoSource* source_data = |
| 320 static_cast<MediaStreamVideoSource*>(source.extraData()); | |
| 439 | 321 |
| 440 if (!source_data) { | 322 if (!source_data) { |
| 441 // TODO(perkj): Implement support for sources from | 323 // TODO(perkj): Implement support for sources from |
| 442 // remote MediaStreams. | 324 // remote MediaStreams. |
| 443 NOTIMPLEMENTED(); | 325 NOTIMPLEMENTED(); |
| 444 return NULL; | 326 return NULL; |
| 445 } | 327 } |
| 446 | 328 |
| 447 std::string track_id = base::UTF16ToUTF8(track.id()); | 329 // Create native track from the source. |
| 448 scoped_refptr<webrtc::VideoTrackInterface> video_track( | 330 scoped_refptr<webrtc::VideoTrackInterface> webrtc_track = |
| 449 CreateLocalVideoTrack(track_id, source_data->video_source())); | 331 CreateLocalVideoTrack(track.id().utf8(), source_data->GetAdapter()); |
| 450 AddNativeTrackToBlinkTrack(video_track.get(), track, true); | |
| 451 | 332 |
| 452 video_track->set_enabled(track.isEnabled()); | 333 bool local_track = true; |
| 334 AddNativeTrackToBlinkTrack(webrtc_track, track, local_track); | |
| 453 | 335 |
| 454 return video_track; | 336 webrtc_track->set_enabled(track.isEnabled()); |
| 337 | |
| 338 return webrtc_track; | |
| 455 } | 339 } |
| 456 | 340 |
| 457 void MediaStreamDependencyFactory::CreateNativeMediaStreamTrack( | 341 void MediaStreamDependencyFactory::CreateNativeMediaStreamTrack( |
| 458 const blink::WebMediaStreamTrack& track) { | 342 const blink::WebMediaStreamTrack& track) { |
| 459 DCHECK(!track.isNull() && !track.extraData()); | 343 DCHECK(!track.isNull() && !track.extraData()); |
| 460 DCHECK(!track.source().isNull()); | 344 DCHECK(!track.source().isNull()); |
| 461 | 345 |
| 462 switch (track.source().type()) { | 346 switch (track.source().type()) { |
| 463 case blink::WebMediaStreamSource::TypeAudio: | 347 case blink::WebMediaStreamSource::TypeAudio: |
| 464 CreateNativeAudioMediaStreamTrack(track); | 348 CreateNativeAudioMediaStreamTrack(track); |
| (...skipping 91 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 556 native_stream->FindVideoTrack(track_id)); | 440 native_stream->FindVideoTrack(track_id)); |
| 557 } | 441 } |
| 558 return false; | 442 return false; |
| 559 } | 443 } |
| 560 | 444 |
| 561 scoped_refptr<webrtc::VideoSourceInterface> | 445 scoped_refptr<webrtc::VideoSourceInterface> |
| 562 MediaStreamDependencyFactory::CreateVideoSource( | 446 MediaStreamDependencyFactory::CreateVideoSource( |
| 563 cricket::VideoCapturer* capturer, | 447 cricket::VideoCapturer* capturer, |
| 564 const webrtc::MediaConstraintsInterface* constraints) { | 448 const webrtc::MediaConstraintsInterface* constraints) { |
| 565 scoped_refptr<webrtc::VideoSourceInterface> source = | 449 scoped_refptr<webrtc::VideoSourceInterface> source = |
| 566 pc_factory_->CreateVideoSource(capturer, constraints).get(); | 450 pc_factory()->CreateVideoSource(capturer, constraints).get(); |
| 567 return source; | 451 return source; |
| 568 } | 452 } |
| 569 | 453 |
| 454 const scoped_refptr<webrtc::PeerConnectionFactoryInterface>& | |
| 455 MediaStreamDependencyFactory::pc_factory() { | |
| 456 if (!pc_factory_) | |
| 457 CreatePeerConnectionFactory(); | |
|
Ronghua Wu (Left Chromium)
2014/01/23 00:42:43
lazy initialization lg. but how many case do we us
perkj_chrome
2014/01/27 18:47:17
The MediaStreamDependencyFactory is created when t
| |
| 458 CHECK(pc_factory_); | |
| 459 return pc_factory_; | |
| 460 } | |
| 461 | |
| 570 bool MediaStreamDependencyFactory::CreatePeerConnectionFactory() { | 462 bool MediaStreamDependencyFactory::CreatePeerConnectionFactory() { |
| 571 DCHECK(!pc_factory_.get()); | 463 DCHECK(!pc_factory_.get()); |
| 572 DCHECK(!audio_device_.get()); | 464 DCHECK(!audio_device_.get()); |
| 465 DCHECK(!signaling_thread_); | |
| 466 DCHECK(!worker_thread_); | |
| 467 DCHECK(!network_manager_); | |
| 468 DCHECK(!socket_factory_); | |
| 469 | |
| 573 DVLOG(1) << "MediaStreamDependencyFactory::CreatePeerConnectionFactory()"; | 470 DVLOG(1) << "MediaStreamDependencyFactory::CreatePeerConnectionFactory()"; |
| 574 | 471 |
| 472 jingle_glue::JingleThreadWrapper::EnsureForCurrentMessageLoop(); | |
| 473 jingle_glue::JingleThreadWrapper::current()->set_send_allowed(true); | |
| 474 signaling_thread_ = jingle_glue::JingleThreadWrapper::current(); | |
| 475 CHECK(signaling_thread_); | |
| 476 | |
| 477 if (!chrome_worker_thread_.IsRunning()) { | |
| 478 if (!chrome_worker_thread_.Start()) { | |
| 479 LOG(ERROR) << "Could not start worker thread"; | |
| 480 signaling_thread_ = NULL; | |
| 481 return false; | |
| 482 } | |
| 483 } | |
| 484 base::WaitableEvent start_worker_event(true, false); | |
| 485 chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind( | |
| 486 &MediaStreamDependencyFactory::InitializeWorkerThread, | |
| 487 base::Unretained(this), | |
| 488 &worker_thread_, | |
| 489 &start_worker_event)); | |
| 490 start_worker_event.Wait(); | |
| 491 CHECK(worker_thread_); | |
| 492 | |
| 493 base::WaitableEvent create_network_manager_event(true, false); | |
| 494 chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind( | |
| 495 &MediaStreamDependencyFactory::CreateIpcNetworkManagerOnWorkerThread, | |
| 496 base::Unretained(this), | |
| 497 &create_network_manager_event)); | |
| 498 create_network_manager_event.Wait(); | |
| 499 | |
| 500 socket_factory_.reset( | |
| 501 new IpcPacketSocketFactory(p2p_socket_dispatcher_.get())); | |
| 502 | |
| 503 // Init SSL, which will be needed by PeerConnection. | |
| 504 #if defined(USE_OPENSSL) | |
| 505 if (!talk_base::InitializeSSL()) { | |
| 506 LOG(ERROR) << "Failed on InitializeSSL."; | |
| 507 return false; | |
| 508 } | |
| 509 #else | |
| 510 // TODO(ronghuawu): Replace this call with InitializeSSL. | |
| 511 net::EnsureNSSSSLInit(); | |
| 512 #endif | |
| 513 | |
| 575 scoped_ptr<cricket::WebRtcVideoDecoderFactory> decoder_factory; | 514 scoped_ptr<cricket::WebRtcVideoDecoderFactory> decoder_factory; |
| 576 scoped_ptr<cricket::WebRtcVideoEncoderFactory> encoder_factory; | 515 scoped_ptr<cricket::WebRtcVideoEncoderFactory> encoder_factory; |
| 577 | 516 |
| 578 const CommandLine* cmd_line = CommandLine::ForCurrentProcess(); | 517 const CommandLine* cmd_line = CommandLine::ForCurrentProcess(); |
| 579 scoped_refptr<RendererGpuVideoAcceleratorFactories> gpu_factories = | 518 scoped_refptr<RendererGpuVideoAcceleratorFactories> gpu_factories = |
| 580 RenderThreadImpl::current()->GetGpuFactories(); | 519 RenderThreadImpl::current()->GetGpuFactories(); |
| 581 if (!cmd_line->HasSwitch(switches::kDisableWebRtcHWDecoding)) { | 520 if (!cmd_line->HasSwitch(switches::kDisableWebRtcHWDecoding)) { |
| 582 if (gpu_factories) | 521 if (gpu_factories) |
| 583 decoder_factory.reset(new RTCVideoDecoderFactory(gpu_factories)); | 522 decoder_factory.reset(new RTCVideoDecoderFactory(gpu_factories)); |
| 584 } | 523 } |
| (...skipping 58 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 643 new talk_base::RefCountedObject<P2PPortAllocatorFactory>( | 582 new talk_base::RefCountedObject<P2PPortAllocatorFactory>( |
| 644 p2p_socket_dispatcher_.get(), | 583 p2p_socket_dispatcher_.get(), |
| 645 network_manager_, | 584 network_manager_, |
| 646 socket_factory_.get(), | 585 socket_factory_.get(), |
| 647 web_frame); | 586 web_frame); |
| 648 | 587 |
| 649 PeerConnectionIdentityService* identity_service = | 588 PeerConnectionIdentityService* identity_service = |
| 650 new PeerConnectionIdentityService( | 589 new PeerConnectionIdentityService( |
| 651 GURL(web_frame->document().url().spec()).GetOrigin()); | 590 GURL(web_frame->document().url().spec()).GetOrigin()); |
| 652 | 591 |
| 653 return pc_factory_->CreatePeerConnection(ice_servers, | 592 return pc_factory()->CreatePeerConnection(ice_servers, |
| 654 constraints, | 593 constraints, |
| 655 pa_factory.get(), | 594 pa_factory.get(), |
| 656 identity_service, | 595 identity_service, |
| 657 observer).get(); | 596 observer).get(); |
| 658 } | 597 } |
| 659 | 598 |
// Creates a new, empty local MediaStream labeled |label| via the libjingle
// peer connection factory. Tracks are added by the caller afterwards.
scoped_refptr<webrtc::MediaStreamInterface>
MediaStreamDependencyFactory::CreateLocalMediaStream(
    const std::string& label) {
  // .get() converts the libjingle talk_base::scoped_refptr to a raw pointer
  // while the temporary still holds a reference; the returned Chromium
  // scoped_refptr addrefs before that temporary releases at the end of the
  // full expression, so no refcount ever drops to zero here.
  return pc_factory()->CreateLocalMediaStream(label).get();
}
| 665 | 604 |
// Creates a libjingle audio source configured with the given |constraints|
// (may carry echo-cancellation/AGC options). |constraints| is not retained.
scoped_refptr<webrtc::AudioSourceInterface>
MediaStreamDependencyFactory::CreateLocalAudioSource(
    const webrtc::MediaConstraintsInterface* constraints) {
  // .get() bridges the libjingle ref-pointer to Chromium's scoped_refptr;
  // |source| addrefs before the factory's temporary releases its reference.
  scoped_refptr<webrtc::AudioSourceInterface> source =
      pc_factory()->CreateAudioSource(constraints).get();
  return source;
}
| 673 | 612 |
| 674 scoped_refptr<webrtc::VideoSourceInterface> | |
| 675 MediaStreamDependencyFactory::CreateLocalVideoSource( | |
| 676 int video_session_id, | |
| 677 bool is_screencast, | |
| 678 const webrtc::MediaConstraintsInterface* constraints) { | |
| 679 RtcVideoCapturer* capturer = new RtcVideoCapturer( | |
| 680 video_session_id, is_screencast); | |
| 681 | |
| 682 // The video source takes ownership of |capturer|. | |
| 683 scoped_refptr<webrtc::VideoSourceInterface> source = | |
| 684 CreateVideoSource(capturer, constraints); | |
| 685 return source; | |
| 686 } | |
| 687 | |
| 688 scoped_refptr<WebAudioCapturerSource> | 613 scoped_refptr<WebAudioCapturerSource> |
| 689 MediaStreamDependencyFactory::CreateWebAudioSource( | 614 MediaStreamDependencyFactory::CreateWebAudioSource( |
| 690 blink::WebMediaStreamSource* source, | 615 blink::WebMediaStreamSource* source, |
| 691 const RTCMediaConstraints& constraints) { | 616 const RTCMediaConstraints& constraints) { |
| 692 DVLOG(1) << "MediaStreamDependencyFactory::CreateWebAudioSource()"; | 617 DVLOG(1) << "MediaStreamDependencyFactory::CreateWebAudioSource()"; |
| 693 DCHECK(GetWebRtcAudioDevice()); | 618 DCHECK(GetWebRtcAudioDevice()); |
| 694 | 619 |
| 695 scoped_refptr<WebAudioCapturerSource> | 620 scoped_refptr<WebAudioCapturerSource> |
| 696 webaudio_capturer_source(new WebAudioCapturerSource()); | 621 webaudio_capturer_source(new WebAudioCapturerSource()); |
| 697 MediaStreamSourceExtraData* source_data = new MediaStreamSourceExtraData(); | 622 MediaStreamAudioSource* source_data = new MediaStreamAudioSource(); |
| 698 | 623 |
| 699 // Create a LocalAudioSource object which holds audio options. | 624 // Create a LocalAudioSource object which holds audio options. |
| 700 // SetLocalAudioSource() affects core audio parts in third_party/Libjingle. | 625 // SetLocalAudioSource() affects core audio parts in third_party/Libjingle. |
| 701 source_data->SetLocalAudioSource(CreateLocalAudioSource(&constraints).get()); | 626 source_data->SetLocalAudioSource(CreateLocalAudioSource(&constraints).get()); |
| 702 source->setExtraData(source_data); | 627 source->setExtraData(source_data); |
| 703 | 628 |
| 704 // Replace the default source with WebAudio as source instead. | 629 // Replace the default source with WebAudio as source instead. |
| 705 source->addAudioConsumer(webaudio_capturer_source.get()); | 630 source->addAudioConsumer(webaudio_capturer_source.get()); |
| 706 | 631 |
| 707 return webaudio_capturer_source; | 632 return webaudio_capturer_source; |
| 708 } | 633 } |
| 709 | 634 |
// Creates a native video track named |id| drawing frames from |source|.
// |source| is not owned; the factory addrefs it as needed.
scoped_refptr<webrtc::VideoTrackInterface>
MediaStreamDependencyFactory::CreateLocalVideoTrack(
    const std::string& id,
    webrtc::VideoSourceInterface* source) {
  // .get() is evaluated while the libjingle temporary still holds a
  // reference, so the returned scoped_refptr addrefs safely.
  return pc_factory()->CreateVideoTrack(id, source).get();
}
| 716 | 641 |
| 717 scoped_refptr<webrtc::VideoTrackInterface> | 642 scoped_refptr<webrtc::VideoTrackInterface> |
| 718 MediaStreamDependencyFactory::CreateLocalVideoTrack( | 643 MediaStreamDependencyFactory::CreateLocalVideoTrack( |
| 719 const std::string& id, cricket::VideoCapturer* capturer) { | 644 const std::string& id, cricket::VideoCapturer* capturer) { |
| 720 if (!capturer) { | 645 if (!capturer) { |
| 721 LOG(ERROR) << "CreateLocalVideoTrack called with null VideoCapturer."; | 646 LOG(ERROR) << "CreateLocalVideoTrack called with null VideoCapturer."; |
| 722 return NULL; | 647 return NULL; |
| 723 } | 648 } |
| 724 | 649 |
| 725 // Create video source from the |capturer|. | 650 // Create video source from the |capturer|. |
| 726 scoped_refptr<webrtc::VideoSourceInterface> source = | 651 scoped_refptr<webrtc::VideoSourceInterface> source = |
| 727 CreateVideoSource(capturer, NULL); | 652 CreateVideoSource(capturer, NULL); |
| 728 | 653 |
| 729 // Create native track from the source. | 654 // Create native track from the source. |
| 730 return pc_factory_->CreateVideoTrack(id, source.get()).get(); | 655 return pc_factory()->CreateVideoTrack(id, source.get()).get(); |
| 731 } | 656 } |
| 732 | 657 |
| 733 scoped_refptr<webrtc::AudioTrackInterface> | 658 scoped_refptr<webrtc::AudioTrackInterface> |
| 734 MediaStreamDependencyFactory::CreateLocalAudioTrack( | 659 MediaStreamDependencyFactory::CreateLocalAudioTrack( |
| 735 const std::string& id, | 660 const std::string& id, |
| 736 const scoped_refptr<WebRtcAudioCapturer>& capturer, | 661 const scoped_refptr<WebRtcAudioCapturer>& capturer, |
| 737 WebAudioCapturerSource* webaudio_source, | 662 WebAudioCapturerSource* webaudio_source, |
| 738 webrtc::AudioSourceInterface* source) { | 663 webrtc::AudioSourceInterface* source) { |
| 739 // TODO(xians): Merge |source| to the capturer(). We can't do this today | 664 // TODO(xians): Merge |source| to the capturer(). We can't do this today |
| 740 // because only one capturer() is supported while one |source| is created | 665 // because only one capturer() is supported while one |source| is created |
| (...skipping 45 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 786 network_manager_ = new IpcNetworkManager(p2p_socket_dispatcher_.get()); | 711 network_manager_ = new IpcNetworkManager(p2p_socket_dispatcher_.get()); |
| 787 event->Signal(); | 712 event->Signal(); |
| 788 } | 713 } |
| 789 | 714 |
// Destroys the IPC network manager. Must run on the worker thread, which is
// the thread the manager was created on (see
// CreateIpcNetworkManagerOnWorkerThread).
void MediaStreamDependencyFactory::DeleteIpcNetworkManager() {
  DCHECK_EQ(base::MessageLoop::current(), chrome_worker_thread_.message_loop());
  delete network_manager_;
  // Null the member so later guards treat the manager as gone.
  network_manager_ = NULL;
}
| 795 | 720 |
| 796 bool MediaStreamDependencyFactory::EnsurePeerConnectionFactory() { | |
| 797 DCHECK(CalledOnValidThread()); | |
| 798 if (PeerConnectionFactoryCreated()) | |
| 799 return true; | |
| 800 | |
| 801 if (!signaling_thread_) { | |
| 802 jingle_glue::JingleThreadWrapper::EnsureForCurrentMessageLoop(); | |
| 803 jingle_glue::JingleThreadWrapper::current()->set_send_allowed(true); | |
| 804 signaling_thread_ = jingle_glue::JingleThreadWrapper::current(); | |
| 805 CHECK(signaling_thread_); | |
| 806 } | |
| 807 | |
| 808 if (!worker_thread_) { | |
| 809 if (!chrome_worker_thread_.IsRunning()) { | |
| 810 if (!chrome_worker_thread_.Start()) { | |
| 811 LOG(ERROR) << "Could not start worker thread"; | |
| 812 signaling_thread_ = NULL; | |
| 813 return false; | |
| 814 } | |
| 815 } | |
| 816 base::WaitableEvent event(true, false); | |
| 817 chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind( | |
| 818 &MediaStreamDependencyFactory::InitializeWorkerThread, | |
| 819 base::Unretained(this), | |
| 820 &worker_thread_, | |
| 821 &event)); | |
| 822 event.Wait(); | |
| 823 DCHECK(worker_thread_); | |
| 824 } | |
| 825 | |
| 826 if (!network_manager_) { | |
| 827 base::WaitableEvent event(true, false); | |
| 828 chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind( | |
| 829 &MediaStreamDependencyFactory::CreateIpcNetworkManagerOnWorkerThread, | |
| 830 base::Unretained(this), | |
| 831 &event)); | |
| 832 event.Wait(); | |
| 833 } | |
| 834 | |
| 835 if (!socket_factory_) { | |
| 836 socket_factory_.reset( | |
| 837 new IpcPacketSocketFactory(p2p_socket_dispatcher_.get())); | |
| 838 } | |
| 839 | |
| 840 // Init SSL, which will be needed by PeerConnection. | |
| 841 #if defined(USE_OPENSSL) | |
| 842 if (!talk_base::InitializeSSL()) { | |
| 843 LOG(ERROR) << "Failed on InitializeSSL."; | |
| 844 return false; | |
| 845 } | |
| 846 #else | |
| 847 // TODO(ronghuawu): Replace this call with InitializeSSL. | |
| 848 net::EnsureNSSSSLInit(); | |
| 849 #endif | |
| 850 | |
| 851 if (!CreatePeerConnectionFactory()) { | |
| 852 LOG(ERROR) << "Could not create PeerConnection factory"; | |
| 853 return false; | |
| 854 } | |
| 855 return true; | |
| 856 } | |
| 857 | |
| 858 void MediaStreamDependencyFactory::CleanupPeerConnectionFactory() { | 721 void MediaStreamDependencyFactory::CleanupPeerConnectionFactory() { |
| 859 pc_factory_ = NULL; | 722 pc_factory_ = NULL; |
| 860 if (network_manager_) { | 723 if (network_manager_) { |
| 861 // The network manager needs to free its resources on the thread they were | 724 // The network manager needs to free its resources on the thread they were |
| 862 // created, which is the worked thread. | 725 // created, which is the worked thread. |
| 863 if (chrome_worker_thread_.IsRunning()) { | 726 if (chrome_worker_thread_.IsRunning()) { |
| 864 chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind( | 727 chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind( |
| 865 &MediaStreamDependencyFactory::DeleteIpcNetworkManager, | 728 &MediaStreamDependencyFactory::DeleteIpcNetworkManager, |
| 866 base::Unretained(this))); | 729 base::Unretained(this))); |
| 867 // Stopping the thread will wait until all tasks have been | 730 // Stopping the thread will wait until all tasks have been |
| (...skipping 89 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 957 } | 820 } |
| 958 | 821 |
| 959 void MediaStreamDependencyFactory::StartAecDump( | 822 void MediaStreamDependencyFactory::StartAecDump( |
| 960 const base::PlatformFile& aec_dump_file) { | 823 const base::PlatformFile& aec_dump_file) { |
| 961 FILE* aec_dump_file_stream = base::FdopenPlatformFile(aec_dump_file, "w"); | 824 FILE* aec_dump_file_stream = base::FdopenPlatformFile(aec_dump_file, "w"); |
| 962 if (!aec_dump_file_stream) { | 825 if (!aec_dump_file_stream) { |
| 963 VLOG(1) << "Could not open AEC dump file."; | 826 VLOG(1) << "Could not open AEC dump file."; |
| 964 base::ClosePlatformFile(aec_dump_file); | 827 base::ClosePlatformFile(aec_dump_file); |
| 965 } else { | 828 } else { |
| 966 // |pc_factory_| takes ownership of |aec_dump_file_stream|. | 829 // |pc_factory_| takes ownership of |aec_dump_file_stream|. |
| 967 pc_factory_->StartAecDump(aec_dump_file_stream); | 830 pc_factory()->StartAecDump(aec_dump_file_stream); |
| 968 } | 831 } |
| 969 } | 832 } |
| 970 | 833 |
| 971 } // namespace content | 834 } // namespace content |
| OLD | NEW |