Chromium Code Reviews

Side by Side Diff: content/renderer/media/media_stream_dependency_factory.cc

Issue 131763002: Adds MediaStreamSource, MediaStreamAudioSource and MediaStreamVideoCaptureDeviceSource (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src
Patch Set: Move initialization of the audio source object to MediaStreamAudioSource::AddTrack and check result. Created 6 years, 11 months ago
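For orientation before the diff: the patch replaces the asynchronous SourceStateObserver flow with a synchronous, bool-returning InitializeMediaStreamAudioSource() on the dependency factory, so the code that adds an audio track can check the result directly. A minimal sketch of how a caller might use the new entry point; the wrapper function and its failure handling are assumptions for illustration, not code from this patch:

// Hypothetical caller; only InitializeMediaStreamAudioSource() and the
// argument types come from the diff below.
bool AddLocalAudioTrack(MediaStreamDependencyFactory* factory,
                        int render_view_id,
                        const blink::WebMediaConstraints& audio_constraints,
                        MediaStreamAudioSource* source_data) {
  // The factory now reports failure synchronously instead of notifying an
  // observer, so the caller can mark the source as ended right away.
  if (!factory->InitializeMediaStreamAudioSource(render_view_id,
                                                 audio_constraints,
                                                 source_data)) {
    DLOG(WARNING) << "Could not initialize the audio source.";
    return false;
  }
  return true;
}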
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "content/renderer/media/media_stream_dependency_factory.h" 5 #include "content/renderer/media/media_stream_dependency_factory.h"
6 6
7 #include <vector> 7 #include <vector>
8 8
9 #include "base/command_line.h" 9 #include "base/command_line.h"
10 #include "base/strings/utf_string_conversions.h" 10 #include "base/strings/utf_string_conversions.h"
11 #include "base/synchronization/waitable_event.h" 11 #include "base/synchronization/waitable_event.h"
12 #include "content/common/media/media_stream_messages.h" 12 #include "content/common/media/media_stream_messages.h"
13 #include "content/public/common/content_switches.h" 13 #include "content/public/common/content_switches.h"
14 #include "content/renderer/media/media_stream_audio_processor_options.h" 14 #include "content/renderer/media/media_stream_audio_processor_options.h"
15 #include "content/renderer/media/media_stream_source_extra_data.h" 15 #include "content/renderer/media/media_stream_audio_source.h"
16 #include "content/renderer/media/media_stream_track_extra_data.h" 16 #include "content/renderer/media/media_stream_track_extra_data.h"
17 #include "content/renderer/media/media_stream_video_source.h"
17 #include "content/renderer/media/media_stream_video_track.h" 18 #include "content/renderer/media/media_stream_video_track.h"
18 #include "content/renderer/media/peer_connection_identity_service.h" 19 #include "content/renderer/media/peer_connection_identity_service.h"
19 #include "content/renderer/media/rtc_media_constraints.h" 20 #include "content/renderer/media/rtc_media_constraints.h"
20 #include "content/renderer/media/rtc_peer_connection_handler.h" 21 #include "content/renderer/media/rtc_peer_connection_handler.h"
21 #include "content/renderer/media/rtc_video_capturer.h" 22 #include "content/renderer/media/rtc_video_capturer.h"
22 #include "content/renderer/media/rtc_video_decoder_factory.h" 23 #include "content/renderer/media/rtc_video_decoder_factory.h"
23 #include "content/renderer/media/rtc_video_encoder_factory.h" 24 #include "content/renderer/media/rtc_video_encoder_factory.h"
24 #include "content/renderer/media/webaudio_capturer_source.h" 25 #include "content/renderer/media/webaudio_capturer_source.h"
25 #include "content/renderer/media/webrtc_audio_device_impl.h" 26 #include "content/renderer/media/webrtc_audio_device_impl.h"
26 #include "content/renderer/media/webrtc_local_audio_track.h" 27 #include "content/renderer/media/webrtc_local_audio_track.h"
(...skipping 85 matching lines...)
112 private: 113 private:
113 scoped_refptr<P2PSocketDispatcher> socket_dispatcher_; 114 scoped_refptr<P2PSocketDispatcher> socket_dispatcher_;
114 // |network_manager_| and |socket_factory_| are a weak references, owned by 115 // |network_manager_| and |socket_factory_| are a weak references, owned by
115 // MediaStreamDependencyFactory. 116 // MediaStreamDependencyFactory.
116 talk_base::NetworkManager* network_manager_; 117 talk_base::NetworkManager* network_manager_;
117 talk_base::PacketSocketFactory* socket_factory_; 118 talk_base::PacketSocketFactory* socket_factory_;
118 // Raw ptr to the WebFrame that created the P2PPortAllocatorFactory. 119 // Raw ptr to the WebFrame that created the P2PPortAllocatorFactory.
119 blink::WebFrame* web_frame_; 120 blink::WebFrame* web_frame_;
120 }; 121 };
121 122
122 // SourceStateObserver is a help class used for observing the startup state
123 // transition of webrtc media sources such as a camera or microphone.
124 // An instance of the object deletes itself after use.
125 // Usage:
126 // 1. Create an instance of the object with the blink::WebMediaStream
127 // the observed sources belongs to a callback.
128 // 2. Add the sources to the observer using AddSource.
129 // 3. Call StartObserving()
130 // 4. The callback will be triggered when all sources have transitioned from
131 // webrtc::MediaSourceInterface::kInitializing.
132 class SourceStateObserver : public webrtc::ObserverInterface,
133 public base::NonThreadSafe {
134 public:
135 SourceStateObserver(
136 blink::WebMediaStream* web_stream,
137 const MediaStreamDependencyFactory::MediaSourcesCreatedCallback& callback)
138 : web_stream_(web_stream),
139 ready_callback_(callback),
140 live_(true) {
141 }
142
143 void AddSource(webrtc::MediaSourceInterface* source) {
144 DCHECK(CalledOnValidThread());
145 switch (source->state()) {
146 case webrtc::MediaSourceInterface::kInitializing:
147 sources_.push_back(source);
148 source->RegisterObserver(this);
149 break;
150 case webrtc::MediaSourceInterface::kLive:
151 // The source is already live so we don't need to wait for it.
152 break;
153 case webrtc::MediaSourceInterface::kEnded:
154 // The source have already failed.
155 live_ = false;
156 break;
157 default:
158 NOTREACHED();
159 }
160 }
161
162 void StartObservering() {
163 DCHECK(CalledOnValidThread());
164 CheckIfSourcesAreLive();
165 }
166
167 virtual void OnChanged() OVERRIDE {
168 DCHECK(CalledOnValidThread());
169 CheckIfSourcesAreLive();
170 }
171
172 private:
173 void CheckIfSourcesAreLive() {
174 ObservedSources::iterator it = sources_.begin();
175 while (it != sources_.end()) {
176 if ((*it)->state() != webrtc::MediaSourceInterface::kInitializing) {
177 live_ &= (*it)->state() == webrtc::MediaSourceInterface::kLive;
178 (*it)->UnregisterObserver(this);
179 it = sources_.erase(it);
180 } else {
181 ++it;
182 }
183 }
184 if (sources_.empty()) {
185 ready_callback_.Run(web_stream_, live_);
186 delete this;
187 }
188 }
189
190 blink::WebMediaStream* web_stream_;
191 MediaStreamDependencyFactory::MediaSourcesCreatedCallback ready_callback_;
192 bool live_;
193 typedef std::vector<scoped_refptr<webrtc::MediaSourceInterface> >
194 ObservedSources;
195 ObservedSources sources_;
196 };
197
198 MediaStreamDependencyFactory::MediaStreamDependencyFactory( 123 MediaStreamDependencyFactory::MediaStreamDependencyFactory(
199 P2PSocketDispatcher* p2p_socket_dispatcher) 124 P2PSocketDispatcher* p2p_socket_dispatcher)
200 : network_manager_(NULL), 125 : network_manager_(NULL),
201 p2p_socket_dispatcher_(p2p_socket_dispatcher), 126 p2p_socket_dispatcher_(p2p_socket_dispatcher),
202 signaling_thread_(NULL), 127 signaling_thread_(NULL),
203 worker_thread_(NULL), 128 worker_thread_(NULL),
204 chrome_worker_thread_("Chrome_libJingle_WorkerThread"), 129 chrome_worker_thread_("Chrome_libJingle_WorkerThread"),
205 aec_dump_file_(base::kInvalidPlatformFileValue) { 130 aec_dump_file_(base::kInvalidPlatformFileValue) {
206 } 131 }
207 132
208 MediaStreamDependencyFactory::~MediaStreamDependencyFactory() { 133 MediaStreamDependencyFactory::~MediaStreamDependencyFactory() {
209 CleanupPeerConnectionFactory(); 134 CleanupPeerConnectionFactory();
210 if (aec_dump_file_ != base::kInvalidPlatformFileValue) 135 if (aec_dump_file_ != base::kInvalidPlatformFileValue)
211 base::ClosePlatformFile(aec_dump_file_); 136 base::ClosePlatformFile(aec_dump_file_);
212 } 137 }
213 138
214 blink::WebRTCPeerConnectionHandler* 139 blink::WebRTCPeerConnectionHandler*
215 MediaStreamDependencyFactory::CreateRTCPeerConnectionHandler( 140 MediaStreamDependencyFactory::CreateRTCPeerConnectionHandler(
216 blink::WebRTCPeerConnectionHandlerClient* client) { 141 blink::WebRTCPeerConnectionHandlerClient* client) {
217 // Save histogram data so we can see how much PeerConnetion is used. 142 // Save histogram data so we can see how much PeerConnetion is used.
218 // The histogram counts the number of calls to the JS API 143 // The histogram counts the number of calls to the JS API
219 // webKitRTCPeerConnection. 144 // webKitRTCPeerConnection.
220 UpdateWebRTCMethodCount(WEBKIT_RTC_PEER_CONNECTION); 145 UpdateWebRTCMethodCount(WEBKIT_RTC_PEER_CONNECTION);
221 146
222 if (!EnsurePeerConnectionFactory())
223 return NULL;
224
225 return new RTCPeerConnectionHandler(client, this); 147 return new RTCPeerConnectionHandler(client, this);
226 } 148 }
227 149
228 void MediaStreamDependencyFactory::CreateNativeMediaSources( 150 bool MediaStreamDependencyFactory::InitializeMediaStreamAudioSource(
229 int render_view_id, 151 int render_view_id,
230 const blink::WebMediaConstraints& audio_constraints, 152 const blink::WebMediaConstraints& audio_constraints,
231 const blink::WebMediaConstraints& video_constraints, 153 MediaStreamAudioSource* source_data) {
232 blink::WebMediaStream* web_stream, 154 DVLOG(1) << "InitializeMediaStreamAudioSources()";
233 const MediaSourcesCreatedCallback& sources_created) {
234 DVLOG(1) << "MediaStreamDependencyFactory::CreateNativeMediaSources()";
235 if (!EnsurePeerConnectionFactory()) {
236 sources_created.Run(web_stream, false);
237 return;
238 }
239
240 // |source_observer| clean up itself when it has completed
241 // source_observer->StartObservering.
242 SourceStateObserver* source_observer =
243 new SourceStateObserver(web_stream, sources_created);
244
245 // Create local video sources.
246 RTCMediaConstraints native_video_constraints(video_constraints);
247 blink::WebVector<blink::WebMediaStreamTrack> video_tracks;
248 web_stream->videoTracks(video_tracks);
249 for (size_t i = 0; i < video_tracks.size(); ++i) {
250 const blink::WebMediaStreamSource& source = video_tracks[i].source();
251 MediaStreamSourceExtraData* source_data =
252 static_cast<MediaStreamSourceExtraData*>(source.extraData());
253
254 // Check if the source has already been created. This happens when the same
255 // source is used in multiple MediaStreams as a result of calling
256 // getUserMedia.
257 if (source_data->video_source())
258 continue;
259
260 const bool is_screencast =
261 source_data->device_info().device.type == MEDIA_TAB_VIDEO_CAPTURE ||
262 source_data->device_info().device.type == MEDIA_DESKTOP_VIDEO_CAPTURE;
263 source_data->SetVideoSource(
264 CreateLocalVideoSource(source_data->device_info().session_id,
265 is_screencast,
266 &native_video_constraints).get());
267 source_observer->AddSource(source_data->video_source());
268 }
269 155
270 // Do additional source initialization if the audio source is a valid 156 // Do additional source initialization if the audio source is a valid
271 // microphone or tab audio. 157 // microphone or tab audio.
272 RTCMediaConstraints native_audio_constraints(audio_constraints); 158 RTCMediaConstraints native_audio_constraints(audio_constraints);
273 ApplyFixedAudioConstraints(&native_audio_constraints); 159 ApplyFixedAudioConstraints(&native_audio_constraints);
274 blink::WebVector<blink::WebMediaStreamTrack> audio_tracks;
275 web_stream->audioTracks(audio_tracks);
276 for (size_t i = 0; i < audio_tracks.size(); ++i) {
277 const blink::WebMediaStreamSource& source = audio_tracks[i].source();
278 MediaStreamSourceExtraData* source_data =
279 static_cast<MediaStreamSourceExtraData*>(source.extraData());
280 160
281 // Check if the source has already been created. This happens when the same 161 // TODO(xians): Create a new capturer for difference microphones when we
Jói 2014/01/27 22:14:57 "difference microphones"? Do you mean "different m
perkj_chrome 2014/01/28 08:31:43 Copy paste - but I guess so... or wait... it looks
282 // source is used in multiple MediaStreams as a result of calling 162 // support multiple microphones. See issue crbug/262117 .
283 // getUserMedia. 163 StreamDeviceInfo device_info = source_data->device_info();
284 if (source_data->local_audio_source()) 164 RTCMediaConstraints constraints = native_audio_constraints;
285 continue;
286 165
287 // TODO(xians): Create a new capturer for difference microphones when we 166 // If any platform effects are available, check them against the
288 // support multiple microphones. See issue crbug/262117 . 167 // constraints. Disable effects to match false constraints, but if a
289 StreamDeviceInfo device_info = source_data->device_info(); 168 // constraint is true, set the constraint to false to later disable the
290 RTCMediaConstraints constraints = native_audio_constraints; 169 // software effect.
291 170 int effects = device_info.device.input.effects;
292 // If any platform effects are available, check them against the 171 if (effects != media::AudioParameters::NO_EFFECTS) {
293 // constraints. Disable effects to match false constraints, but if a 172 for (size_t i = 0; i < ARRAYSIZE_UNSAFE(kConstraintEffectMap); ++i) {
294 // constraint is true, set the constraint to false to later disable the 173 bool value;
295 // software effect. 174 if (!webrtc::FindConstraint(&constraints,
296 int effects = device_info.device.input.effects; 175 kConstraintEffectMap[i].constraint, &value,
297 if (effects != media::AudioParameters::NO_EFFECTS) { 176 NULL) || !value) {
298 for (size_t i = 0; i < ARRAYSIZE_UNSAFE(kConstraintEffectMap); ++i) { 177 // If the constraint is false, or does not exist, disable the platform
299 bool value; 178 // effect.
300 if (!webrtc::FindConstraint(&constraints, 179 effects &= ~kConstraintEffectMap[i].effect;
301 kConstraintEffectMap[i].constraint, &value, NULL) || !value) { 180 DVLOG(1) << "Disabling constraint: "
302 // If the constraint is false, or does not exist, disable the platform 181 << kConstraintEffectMap[i].constraint;
303 // effect. 182 } else if (effects & kConstraintEffectMap[i].effect) {
304 effects &= ~kConstraintEffectMap[i].effect; 183 // If the constraint is true, leave the platform effect enabled, and
305 DVLOG(1) << "Disabling constraint: " 184 // set the constraint to false to later disable the software effect.
306 << kConstraintEffectMap[i].constraint; 185 constraints.AddMandatory(kConstraintEffectMap[i].constraint,
307 } else if (effects & kConstraintEffectMap[i].effect) { 186 webrtc::MediaConstraintsInterface::kValueFalse,
308 // If the constraint is true, leave the platform effect enabled, and 187 true);
309 // set the constraint to false to later disable the software effect. 188 DVLOG(1) << "Disabling platform effect: "
310 constraints.AddMandatory(kConstraintEffectMap[i].constraint, 189 << kConstraintEffectMap[i].constraint;
311 webrtc::MediaConstraintsInterface::kValueFalse, true);
312 DVLOG(1) << "Disabling platform effect: "
313 << kConstraintEffectMap[i].constraint;
314 }
315 } 190 }
316 device_info.device.input.effects = effects;
317 } 191 }
318 192 device_info.device.input.effects = effects;
319 scoped_refptr<WebRtcAudioCapturer> capturer(
320 CreateAudioCapturer(render_view_id, device_info, audio_constraints));
321 if (!capturer.get()) {
322 DLOG(WARNING) << "Failed to create the capturer for device "
323 << device_info.device.id;
324 sources_created.Run(web_stream, false);
325 // TODO(xians): Don't we need to check if source_observer is observing
326 // something? If not, then it looks like we have a leak here.
327 // OTOH, if it _is_ observing something, then the callback might
328 // be called multiple times which is likely also a bug.
329 return;
330 }
331 source_data->SetAudioCapturer(capturer);
332
333 // Creates a LocalAudioSource object which holds audio options.
334 // TODO(xians): The option should apply to the track instead of the source.
335 source_data->SetLocalAudioSource(
336 CreateLocalAudioSource(&constraints).get());
337 source_observer->AddSource(source_data->local_audio_source());
338 } 193 }
339 194
340 source_observer->StartObservering(); 195 scoped_refptr<WebRtcAudioCapturer> capturer(
196 CreateAudioCapturer(render_view_id, device_info, audio_constraints));
197 if (!capturer.get()) {
198 DLOG(WARNING) << "Failed to create the capturer for device "
199 << device_info.device.id;
200 // TODO(xians): Don't we need to check if source_observer is observing
201 // something? If not, then it looks like we have a leak here.
202 // OTOH, if it _is_ observing something, then the callback might
203 // be called multiple times which is likely also a bug.
204 return false;
205 }
206 source_data->SetAudioCapturer(capturer);
207
208 // Creates a LocalAudioSource object which holds audio options.
209 // TODO(xians): The option should apply to the track instead of the source.
210 // TODO(perkj): Move audio constraints parsing to Chrome.
211 // Currently there are a few constraints that are parsed by libjingle and
212 // the state is set to ended if parsing fails.
213 scoped_refptr<webrtc::AudioSourceInterface> rtc_source(
214 CreateLocalAudioSource(&constraints).get());
215 if (rtc_source->state() != webrtc::MediaSourceInterface::kLive) {
216 DLOG(WARNING) << "Failed to create rtc LocalAudioSource.";
217 return false;
218 }
219 source_data->SetLocalAudioSource(rtc_source);
220 return true;
221 }
222
223 cricket::VideoCapturer* MediaStreamDependencyFactory::CreateVideoCapturer(
224 const StreamDeviceInfo& info) {
225 bool is_screeencast =
226 info.device.type == MEDIA_TAB_VIDEO_CAPTURE ||
227 info.device.type == MEDIA_DESKTOP_VIDEO_CAPTURE;
228 return new RtcVideoCapturer(info.session_id, is_screeencast);
341 } 229 }
342 230
343 void MediaStreamDependencyFactory::CreateNativeLocalMediaStream( 231 void MediaStreamDependencyFactory::CreateNativeLocalMediaStream(
344 blink::WebMediaStream* web_stream) { 232 blink::WebMediaStream* web_stream) {
345 DVLOG(1) << "MediaStreamDependencyFactory::CreateNativeLocalMediaStream()"; 233 DVLOG(1) << "MediaStreamDependencyFactory::CreateNativeLocalMediaStream()";
346 if (!EnsurePeerConnectionFactory()) {
347 DVLOG(1) << "EnsurePeerConnectionFactory() failed!";
348 return;
349 }
350 234
351 std::string label = base::UTF16ToUTF8(web_stream->id()); 235 std::string label = base::UTF16ToUTF8(web_stream->id());
352 scoped_refptr<webrtc::MediaStreamInterface> native_stream = 236 scoped_refptr<webrtc::MediaStreamInterface> native_stream =
353 CreateLocalMediaStream(label); 237 CreateLocalMediaStream(label);
354 MediaStreamExtraData* extra_data = 238 MediaStreamExtraData* extra_data =
355 new MediaStreamExtraData(native_stream.get(), true); 239 new MediaStreamExtraData(native_stream.get(), true);
356 web_stream->setExtraData(extra_data); 240 web_stream->setExtraData(extra_data);
357 241
358 // Add audio tracks. 242 // Add audio tracks.
359 blink::WebVector<blink::WebMediaStreamTrack> audio_tracks; 243 blink::WebVector<blink::WebMediaStreamTrack> audio_tracks;
(...skipping 18 matching lines...)
378 MediaStreamExtraData* extra_data = 262 MediaStreamExtraData* extra_data =
379 static_cast<MediaStreamExtraData*>(web_stream->extraData()); 263 static_cast<MediaStreamExtraData*>(web_stream->extraData());
380 extra_data->SetLocalStreamStopCallback(stream_stop); 264 extra_data->SetLocalStreamStopCallback(stream_stop);
381 } 265 }
382 266
383 scoped_refptr<webrtc::AudioTrackInterface> 267 scoped_refptr<webrtc::AudioTrackInterface>
384 MediaStreamDependencyFactory::CreateNativeAudioMediaStreamTrack( 268 MediaStreamDependencyFactory::CreateNativeAudioMediaStreamTrack(
385 const blink::WebMediaStreamTrack& track) { 269 const blink::WebMediaStreamTrack& track) {
386 blink::WebMediaStreamSource source = track.source(); 270 blink::WebMediaStreamSource source = track.source();
387 DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeAudio); 271 DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeAudio);
388 MediaStreamSourceExtraData* source_data = 272 MediaStreamAudioSource* source_data =
389 static_cast<MediaStreamSourceExtraData*>(source.extraData()); 273 static_cast<MediaStreamAudioSource*>(source.extraData());
390 274
391 // In the future the constraints will belong to the track itself, but 275 // In the future the constraints will belong to the track itself, but
392 // right now they're on the source, so we fetch them from there. 276 // right now they're on the source, so we fetch them from there.
393 RTCMediaConstraints track_constraints(source.constraints()); 277 RTCMediaConstraints track_constraints(source.constraints());
394 278
395 // Apply default audio constraints that enable echo cancellation, 279 // Apply default audio constraints that enable echo cancellation,
396 // automatic gain control, noise suppression and high-pass filter. 280 // automatic gain control, noise suppression and high-pass filter.
397 ApplyFixedAudioConstraints(&track_constraints); 281 ApplyFixedAudioConstraints(&track_constraints);
398 282
399 scoped_refptr<WebAudioCapturerSource> webaudio_source; 283 scoped_refptr<WebAudioCapturerSource> webaudio_source;
400 if (!source_data) { 284 if (!source_data) {
401 if (source.requiresAudioConsumer()) { 285 if (source.requiresAudioConsumer()) {
402 // We're adding a WebAudio MediaStream. 286 // We're adding a WebAudio MediaStream.
403 // Create a specific capturer for each WebAudio consumer. 287 // Create a specific capturer for each WebAudio consumer.
404 webaudio_source = CreateWebAudioSource(&source, track_constraints); 288 webaudio_source = CreateWebAudioSource(&source, track_constraints);
405 source_data = 289 source_data =
406 static_cast<MediaStreamSourceExtraData*>(source.extraData()); 290 static_cast<MediaStreamAudioSource*>(source.extraData());
407 } else { 291 } else {
408 // TODO(perkj): Implement support for sources from 292 // TODO(perkj): Implement support for sources from
409 // remote MediaStreams. 293 // remote MediaStreams.
410 NOTIMPLEMENTED(); 294 NOTIMPLEMENTED();
411 return NULL; 295 return NULL;
412 } 296 }
413 } 297 }
414 298
415 scoped_refptr<webrtc::AudioTrackInterface> audio_track( 299 scoped_refptr<webrtc::AudioTrackInterface> audio_track(
416 CreateLocalAudioTrack(track.id().utf8(), 300 CreateLocalAudioTrack(track.id().utf8(),
417 source_data->GetAudioCapturer(), 301 source_data->GetAudioCapturer(),
418 webaudio_source.get(), 302 webaudio_source.get(),
419 source_data->local_audio_source())); 303 source_data->local_audio_source()));
420 AddNativeTrackToBlinkTrack(audio_track.get(), track, true); 304 AddNativeTrackToBlinkTrack(audio_track.get(), track, true);
421 305
422 audio_track->set_enabled(track.isEnabled()); 306 audio_track->set_enabled(track.isEnabled());
423 307
424 // Pass the pointer of the source provider to the blink audio track. 308 // Pass the pointer of the source provider to the blink audio track.
425 blink::WebMediaStreamTrack writable_track = track; 309 blink::WebMediaStreamTrack writable_track = track;
426 writable_track.setSourceProvider(static_cast<WebRtcLocalAudioTrack*>( 310 writable_track.setSourceProvider(static_cast<WebRtcLocalAudioTrack*>(
427 audio_track.get())->audio_source_provider()); 311 audio_track.get())->audio_source_provider());
428 312
429 return audio_track; 313 return audio_track;
430 } 314 }
431 315
432 scoped_refptr<webrtc::VideoTrackInterface> 316 scoped_refptr<webrtc::VideoTrackInterface>
433 MediaStreamDependencyFactory::CreateNativeVideoMediaStreamTrack( 317 MediaStreamDependencyFactory::CreateNativeVideoMediaStreamTrack(
434 const blink::WebMediaStreamTrack& track) { 318 const blink::WebMediaStreamTrack& track) {
319 DCHECK(track.extraData() == NULL);
435 blink::WebMediaStreamSource source = track.source(); 320 blink::WebMediaStreamSource source = track.source();
436 DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeVideo); 321 DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeVideo);
437 MediaStreamSourceExtraData* source_data = 322
438 static_cast<MediaStreamSourceExtraData*>(source.extraData()); 323 MediaStreamVideoSource* source_data =
324 static_cast<MediaStreamVideoSource*>(source.extraData());
439 325
440 if (!source_data) { 326 if (!source_data) {
441 // TODO(perkj): Implement support for sources from 327 // TODO(perkj): Implement support for sources from
442 // remote MediaStreams. 328 // remote MediaStreams.
443 NOTIMPLEMENTED(); 329 NOTIMPLEMENTED();
444 return NULL; 330 return NULL;
445 } 331 }
446 332
447 std::string track_id = base::UTF16ToUTF8(track.id()); 333 // Create native track from the source.
448 scoped_refptr<webrtc::VideoTrackInterface> video_track( 334 scoped_refptr<webrtc::VideoTrackInterface> webrtc_track =
449 CreateLocalVideoTrack(track_id, source_data->video_source())); 335 CreateLocalVideoTrack(track.id().utf8(), source_data->GetAdapter());
450 AddNativeTrackToBlinkTrack(video_track.get(), track, true);
451 336
452 video_track->set_enabled(track.isEnabled()); 337 bool local_track = true;
338 AddNativeTrackToBlinkTrack(webrtc_track, track, local_track);
453 339
454 return video_track; 340 webrtc_track->set_enabled(track.isEnabled());
341
342 return webrtc_track;
455 } 343 }
456 344
457 void MediaStreamDependencyFactory::CreateNativeMediaStreamTrack( 345 void MediaStreamDependencyFactory::CreateNativeMediaStreamTrack(
458 const blink::WebMediaStreamTrack& track) { 346 const blink::WebMediaStreamTrack& track) {
459 DCHECK(!track.isNull() && !track.extraData()); 347 DCHECK(!track.isNull() && !track.extraData());
460 DCHECK(!track.source().isNull()); 348 DCHECK(!track.source().isNull());
461 349
462 switch (track.source().type()) { 350 switch (track.source().type()) {
463 case blink::WebMediaStreamSource::TypeAudio: 351 case blink::WebMediaStreamSource::TypeAudio:
464 CreateNativeAudioMediaStreamTrack(track); 352 CreateNativeAudioMediaStreamTrack(track);
(...skipping 91 matching lines...)
556 native_stream->FindVideoTrack(track_id)); 444 native_stream->FindVideoTrack(track_id));
557 } 445 }
558 return false; 446 return false;
559 } 447 }
560 448
561 scoped_refptr<webrtc::VideoSourceInterface> 449 scoped_refptr<webrtc::VideoSourceInterface>
562 MediaStreamDependencyFactory::CreateVideoSource( 450 MediaStreamDependencyFactory::CreateVideoSource(
563 cricket::VideoCapturer* capturer, 451 cricket::VideoCapturer* capturer,
564 const webrtc::MediaConstraintsInterface* constraints) { 452 const webrtc::MediaConstraintsInterface* constraints) {
565 scoped_refptr<webrtc::VideoSourceInterface> source = 453 scoped_refptr<webrtc::VideoSourceInterface> source =
566 pc_factory_->CreateVideoSource(capturer, constraints).get(); 454 pc_factory()->CreateVideoSource(capturer, constraints).get();
567 return source; 455 return source;
568 } 456 }
569 457
570 bool MediaStreamDependencyFactory::CreatePeerConnectionFactory() { 458 const scoped_refptr<webrtc::PeerConnectionFactoryInterface>&
459 MediaStreamDependencyFactory::pc_factory() {
Jói 2014/01/27 22:14:57 This is not just a simple getter since it creates
perkj_chrome 2014/01/28 08:31:43 Done.
460 if (!pc_factory_)
461 CreatePeerConnectionFactory();
462 CHECK(pc_factory_);
463 return pc_factory_;
464 }
465
466 void MediaStreamDependencyFactory::CreatePeerConnectionFactory() {
571 DCHECK(!pc_factory_.get()); 467 DCHECK(!pc_factory_.get());
572 DCHECK(!audio_device_.get()); 468 DCHECK(!audio_device_.get());
469 DCHECK(!signaling_thread_);
470 DCHECK(!worker_thread_);
471 DCHECK(!network_manager_);
472 DCHECK(!socket_factory_);
473 DCHECK(!chrome_worker_thread_.IsRunning());
474
573 DVLOG(1) << "MediaStreamDependencyFactory::CreatePeerConnectionFactory()"; 475 DVLOG(1) << "MediaStreamDependencyFactory::CreatePeerConnectionFactory()";
574 476
477 jingle_glue::JingleThreadWrapper::EnsureForCurrentMessageLoop();
478 jingle_glue::JingleThreadWrapper::current()->set_send_allowed(true);
479 signaling_thread_ = jingle_glue::JingleThreadWrapper::current();
480 CHECK(signaling_thread_);
481
482 chrome_worker_thread_.Start();
483
484 base::WaitableEvent start_worker_event(true, false);
485 chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
486 &MediaStreamDependencyFactory::InitializeWorkerThread,
487 base::Unretained(this),
488 &worker_thread_,
489 &start_worker_event));
490 start_worker_event.Wait();
491 CHECK(worker_thread_);
492
493 base::WaitableEvent create_network_manager_event(true, false);
494 chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
495 &MediaStreamDependencyFactory::CreateIpcNetworkManagerOnWorkerThread,
496 base::Unretained(this),
497 &create_network_manager_event));
498 create_network_manager_event.Wait();
499
500 socket_factory_.reset(
501 new IpcPacketSocketFactory(p2p_socket_dispatcher_.get()));
502
503 // Init SSL, which will be needed by PeerConnection.
504 #if defined(USE_OPENSSL)
505 if (!talk_base::InitializeSSL()) {
506 LOG(ERROR) << "Failed on InitializeSSL.";
507 NOTREACHED();
508 return;
509 }
510 #else
511 // TODO(ronghuawu): Replace this call with InitializeSSL.
512 net::EnsureNSSSSLInit();
513 #endif
514
575 scoped_ptr<cricket::WebRtcVideoDecoderFactory> decoder_factory; 515 scoped_ptr<cricket::WebRtcVideoDecoderFactory> decoder_factory;
576 scoped_ptr<cricket::WebRtcVideoEncoderFactory> encoder_factory; 516 scoped_ptr<cricket::WebRtcVideoEncoderFactory> encoder_factory;
577 517
578 const CommandLine* cmd_line = CommandLine::ForCurrentProcess(); 518 const CommandLine* cmd_line = CommandLine::ForCurrentProcess();
579 scoped_refptr<RendererGpuVideoAcceleratorFactories> gpu_factories = 519 scoped_refptr<RendererGpuVideoAcceleratorFactories> gpu_factories =
580 RenderThreadImpl::current()->GetGpuFactories(); 520 RenderThreadImpl::current()->GetGpuFactories();
581 if (!cmd_line->HasSwitch(switches::kDisableWebRtcHWDecoding)) { 521 if (!cmd_line->HasSwitch(switches::kDisableWebRtcHWDecoding)) {
582 if (gpu_factories) 522 if (gpu_factories)
583 decoder_factory.reset(new RTCVideoDecoderFactory(gpu_factories)); 523 decoder_factory.reset(new RTCVideoDecoderFactory(gpu_factories));
584 } 524 }
(...skipping 12 matching lines...)
597 537
598 scoped_refptr<WebRtcAudioDeviceImpl> audio_device( 538 scoped_refptr<WebRtcAudioDeviceImpl> audio_device(
599 new WebRtcAudioDeviceImpl()); 539 new WebRtcAudioDeviceImpl());
600 540
601 scoped_refptr<webrtc::PeerConnectionFactoryInterface> factory( 541 scoped_refptr<webrtc::PeerConnectionFactoryInterface> factory(
602 webrtc::CreatePeerConnectionFactory(worker_thread_, 542 webrtc::CreatePeerConnectionFactory(worker_thread_,
603 signaling_thread_, 543 signaling_thread_,
604 audio_device.get(), 544 audio_device.get(),
605 encoder_factory.release(), 545 encoder_factory.release(),
606 decoder_factory.release())); 546 decoder_factory.release()));
607 if (!factory.get()) { 547 CHECK(factory);
608 return false;
609 }
610 548
611 audio_device_ = audio_device; 549 audio_device_ = audio_device;
612 pc_factory_ = factory; 550 pc_factory_ = factory;
613 webrtc::PeerConnectionFactoryInterface::Options factory_options; 551 webrtc::PeerConnectionFactoryInterface::Options factory_options;
614 factory_options.disable_sctp_data_channels = 552 factory_options.disable_sctp_data_channels =
615 cmd_line->HasSwitch(switches::kDisableSCTPDataChannels); 553 cmd_line->HasSwitch(switches::kDisableSCTPDataChannels);
616 factory_options.disable_encryption = 554 factory_options.disable_encryption =
617 cmd_line->HasSwitch(switches::kDisableWebRtcEncryption); 555 cmd_line->HasSwitch(switches::kDisableWebRtcEncryption);
618 pc_factory_->SetOptions(factory_options); 556 pc_factory_->SetOptions(factory_options);
619 557
620 // |aec_dump_file| will be invalid when dump is not enabled. 558 // |aec_dump_file| will be invalid when dump is not enabled.
621 if (aec_dump_file_ != base::kInvalidPlatformFileValue) { 559 if (aec_dump_file_ != base::kInvalidPlatformFileValue) {
622 StartAecDump(aec_dump_file_); 560 StartAecDump(aec_dump_file_);
623 aec_dump_file_ = base::kInvalidPlatformFileValue; 561 aec_dump_file_ = base::kInvalidPlatformFileValue;
624 } 562 }
625
626 return true;
627 } 563 }
628 564
629 bool MediaStreamDependencyFactory::PeerConnectionFactoryCreated() { 565 bool MediaStreamDependencyFactory::PeerConnectionFactoryCreated() {
630 return pc_factory_.get() != NULL; 566 return pc_factory_.get() != NULL;
631 } 567 }
632 568
633 scoped_refptr<webrtc::PeerConnectionInterface> 569 scoped_refptr<webrtc::PeerConnectionInterface>
634 MediaStreamDependencyFactory::CreatePeerConnection( 570 MediaStreamDependencyFactory::CreatePeerConnection(
635 const webrtc::PeerConnectionInterface::IceServers& ice_servers, 571 const webrtc::PeerConnectionInterface::IceServers& ice_servers,
636 const webrtc::MediaConstraintsInterface* constraints, 572 const webrtc::MediaConstraintsInterface* constraints,
637 blink::WebFrame* web_frame, 573 blink::WebFrame* web_frame,
638 webrtc::PeerConnectionObserver* observer) { 574 webrtc::PeerConnectionObserver* observer) {
639 CHECK(web_frame); 575 CHECK(web_frame);
640 CHECK(observer); 576 CHECK(observer);
577 if (!pc_factory())
578 return NULL;
641 579
642 scoped_refptr<P2PPortAllocatorFactory> pa_factory = 580 scoped_refptr<P2PPortAllocatorFactory> pa_factory =
643 new talk_base::RefCountedObject<P2PPortAllocatorFactory>( 581 new talk_base::RefCountedObject<P2PPortAllocatorFactory>(
644 p2p_socket_dispatcher_.get(), 582 p2p_socket_dispatcher_.get(),
645 network_manager_, 583 network_manager_,
646 socket_factory_.get(), 584 socket_factory_.get(),
647 web_frame); 585 web_frame);
648 586
649 PeerConnectionIdentityService* identity_service = 587 PeerConnectionIdentityService* identity_service =
650 new PeerConnectionIdentityService( 588 new PeerConnectionIdentityService(
651 GURL(web_frame->document().url().spec()).GetOrigin()); 589 GURL(web_frame->document().url().spec()).GetOrigin());
652 590
653 return pc_factory_->CreatePeerConnection(ice_servers, 591 return pc_factory()->CreatePeerConnection(ice_servers,
654 constraints, 592 constraints,
655 pa_factory.get(), 593 pa_factory.get(),
656 identity_service, 594 identity_service,
657 observer).get(); 595 observer).get();
658 } 596 }
659 597
660 scoped_refptr<webrtc::MediaStreamInterface> 598 scoped_refptr<webrtc::MediaStreamInterface>
661 MediaStreamDependencyFactory::CreateLocalMediaStream( 599 MediaStreamDependencyFactory::CreateLocalMediaStream(
662 const std::string& label) { 600 const std::string& label) {
663 return pc_factory_->CreateLocalMediaStream(label).get(); 601 return pc_factory()->CreateLocalMediaStream(label).get();
664 } 602 }
665 603
666 scoped_refptr<webrtc::AudioSourceInterface> 604 scoped_refptr<webrtc::AudioSourceInterface>
667 MediaStreamDependencyFactory::CreateLocalAudioSource( 605 MediaStreamDependencyFactory::CreateLocalAudioSource(
668 const webrtc::MediaConstraintsInterface* constraints) { 606 const webrtc::MediaConstraintsInterface* constraints) {
669 scoped_refptr<webrtc::AudioSourceInterface> source = 607 scoped_refptr<webrtc::AudioSourceInterface> source =
670 pc_factory_->CreateAudioSource(constraints).get(); 608 pc_factory()->CreateAudioSource(constraints).get();
671 return source; 609 return source;
672 } 610 }
673 611
674 scoped_refptr<webrtc::VideoSourceInterface>
675 MediaStreamDependencyFactory::CreateLocalVideoSource(
676 int video_session_id,
677 bool is_screencast,
678 const webrtc::MediaConstraintsInterface* constraints) {
679 RtcVideoCapturer* capturer = new RtcVideoCapturer(
680 video_session_id, is_screencast);
681
682 // The video source takes ownership of |capturer|.
683 scoped_refptr<webrtc::VideoSourceInterface> source =
684 CreateVideoSource(capturer, constraints);
685 return source;
686 }
687
688 scoped_refptr<WebAudioCapturerSource> 612 scoped_refptr<WebAudioCapturerSource>
689 MediaStreamDependencyFactory::CreateWebAudioSource( 613 MediaStreamDependencyFactory::CreateWebAudioSource(
690 blink::WebMediaStreamSource* source, 614 blink::WebMediaStreamSource* source,
691 const RTCMediaConstraints& constraints) { 615 const RTCMediaConstraints& constraints) {
692 DVLOG(1) << "MediaStreamDependencyFactory::CreateWebAudioSource()"; 616 DVLOG(1) << "MediaStreamDependencyFactory::CreateWebAudioSource()";
693 DCHECK(GetWebRtcAudioDevice()); 617 DCHECK(GetWebRtcAudioDevice());
694 618
695 scoped_refptr<WebAudioCapturerSource> 619 scoped_refptr<WebAudioCapturerSource>
696 webaudio_capturer_source(new WebAudioCapturerSource()); 620 webaudio_capturer_source(new WebAudioCapturerSource());
697 MediaStreamSourceExtraData* source_data = new MediaStreamSourceExtraData(); 621 MediaStreamAudioSource* source_data = new MediaStreamAudioSource();
698 622
699 // Create a LocalAudioSource object which holds audio options. 623 // Create a LocalAudioSource object which holds audio options.
700 // SetLocalAudioSource() affects core audio parts in third_party/Libjingle. 624 // SetLocalAudioSource() affects core audio parts in third_party/Libjingle.
701 source_data->SetLocalAudioSource(CreateLocalAudioSource(&constraints).get()); 625 source_data->SetLocalAudioSource(CreateLocalAudioSource(&constraints).get());
702 source->setExtraData(source_data); 626 source->setExtraData(source_data);
703 627
704 // Replace the default source with WebAudio as source instead. 628 // Replace the default source with WebAudio as source instead.
705 source->addAudioConsumer(webaudio_capturer_source.get()); 629 source->addAudioConsumer(webaudio_capturer_source.get());
706 630
707 return webaudio_capturer_source; 631 return webaudio_capturer_source;
708 } 632 }
709 633
710 scoped_refptr<webrtc::VideoTrackInterface> 634 scoped_refptr<webrtc::VideoTrackInterface>
711 MediaStreamDependencyFactory::CreateLocalVideoTrack( 635 MediaStreamDependencyFactory::CreateLocalVideoTrack(
712 const std::string& id, 636 const std::string& id,
713 webrtc::VideoSourceInterface* source) { 637 webrtc::VideoSourceInterface* source) {
714 return pc_factory_->CreateVideoTrack(id, source).get(); 638 return pc_factory()->CreateVideoTrack(id, source).get();
715 } 639 }
716 640
717 scoped_refptr<webrtc::VideoTrackInterface> 641 scoped_refptr<webrtc::VideoTrackInterface>
718 MediaStreamDependencyFactory::CreateLocalVideoTrack( 642 MediaStreamDependencyFactory::CreateLocalVideoTrack(
719 const std::string& id, cricket::VideoCapturer* capturer) { 643 const std::string& id, cricket::VideoCapturer* capturer) {
720 if (!capturer) { 644 if (!capturer) {
721 LOG(ERROR) << "CreateLocalVideoTrack called with null VideoCapturer."; 645 LOG(ERROR) << "CreateLocalVideoTrack called with null VideoCapturer.";
722 return NULL; 646 return NULL;
723 } 647 }
724 648
725 // Create video source from the |capturer|. 649 // Create video source from the |capturer|.
726 scoped_refptr<webrtc::VideoSourceInterface> source = 650 scoped_refptr<webrtc::VideoSourceInterface> source =
727 CreateVideoSource(capturer, NULL); 651 CreateVideoSource(capturer, NULL);
728 652
729 // Create native track from the source. 653 // Create native track from the source.
730 return pc_factory_->CreateVideoTrack(id, source.get()).get(); 654 return pc_factory()->CreateVideoTrack(id, source.get()).get();
731 } 655 }
732 656
733 scoped_refptr<webrtc::AudioTrackInterface> 657 scoped_refptr<webrtc::AudioTrackInterface>
734 MediaStreamDependencyFactory::CreateLocalAudioTrack( 658 MediaStreamDependencyFactory::CreateLocalAudioTrack(
735 const std::string& id, 659 const std::string& id,
736 const scoped_refptr<WebRtcAudioCapturer>& capturer, 660 const scoped_refptr<WebRtcAudioCapturer>& capturer,
737 WebAudioCapturerSource* webaudio_source, 661 WebAudioCapturerSource* webaudio_source,
738 webrtc::AudioSourceInterface* source) { 662 webrtc::AudioSourceInterface* source) {
739 // TODO(xians): Merge |source| to the capturer(). We can't do this today 663 // TODO(xians): Merge |source| to the capturer(). We can't do this today
740 // because only one capturer() is supported while one |source| is created 664 // because only one capturer() is supported while one |source| is created
(...skipping 45 matching lines...)
786 network_manager_ = new IpcNetworkManager(p2p_socket_dispatcher_.get()); 710 network_manager_ = new IpcNetworkManager(p2p_socket_dispatcher_.get());
787 event->Signal(); 711 event->Signal();
788 } 712 }
789 713
790 void MediaStreamDependencyFactory::DeleteIpcNetworkManager() { 714 void MediaStreamDependencyFactory::DeleteIpcNetworkManager() {
791 DCHECK_EQ(base::MessageLoop::current(), chrome_worker_thread_.message_loop()); 715 DCHECK_EQ(base::MessageLoop::current(), chrome_worker_thread_.message_loop());
792 delete network_manager_; 716 delete network_manager_;
793 network_manager_ = NULL; 717 network_manager_ = NULL;
794 } 718 }
795 719
796 bool MediaStreamDependencyFactory::EnsurePeerConnectionFactory() {
797 DCHECK(CalledOnValidThread());
798 if (PeerConnectionFactoryCreated())
799 return true;
800
801 if (!signaling_thread_) {
802 jingle_glue::JingleThreadWrapper::EnsureForCurrentMessageLoop();
803 jingle_glue::JingleThreadWrapper::current()->set_send_allowed(true);
804 signaling_thread_ = jingle_glue::JingleThreadWrapper::current();
805 CHECK(signaling_thread_);
806 }
807
808 if (!worker_thread_) {
809 if (!chrome_worker_thread_.IsRunning()) {
810 if (!chrome_worker_thread_.Start()) {
811 LOG(ERROR) << "Could not start worker thread";
812 signaling_thread_ = NULL;
813 return false;
814 }
815 }
816 base::WaitableEvent event(true, false);
817 chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
818 &MediaStreamDependencyFactory::InitializeWorkerThread,
819 base::Unretained(this),
820 &worker_thread_,
821 &event));
822 event.Wait();
823 DCHECK(worker_thread_);
824 }
825
826 if (!network_manager_) {
827 base::WaitableEvent event(true, false);
828 chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
829 &MediaStreamDependencyFactory::CreateIpcNetworkManagerOnWorkerThread,
830 base::Unretained(this),
831 &event));
832 event.Wait();
833 }
834
835 if (!socket_factory_) {
836 socket_factory_.reset(
837 new IpcPacketSocketFactory(p2p_socket_dispatcher_.get()));
838 }
839
840 // Init SSL, which will be needed by PeerConnection.
841 #if defined(USE_OPENSSL)
842 if (!talk_base::InitializeSSL()) {
843 LOG(ERROR) << "Failed on InitializeSSL.";
844 return false;
845 }
846 #else
847 // TODO(ronghuawu): Replace this call with InitializeSSL.
848 net::EnsureNSSSSLInit();
849 #endif
850
851 if (!CreatePeerConnectionFactory()) {
852 LOG(ERROR) << "Could not create PeerConnection factory";
853 return false;
854 }
855 return true;
856 }
857
858 void MediaStreamDependencyFactory::CleanupPeerConnectionFactory() { 720 void MediaStreamDependencyFactory::CleanupPeerConnectionFactory() {
859 pc_factory_ = NULL; 721 pc_factory_ = NULL;
860 if (network_manager_) { 722 if (network_manager_) {
861 // The network manager needs to free its resources on the thread they were 723 // The network manager needs to free its resources on the thread they were
862 // created, which is the worked thread. 724 // created, which is the worked thread.
863 if (chrome_worker_thread_.IsRunning()) { 725 if (chrome_worker_thread_.IsRunning()) {
864 chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind( 726 chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
865 &MediaStreamDependencyFactory::DeleteIpcNetworkManager, 727 &MediaStreamDependencyFactory::DeleteIpcNetworkManager,
866 base::Unretained(this))); 728 base::Unretained(this)));
867 // Stopping the thread will wait until all tasks have been 729 // Stopping the thread will wait until all tasks have been
(...skipping 89 matching lines...)
957 } 819 }
958 820
959 void MediaStreamDependencyFactory::StartAecDump( 821 void MediaStreamDependencyFactory::StartAecDump(
960 const base::PlatformFile& aec_dump_file) { 822 const base::PlatformFile& aec_dump_file) {
961 FILE* aec_dump_file_stream = base::FdopenPlatformFile(aec_dump_file, "w"); 823 FILE* aec_dump_file_stream = base::FdopenPlatformFile(aec_dump_file, "w");
962 if (!aec_dump_file_stream) { 824 if (!aec_dump_file_stream) {
963 VLOG(1) << "Could not open AEC dump file."; 825 VLOG(1) << "Could not open AEC dump file.";
964 base::ClosePlatformFile(aec_dump_file); 826 base::ClosePlatformFile(aec_dump_file);
965 } else { 827 } else {
966 // |pc_factory_| takes ownership of |aec_dump_file_stream|. 828 // |pc_factory_| takes ownership of |aec_dump_file_stream|.
967 pc_factory_->StartAecDump(aec_dump_file_stream); 829 pc_factory()->StartAecDump(aec_dump_file_stream);
968 } 830 }
969 } 831 }
970 832
971 } // namespace content 833 } // namespace content
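The pc_factory() review thread above notes that the accessor is not a plain getter, because it lazily creates the peer connection factory on first use. A standalone sketch of that lazy-creation accessor shape, using hypothetical names rather than the Chromium types:

#include <cassert>
#include <memory>

// Stand-in for the lazily created object (e.g. the peer connection factory).
struct Factory {};

class Owner {
 public:
  // Creates the factory on first use and caches it; later calls return the
  // same instance. The assert mirrors the CHECK(pc_factory_) in the diff:
  // creation is expected to succeed, so callers never see a null factory.
  Factory& factory() {
    if (!factory_)
      factory_ = std::make_unique<Factory>();
    assert(factory_);
    return *factory_;
  }

 private:
  std::unique_ptr<Factory> factory_;
};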