Chromium Code Reviews

Side by Side Diff: content/renderer/media/media_stream_dependency_factory.cc

Issue 131763002: Adds MediaStreamSource, MediaStreamAudioSource and MediaStreamVideoCaptureDeviceSource (Closed)
Base URL: svn://svn.chromium.org/chrome/trunk/src
Patch Set: Addressed xians comments. Created 6 years, 11 months ago
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "content/renderer/media/media_stream_dependency_factory.h" 5 #include "content/renderer/media/media_stream_dependency_factory.h"
6 6
7 #include <vector> 7 #include <vector>
8 8
9 #include "base/command_line.h" 9 #include "base/command_line.h"
10 #include "base/strings/utf_string_conversions.h" 10 #include "base/strings/utf_string_conversions.h"
11 #include "base/synchronization/waitable_event.h" 11 #include "base/synchronization/waitable_event.h"
12 #include "content/common/media/media_stream_messages.h" 12 #include "content/common/media/media_stream_messages.h"
13 #include "content/public/common/content_switches.h" 13 #include "content/public/common/content_switches.h"
14 #include "content/renderer/media/media_stream_audio_processor_options.h" 14 #include "content/renderer/media/media_stream_audio_processor_options.h"
15 #include "content/renderer/media/media_stream_source_extra_data.h" 15 #include "content/renderer/media/media_stream_audio_source.h"
16 #include "content/renderer/media/media_stream_track_extra_data.h" 16 #include "content/renderer/media/media_stream_track_extra_data.h"
17 #include "content/renderer/media/media_stream_video_source.h"
17 #include "content/renderer/media/media_stream_video_track.h" 18 #include "content/renderer/media/media_stream_video_track.h"
18 #include "content/renderer/media/peer_connection_identity_service.h" 19 #include "content/renderer/media/peer_connection_identity_service.h"
19 #include "content/renderer/media/rtc_media_constraints.h" 20 #include "content/renderer/media/rtc_media_constraints.h"
20 #include "content/renderer/media/rtc_peer_connection_handler.h" 21 #include "content/renderer/media/rtc_peer_connection_handler.h"
21 #include "content/renderer/media/rtc_video_capturer.h" 22 #include "content/renderer/media/rtc_video_capturer.h"
22 #include "content/renderer/media/rtc_video_decoder_factory.h" 23 #include "content/renderer/media/rtc_video_decoder_factory.h"
23 #include "content/renderer/media/rtc_video_encoder_factory.h" 24 #include "content/renderer/media/rtc_video_encoder_factory.h"
24 #include "content/renderer/media/webaudio_capturer_source.h" 25 #include "content/renderer/media/webaudio_capturer_source.h"
25 #include "content/renderer/media/webrtc_audio_device_impl.h" 26 #include "content/renderer/media/webrtc_audio_device_impl.h"
26 #include "content/renderer/media/webrtc_local_audio_track.h" 27 #include "content/renderer/media/webrtc_local_audio_track.h"
(...skipping 89 matching lines...)
116 private: 117 private:
117 scoped_refptr<P2PSocketDispatcher> socket_dispatcher_; 118 scoped_refptr<P2PSocketDispatcher> socket_dispatcher_;
118 // |network_manager_| and |socket_factory_| are weak references, owned by 119 // |network_manager_| and |socket_factory_| are weak references, owned by
119 // MediaStreamDependencyFactory. 120 // MediaStreamDependencyFactory.
120 talk_base::NetworkManager* network_manager_; 121 talk_base::NetworkManager* network_manager_;
121 talk_base::PacketSocketFactory* socket_factory_; 122 talk_base::PacketSocketFactory* socket_factory_;
122 // Raw ptr to the WebFrame that created the P2PPortAllocatorFactory. 123 // Raw ptr to the WebFrame that created the P2PPortAllocatorFactory.
123 blink::WebFrame* web_frame_; 124 blink::WebFrame* web_frame_;
124 }; 125 };
125 126
126 // SourceStateObserver is a helper class used for observing the startup state
127 // transition of webrtc media sources such as a camera or microphone.
128 // An instance of the object deletes itself after use.
129 // Usage:
130 // 1. Create an instance of the object with the blink::WebMediaStream
131 // the observed sources belong to, and a callback.
132 // 2. Add the sources to the observer using AddSource.
133 // 3. Call StartObserving()
134 // 4. The callback will be triggered when all sources have transitioned from
135 // webrtc::MediaSourceInterface::kInitializing.
136 class SourceStateObserver : public webrtc::ObserverInterface,
137 public base::NonThreadSafe {
138 public:
139 SourceStateObserver(
140 blink::WebMediaStream* web_stream,
141 const MediaStreamDependencyFactory::MediaSourcesCreatedCallback& callback)
142 : web_stream_(web_stream),
143 ready_callback_(callback),
144 live_(true) {
145 }
146
147 void AddSource(webrtc::MediaSourceInterface* source) {
148 DCHECK(CalledOnValidThread());
149 switch (source->state()) {
150 case webrtc::MediaSourceInterface::kInitializing:
151 sources_.push_back(source);
152 source->RegisterObserver(this);
153 break;
154 case webrtc::MediaSourceInterface::kLive:
155 // The source is already live so we don't need to wait for it.
156 break;
157 case webrtc::MediaSourceInterface::kEnded:
158 // The source has already failed.
159 live_ = false;
160 break;
161 default:
162 NOTREACHED();
163 }
164 }
165
166 void StartObservering() {
167 DCHECK(CalledOnValidThread());
168 CheckIfSourcesAreLive();
169 }
170
171 virtual void OnChanged() OVERRIDE {
172 DCHECK(CalledOnValidThread());
173 CheckIfSourcesAreLive();
174 }
175
176 private:
177 void CheckIfSourcesAreLive() {
178 ObservedSources::iterator it = sources_.begin();
179 while (it != sources_.end()) {
180 if ((*it)->state() != webrtc::MediaSourceInterface::kInitializing) {
181 live_ &= (*it)->state() == webrtc::MediaSourceInterface::kLive;
182 (*it)->UnregisterObserver(this);
183 it = sources_.erase(it);
184 } else {
185 ++it;
186 }
187 }
188 if (sources_.empty()) {
189 ready_callback_.Run(web_stream_, live_);
190 delete this;
191 }
192 }
193
194 blink::WebMediaStream* web_stream_;
195 MediaStreamDependencyFactory::MediaSourcesCreatedCallback ready_callback_;
196 bool live_;
197 typedef std::vector<scoped_refptr<webrtc::MediaSourceInterface> >
198 ObservedSources;
199 ObservedSources sources_;
200 };
201
202 MediaStreamDependencyFactory::MediaStreamDependencyFactory( 127 MediaStreamDependencyFactory::MediaStreamDependencyFactory(
203 P2PSocketDispatcher* p2p_socket_dispatcher) 128 P2PSocketDispatcher* p2p_socket_dispatcher)
204 : network_manager_(NULL), 129 : network_manager_(NULL),
205 #if defined(GOOGLE_TV) 130 #if defined(GOOGLE_TV)
206 decoder_factory_tv_(NULL), 131 decoder_factory_tv_(NULL),
207 #endif 132 #endif
208 p2p_socket_dispatcher_(p2p_socket_dispatcher), 133 p2p_socket_dispatcher_(p2p_socket_dispatcher),
209 signaling_thread_(NULL), 134 signaling_thread_(NULL),
210 worker_thread_(NULL), 135 worker_thread_(NULL),
211 chrome_worker_thread_("Chrome_libJingle_WorkerThread"), 136 chrome_worker_thread_("Chrome_libJingle_WorkerThread"),
(...skipping 13 matching lines...)
225 // The histogram counts the number of calls to the JS API 150 // The histogram counts the number of calls to the JS API
226 // webKitRTCPeerConnection. 151 // webKitRTCPeerConnection.
227 UpdateWebRTCMethodCount(WEBKIT_RTC_PEER_CONNECTION); 152 UpdateWebRTCMethodCount(WEBKIT_RTC_PEER_CONNECTION);
228 153
229 if (!EnsurePeerConnectionFactory()) 154 if (!EnsurePeerConnectionFactory())
230 return NULL; 155 return NULL;
231 156
232 return new RTCPeerConnectionHandler(client, this); 157 return new RTCPeerConnectionHandler(client, this);
233 } 158 }
234 159
235 void MediaStreamDependencyFactory::CreateNativeMediaSources( 160 bool MediaStreamDependencyFactory::InitializeMediaStreamAudioSource(
236 int render_view_id, 161 int render_view_id,
237 const blink::WebMediaConstraints& audio_constraints, 162 const blink::WebMediaConstraints& audio_constraints,
238 const blink::WebMediaConstraints& video_constraints, 163 const blink::WebMediaStreamSource& audio_source) {
239 blink::WebMediaStream* web_stream, 164 DVLOG(1) << "InitializeMediaStreamAudioSources()";
240 const MediaSourcesCreatedCallback& sources_created) {
241 DVLOG(1) << "MediaStreamDependencyFactory::CreateNativeMediaSources()";
242 if (!EnsurePeerConnectionFactory()) {
243 sources_created.Run(web_stream, false);
244 return;
245 }
246
247 // |source_observer| cleans itself up when it has completed
248 // source_observer->StartObservering.
249 SourceStateObserver* source_observer =
250 new SourceStateObserver(web_stream, sources_created);
251
252 // Create local video sources.
253 RTCMediaConstraints native_video_constraints(video_constraints);
254 blink::WebVector<blink::WebMediaStreamTrack> video_tracks;
255 web_stream->videoTracks(video_tracks);
256 for (size_t i = 0; i < video_tracks.size(); ++i) {
257 const blink::WebMediaStreamSource& source = video_tracks[i].source();
258 MediaStreamSourceExtraData* source_data =
259 static_cast<MediaStreamSourceExtraData*>(source.extraData());
260
261 // Check if the source has already been created. This happens when the same
262 // source is used in multiple MediaStreams as a result of calling
263 // getUserMedia.
264 if (source_data->video_source())
265 continue;
266
267 const bool is_screencast =
268 source_data->device_info().device.type == MEDIA_TAB_VIDEO_CAPTURE ||
269 source_data->device_info().device.type == MEDIA_DESKTOP_VIDEO_CAPTURE;
270 source_data->SetVideoSource(
271 CreateLocalVideoSource(source_data->device_info().session_id,
272 is_screencast,
273 &native_video_constraints).get());
274 source_observer->AddSource(source_data->video_source());
275 }
276 165
277 // Do additional source initialization if the audio source is a valid 166 // Do additional source initialization if the audio source is a valid
278 // microphone or tab audio. 167 // microphone or tab audio.
279 RTCMediaConstraints native_audio_constraints(audio_constraints); 168 RTCMediaConstraints native_audio_constraints(audio_constraints);
280 ApplyFixedAudioConstraints(&native_audio_constraints); 169 ApplyFixedAudioConstraints(&native_audio_constraints);
281 blink::WebVector<blink::WebMediaStreamTrack> audio_tracks;
282 web_stream->audioTracks(audio_tracks);
283 for (size_t i = 0; i < audio_tracks.size(); ++i) {
284 const blink::WebMediaStreamSource& source = audio_tracks[i].source();
285 MediaStreamSourceExtraData* source_data =
286 static_cast<MediaStreamSourceExtraData*>(source.extraData());
287 170
288 // Check if the source has already been created. This happens when the same 171 MediaStreamAudioSource* source_data =
289 // source is used in multiple MediaStreams as a result of calling 172 static_cast<MediaStreamAudioSource*>(audio_source.extraData());
290 // getUserMedia.
291 if (source_data->local_audio_source())
292 continue;
293 173
294 // TODO(xians): Create a new capturer for different microphones when we 174 // TODO(xians): Create a new capturer for different microphones when we
295 // support multiple microphones. See issue crbug/262117. 175 // support multiple microphones. See issue crbug/262117.
296 StreamDeviceInfo device_info = source_data->device_info(); 176 StreamDeviceInfo device_info = source_data->device_info();
297 RTCMediaConstraints constraints = native_audio_constraints; 177 RTCMediaConstraints constraints = native_audio_constraints;
298 178
299 // If any platform effects are available, check them against the 179 // If any platform effects are available, check them against the
300 // constraints. Disable effects to match false constraints, but if a 180 // constraints. Disable effects to match false constraints, but if a
301 // constraint is true, set the constraint to false to later disable the 181 // constraint is true, set the constraint to false to later disable the
302 // software effect. 182 // software effect.
303 int effects = device_info.device.input.effects; 183 int effects = device_info.device.input.effects;
304 if (effects != media::AudioParameters::NO_EFFECTS) { 184 if (effects != media::AudioParameters::NO_EFFECTS) {
305 for (size_t i = 0; i < ARRAYSIZE_UNSAFE(kConstraintEffectMap); ++i) { 185 for (size_t i = 0; i < ARRAYSIZE_UNSAFE(kConstraintEffectMap); ++i) {
306 bool value; 186 bool value;
307 if (!webrtc::FindConstraint(&constraints, 187 if (!webrtc::FindConstraint(&constraints,
308 kConstraintEffectMap[i].constraint, &value, NULL) || !value) { 188 kConstraintEffectMap[i].constraint, &value,
309 // If the constraint is false, or does not exist, disable the platform 189 NULL) || !value) {
310 // effect. 190 // If the constraint is false, or does not exist, disable the platform
311 effects &= ~kConstraintEffectMap[i].effect; 191 // effect.
312 DVLOG(1) << "Disabling constraint: " 192 effects &= ~kConstraintEffectMap[i].effect;
313 << kConstraintEffectMap[i].constraint; 193 DVLOG(1) << "Disabling constraint: "
314 } else if (effects & kConstraintEffectMap[i].effect) { 194 << kConstraintEffectMap[i].constraint;
315 // If the constraint is true, leave the platform effect enabled, and 195 } else if (effects & kConstraintEffectMap[i].effect) {
316 // set the constraint to false to later disable the software effect. 196 // If the constraint is true, leave the platform effect enabled, and
317 constraints.AddMandatory(kConstraintEffectMap[i].constraint, 197 // set the constraint to false to later disable the software effect.
318 webrtc::MediaConstraintsInterface::kValueFalse, true); 198 constraints.AddMandatory(kConstraintEffectMap[i].constraint,
319 DVLOG(1) << "Disabling platform effect: " 199 webrtc::MediaConstraintsInterface::kValueFalse,
320 << kConstraintEffectMap[i].constraint; 200 true);
321 } 201 DVLOG(1) << "Disabling platform effect: "
202 << kConstraintEffectMap[i].constraint;
322 } 203 }
323 device_info.device.input.effects = effects;
324 } 204 }
325 205 device_info.device.input.effects = effects;
326 scoped_refptr<WebRtcAudioCapturer> capturer(
327 MaybeCreateAudioCapturer(render_view_id, device_info,
328 audio_constraints));
329 if (!capturer.get()) {
330 DLOG(WARNING) << "Failed to create the capturer for device "
331 << device_info.device.id;
332 sources_created.Run(web_stream, false);
333 // TODO(xians): Don't we need to check if source_observer is observing
334 // something? If not, then it looks like we have a leak here.
335 // OTOH, if it _is_ observing something, then the callback might
336 // be called multiple times which is likely also a bug.
337 return;
338 }
339 source_data->SetAudioCapturer(capturer);
340
341 // Creates a LocalAudioSource object which holds audio options.
342 // TODO(xians): The option should apply to the track instead of the source.
343 source_data->SetLocalAudioSource(
344 CreateLocalAudioSource(&constraints).get());
345 source_observer->AddSource(source_data->local_audio_source());
346 } 206 }
347 207
348 source_observer->StartObservering(); 208 scoped_refptr<WebRtcAudioCapturer> capturer(
209 MaybeCreateAudioCapturer(render_view_id, device_info,
210 audio_constraints));
211 if (!capturer.get()) {
212 DLOG(WARNING) << "Failed to create the capturer for device "
213 << device_info.device.id;
214 // TODO(xians): Don't we need to check if source_observer is observing
215 // something? If not, then it looks like we have a leak here.
216 // OTOH, if it _is_ observing something, then the callback might
217 // be called multiple times which is likely also a bug.
218 return false;
219 }
220 source_data->SetAudioCapturer(capturer);
221
222 // Creates a LocalAudioSource object which holds audio options.
223 // TODO(xians): The option should apply to the track instead of the source.
224 source_data->SetLocalAudioSource(
225 CreateLocalAudioSource(&constraints).get());
226
227 return true;
228 }
229
230 cricket::VideoCapturer* MediaStreamDependencyFactory::CreateVideoCapturer(
231 const StreamDeviceInfo& info) {
232 bool is_screencast =
233 info.device.type == MEDIA_TAB_VIDEO_CAPTURE ||
234 info.device.type == MEDIA_DESKTOP_VIDEO_CAPTURE;
235 return new RtcVideoCapturer(info.session_id, is_screencast);
349 } 236 }
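
For orientation, below is a minimal caller-side sketch of how the two entry points introduced above might be used together. The wrapper function and its parameter names are illustrative assumptions, not part of this patch; the real caller is the renderer's MediaStream glue code, and the includes already present in media_stream_dependency_factory.cc are assumed.

// Hypothetical sketch (not from this CL): drives the new
// InitializeMediaStreamAudioSource() and CreateVideoCapturer() APIs.
bool SetUpLocalSources(MediaStreamDependencyFactory* factory,
                       int render_view_id,
                       const blink::WebMediaConstraints& audio_constraints,
                       const blink::WebMediaStreamSource& audio_source,
                       const StreamDeviceInfo& video_device_info) {
  // Audio: configures the MediaStreamAudioSource stored as the blink
  // source's extra data (capturer plus webrtc::LocalAudioSource).
  if (!factory->InitializeMediaStreamAudioSource(render_view_id,
                                                 audio_constraints,
                                                 audio_source)) {
    return false;
  }
  // Video: the returned capturer is typically handed straight to
  // CreateVideoSource(), which takes ownership of it.
  cricket::VideoCapturer* capturer =
      factory->CreateVideoCapturer(video_device_info);
  return capturer != NULL;
}
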
350 237
351 void MediaStreamDependencyFactory::CreateNativeLocalMediaStream( 238 void MediaStreamDependencyFactory::CreateNativeLocalMediaStream(
352 blink::WebMediaStream* web_stream) { 239 blink::WebMediaStream* web_stream) {
353 DVLOG(1) << "MediaStreamDependencyFactory::CreateNativeLocalMediaStream()"; 240 DVLOG(1) << "MediaStreamDependencyFactory::CreateNativeLocalMediaStream()";
354 if (!EnsurePeerConnectionFactory()) { 241 if (!EnsurePeerConnectionFactory()) {
355 DVLOG(1) << "EnsurePeerConnectionFactory() failed!"; 242 DVLOG(1) << "EnsurePeerConnectionFactory() failed!";
356 return; 243 return;
357 } 244 }
358 245
(...skipping 27 matching lines...)
386 MediaStreamExtraData* extra_data = 273 MediaStreamExtraData* extra_data =
387 static_cast<MediaStreamExtraData*>(web_stream->extraData()); 274 static_cast<MediaStreamExtraData*>(web_stream->extraData());
388 extra_data->SetLocalStreamStopCallback(stream_stop); 275 extra_data->SetLocalStreamStopCallback(stream_stop);
389 } 276 }
390 277
391 scoped_refptr<webrtc::AudioTrackInterface> 278 scoped_refptr<webrtc::AudioTrackInterface>
392 MediaStreamDependencyFactory::CreateNativeAudioMediaStreamTrack( 279 MediaStreamDependencyFactory::CreateNativeAudioMediaStreamTrack(
393 const blink::WebMediaStreamTrack& track) { 280 const blink::WebMediaStreamTrack& track) {
394 blink::WebMediaStreamSource source = track.source(); 281 blink::WebMediaStreamSource source = track.source();
395 DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeAudio); 282 DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeAudio);
396 MediaStreamSourceExtraData* source_data = 283 MediaStreamAudioSource* source_data =
397 static_cast<MediaStreamSourceExtraData*>(source.extraData()); 284 static_cast<MediaStreamAudioSource*>(source.extraData());
398 285
399 // In the future the constraints will belong to the track itself, but 286 // In the future the constraints will belong to the track itself, but
400 // right now they're on the source, so we fetch them from there. 287 // right now they're on the source, so we fetch them from there.
401 RTCMediaConstraints track_constraints(source.constraints()); 288 RTCMediaConstraints track_constraints(source.constraints());
402 289
403 // Apply default audio constraints that enable echo cancellation, 290 // Apply default audio constraints that enable echo cancellation,
404 // automatic gain control, noise suppression and high-pass filter. 291 // automatic gain control, noise suppression and high-pass filter.
405 ApplyFixedAudioConstraints(&track_constraints); 292 ApplyFixedAudioConstraints(&track_constraints);
406 293
407 scoped_refptr<WebAudioCapturerSource> webaudio_source; 294 scoped_refptr<WebAudioCapturerSource> webaudio_source;
408 if (!source_data) { 295 if (!source_data) {
409 if (source.requiresAudioConsumer()) { 296 if (source.requiresAudioConsumer()) {
410 // We're adding a WebAudio MediaStream. 297 // We're adding a WebAudio MediaStream.
411 // Create a specific capturer for each WebAudio consumer. 298 // Create a specific capturer for each WebAudio consumer.
412 webaudio_source = CreateWebAudioSource(&source, track_constraints); 299 webaudio_source = CreateWebAudioSource(&source, track_constraints);
413 source_data = 300 source_data =
414 static_cast<MediaStreamSourceExtraData*>(source.extraData()); 301 static_cast<MediaStreamAudioSource*>(source.extraData());
415 } else { 302 } else {
416 // TODO(perkj): Implement support for sources from 303 // TODO(perkj): Implement support for sources from
417 // remote MediaStreams. 304 // remote MediaStreams.
418 NOTIMPLEMENTED(); 305 NOTIMPLEMENTED();
419 return NULL; 306 return NULL;
420 } 307 }
421 } 308 }
422 309
423 std::string track_id = base::UTF16ToUTF8(track.id()); 310 std::string track_id = base::UTF16ToUTF8(track.id());
424 scoped_refptr<WebRtcAudioCapturer> capturer; 311 scoped_refptr<WebRtcAudioCapturer> capturer;
(...skipping 13 matching lines...)
438 blink::WebMediaStreamTrack writable_track = track; 325 blink::WebMediaStreamTrack writable_track = track;
439 writable_track.setSourceProvider(static_cast<WebRtcLocalAudioTrack*>( 326 writable_track.setSourceProvider(static_cast<WebRtcLocalAudioTrack*>(
440 audio_track.get())->audio_source_provider()); 327 audio_track.get())->audio_source_provider());
441 328
442 return audio_track; 329 return audio_track;
443 } 330 }
444 331
445 scoped_refptr<webrtc::VideoTrackInterface> 332 scoped_refptr<webrtc::VideoTrackInterface>
446 MediaStreamDependencyFactory::CreateNativeVideoMediaStreamTrack( 333 MediaStreamDependencyFactory::CreateNativeVideoMediaStreamTrack(
447 const blink::WebMediaStreamTrack& track) { 334 const blink::WebMediaStreamTrack& track) {
335 DCHECK(track.extraData() == NULL);
448 blink::WebMediaStreamSource source = track.source(); 336 blink::WebMediaStreamSource source = track.source();
449 DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeVideo); 337 DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeVideo);
450 MediaStreamSourceExtraData* source_data = 338
451 static_cast<MediaStreamSourceExtraData*>(source.extraData()); 339 MediaStreamVideoSource* source_data =
340 static_cast<MediaStreamVideoSource*>(source.extraData());
452 341
453 if (!source_data) { 342 if (!source_data) {
454 // TODO(perkj): Implement support for sources from 343 // TODO(perkj): Implement support for sources from
455 // remote MediaStreams. 344 // remote MediaStreams.
456 NOTIMPLEMENTED(); 345 NOTIMPLEMENTED();
457 return NULL; 346 return NULL;
458 } 347 }
459 348
460 std::string track_id = base::UTF16ToUTF8(track.id()); 349 // Create native track from the source.
461 scoped_refptr<webrtc::VideoTrackInterface> video_track( 350 scoped_refptr<webrtc::VideoTrackInterface> webrtc_track =
462 CreateLocalVideoTrack(track_id, source_data->video_source())); 351 CreateLocalVideoTrack(track.id().utf8(), source_data->GetAdapter());
463 AddNativeTrackToBlinkTrack(video_track.get(), track, true);
464 352
465 video_track->set_enabled(track.isEnabled()); 353 AddNativeTrackToBlinkTrack(webrtc_track, track, true);
466 354
467 return video_track; 355 webrtc_track->set_enabled(track.isEnabled());
356
357 return webrtc_track;
468 } 358 }
469 359
470 void MediaStreamDependencyFactory::CreateNativeMediaStreamTrack( 360 void MediaStreamDependencyFactory::CreateNativeMediaStreamTrack(
471 const blink::WebMediaStreamTrack& track) { 361 const blink::WebMediaStreamTrack& track) {
472 DCHECK(!track.isNull() && !track.extraData()); 362 DCHECK(!track.isNull() && !track.extraData());
473 DCHECK(!track.source().isNull()); 363 DCHECK(!track.source().isNull());
474 364
475 switch (track.source().type()) { 365 switch (track.source().type()) {
476 case blink::WebMediaStreamSource::TypeAudio: 366 case blink::WebMediaStreamSource::TypeAudio:
477 CreateNativeAudioMediaStreamTrack(track); 367 CreateNativeAudioMediaStreamTrack(track);
(...skipping 90 matching lines...)
568 return native_stream->RemoveTrack( 458 return native_stream->RemoveTrack(
569 native_stream->FindVideoTrack(track_id)); 459 native_stream->FindVideoTrack(track_id));
570 } 460 }
571 return false; 461 return false;
572 } 462 }
573 463
574 scoped_refptr<webrtc::VideoSourceInterface> 464 scoped_refptr<webrtc::VideoSourceInterface>
575 MediaStreamDependencyFactory::CreateVideoSource( 465 MediaStreamDependencyFactory::CreateVideoSource(
576 cricket::VideoCapturer* capturer, 466 cricket::VideoCapturer* capturer,
577 const webrtc::MediaConstraintsInterface* constraints) { 467 const webrtc::MediaConstraintsInterface* constraints) {
468 if (!EnsurePeerConnectionFactory()) {
469 return NULL;
470 }
578 scoped_refptr<webrtc::VideoSourceInterface> source = 471 scoped_refptr<webrtc::VideoSourceInterface> source =
579 pc_factory_->CreateVideoSource(capturer, constraints).get(); 472 pc_factory_->CreateVideoSource(capturer, constraints).get();
580 return source; 473 return source;
581 } 474 }
582 475
583 bool MediaStreamDependencyFactory::CreatePeerConnectionFactory() { 476 bool MediaStreamDependencyFactory::CreatePeerConnectionFactory() {
584 DCHECK(!pc_factory_.get()); 477 DCHECK(!pc_factory_.get());
585 DCHECK(!audio_device_.get()); 478 DCHECK(!audio_device_.get());
586 DVLOG(1) << "MediaStreamDependencyFactory::CreatePeerConnectionFactory()"; 479 DVLOG(1) << "MediaStreamDependencyFactory::CreatePeerConnectionFactory()";
587 480
(...skipping 95 matching lines...)
683 } 576 }
684 577
685 scoped_refptr<webrtc::AudioSourceInterface> 578 scoped_refptr<webrtc::AudioSourceInterface>
686 MediaStreamDependencyFactory::CreateLocalAudioSource( 579 MediaStreamDependencyFactory::CreateLocalAudioSource(
687 const webrtc::MediaConstraintsInterface* constraints) { 580 const webrtc::MediaConstraintsInterface* constraints) {
688 scoped_refptr<webrtc::AudioSourceInterface> source = 581 scoped_refptr<webrtc::AudioSourceInterface> source =
689 pc_factory_->CreateAudioSource(constraints).get(); 582 pc_factory_->CreateAudioSource(constraints).get();
690 return source; 583 return source;
691 } 584 }
692 585
693 scoped_refptr<webrtc::VideoSourceInterface>
694 MediaStreamDependencyFactory::CreateLocalVideoSource(
695 int video_session_id,
696 bool is_screencast,
697 const webrtc::MediaConstraintsInterface* constraints) {
698 RtcVideoCapturer* capturer = new RtcVideoCapturer(
699 video_session_id, is_screencast);
700
701 // The video source takes ownership of |capturer|.
702 scoped_refptr<webrtc::VideoSourceInterface> source =
703 CreateVideoSource(capturer, constraints);
704 return source;
705 }
706
707 scoped_refptr<WebAudioCapturerSource> 586 scoped_refptr<WebAudioCapturerSource>
708 MediaStreamDependencyFactory::CreateWebAudioSource( 587 MediaStreamDependencyFactory::CreateWebAudioSource(
709 blink::WebMediaStreamSource* source, 588 blink::WebMediaStreamSource* source,
710 const RTCMediaConstraints& constraints) { 589 const RTCMediaConstraints& constraints) {
711 DVLOG(1) << "MediaStreamDependencyFactory::CreateWebAudioSource()"; 590 DVLOG(1) << "MediaStreamDependencyFactory::CreateWebAudioSource()";
712 DCHECK(GetWebRtcAudioDevice()); 591 DCHECK(GetWebRtcAudioDevice());
713 592
714 scoped_refptr<WebAudioCapturerSource> 593 scoped_refptr<WebAudioCapturerSource>
715 webaudio_capturer_source(new WebAudioCapturerSource()); 594 webaudio_capturer_source(new WebAudioCapturerSource());
716 MediaStreamSourceExtraData* source_data = new MediaStreamSourceExtraData(); 595 MediaStreamAudioSource* source_data = new MediaStreamAudioSource();
717 596
718 // Create a LocalAudioSource object which holds audio options. 597 // Create a LocalAudioSource object which holds audio options.
719 // SetLocalAudioSource() affects core audio parts in third_party/Libjingle. 598 // SetLocalAudioSource() affects core audio parts in third_party/Libjingle.
720 source_data->SetLocalAudioSource(CreateLocalAudioSource(&constraints).get()); 599 source_data->SetLocalAudioSource(CreateLocalAudioSource(&constraints).get());
721 source->setExtraData(source_data); 600 source->setExtraData(source_data);
722 601
723 // Replace the default source with WebAudio as source instead. 602 // Replace the default source with WebAudio as source instead.
724 source->addAudioConsumer(webaudio_capturer_source.get()); 603 source->addAudioConsumer(webaudio_capturer_source.get());
725 604
726 return webaudio_capturer_source; 605 return webaudio_capturer_source;
(...skipping 282 matching lines...)
1009 if (!aec_dump_file_stream) { 888 if (!aec_dump_file_stream) {
1010 VLOG(1) << "Could not open AEC dump file."; 889 VLOG(1) << "Could not open AEC dump file.";
1011 base::ClosePlatformFile(aec_dump_file); 890 base::ClosePlatformFile(aec_dump_file);
1012 } else { 891 } else {
1013 // |pc_factory_| takes ownership of |aec_dump_file_stream|. 892 // |pc_factory_| takes ownership of |aec_dump_file_stream|.
1014 pc_factory_->StartAecDump(aec_dump_file_stream); 893 pc_factory_->StartAecDump(aec_dump_file_stream);
1015 } 894 }
1016 } 895 }
1017 896
1018 } // namespace content 897 } // namespace content
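
The platform-effects handling in InitializeMediaStreamAudioSource boils down to one rule per (constraint, effect) pair: if the constraint is absent or false, the platform effect is switched off; if it is true, the platform effect stays on and the constraint is rewritten to false so the corresponding software effect is not applied on top of it. The following self-contained sketch illustrates that rule only; the pair table and the std::map of requested constraints are stand-ins for kConstraintEffectMap and webrtc::FindConstraint(), not code from this patch.

#include <cstddef>
#include <map>
#include <string>

// Illustrative stand-in for one entry of the real kConstraintEffectMap.
struct ConstraintEffectPair {
  const char* constraint;
  int effect;  // One bit of the platform effects bitmask.
};

// Returns the platform effects to keep enabled; true constraints that are
// satisfied in hardware are rewritten to false so the software effect is
// skipped later (the patch does this with constraints.AddMandatory()).
int SelectPlatformEffects(int available_effects,
                          std::map<std::string, bool>* constraints,
                          const ConstraintEffectPair* pairs,
                          size_t num_pairs) {
  int effects = available_effects;
  for (size_t i = 0; i < num_pairs; ++i) {
    std::map<std::string, bool>::iterator it =
        constraints->find(pairs[i].constraint);
    if (it == constraints->end() || !it->second) {
      // Constraint missing or false: do not use the platform effect.
      effects &= ~pairs[i].effect;
    } else if (effects & pairs[i].effect) {
      // Constraint true and the hardware effect is available: keep the
      // platform effect and disable the software counterpart.
      it->second = false;
    }
  }
  return effects;
}
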