Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(420)

Side by Side Diff: content/renderer/media/media_stream_dependency_factory.cc

Issue 131763002: Adds MediaStreamSource, MediaStreamAudioSource and MediaStreamVideoCaptureDeviceSource (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src
Patch Set: Addressed review comments from xians. Created 6 years, 11 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
OLDNEW
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "content/renderer/media/media_stream_dependency_factory.h" 5 #include "content/renderer/media/media_stream_dependency_factory.h"
6 6
7 #include <vector> 7 #include <vector>
8 8
9 #include "base/command_line.h" 9 #include "base/command_line.h"
10 #include "base/strings/utf_string_conversions.h" 10 #include "base/strings/utf_string_conversions.h"
11 #include "base/synchronization/waitable_event.h" 11 #include "base/synchronization/waitable_event.h"
12 #include "content/common/media/media_stream_messages.h" 12 #include "content/common/media/media_stream_messages.h"
13 #include "content/public/common/content_switches.h" 13 #include "content/public/common/content_switches.h"
14 #include "content/renderer/media/media_stream_audio_processor_options.h" 14 #include "content/renderer/media/media_stream_audio_processor_options.h"
15 #include "content/renderer/media/media_stream_source_extra_data.h" 15 #include "content/renderer/media/media_stream_audio_source.h"
16 #include "content/renderer/media/media_stream_track_extra_data.h" 16 #include "content/renderer/media/media_stream_track_extra_data.h"
17 #include "content/renderer/media/media_stream_video_source.h"
17 #include "content/renderer/media/media_stream_video_track.h" 18 #include "content/renderer/media/media_stream_video_track.h"
18 #include "content/renderer/media/peer_connection_identity_service.h" 19 #include "content/renderer/media/peer_connection_identity_service.h"
19 #include "content/renderer/media/rtc_media_constraints.h" 20 #include "content/renderer/media/rtc_media_constraints.h"
20 #include "content/renderer/media/rtc_peer_connection_handler.h" 21 #include "content/renderer/media/rtc_peer_connection_handler.h"
21 #include "content/renderer/media/rtc_video_capturer.h" 22 #include "content/renderer/media/rtc_video_capturer.h"
22 #include "content/renderer/media/rtc_video_decoder_factory.h" 23 #include "content/renderer/media/rtc_video_decoder_factory.h"
23 #include "content/renderer/media/rtc_video_encoder_factory.h" 24 #include "content/renderer/media/rtc_video_encoder_factory.h"
24 #include "content/renderer/media/webaudio_capturer_source.h" 25 #include "content/renderer/media/webaudio_capturer_source.h"
25 #include "content/renderer/media/webrtc_audio_device_impl.h" 26 #include "content/renderer/media/webrtc_audio_device_impl.h"
26 #include "content/renderer/media/webrtc_local_audio_track.h" 27 #include "content/renderer/media/webrtc_local_audio_track.h"
(...skipping 85 matching lines...) Expand 10 before | Expand all | Expand 10 after
112 private: 113 private:
113 scoped_refptr<P2PSocketDispatcher> socket_dispatcher_; 114 scoped_refptr<P2PSocketDispatcher> socket_dispatcher_;
114 // |network_manager_| and |socket_factory_| are weak references, owned by 115 // MediaStreamDependencyFactory.
115 // MediaStreamDependencyFactory. 116 // MediaStreamDependencyFactory.
116 talk_base::NetworkManager* network_manager_; 117 talk_base::NetworkManager* network_manager_;
117 talk_base::PacketSocketFactory* socket_factory_; 118 talk_base::PacketSocketFactory* socket_factory_;
118 // Raw ptr to the WebFrame that created the P2PPortAllocatorFactory. 119 // Raw ptr to the WebFrame that created the P2PPortAllocatorFactory.
119 blink::WebFrame* web_frame_; 120 blink::WebFrame* web_frame_;
120 }; 121 };
121 122
122 // SourceStateObserver is a helper class used for observing the startup state
123 // transition of webrtc media sources such as a camera or microphone.
124 // An instance of the object deletes itself after use.
125 // Usage:
126 // 1. Create an instance of the object with the blink::WebMediaStream
127 //    the observed sources belong to, and a callback.
128 // 2. Add the sources to the observer using AddSource.
129 // 3. Call StartObserving()
130 // 4. The callback will be triggered when all sources have transitioned from
131 // webrtc::MediaSourceInterface::kInitializing.
132 class SourceStateObserver : public webrtc::ObserverInterface,
133 public base::NonThreadSafe {
134 public:
135 SourceStateObserver(
136 blink::WebMediaStream* web_stream,
137 const MediaStreamDependencyFactory::MediaSourcesCreatedCallback& callback)
138 : web_stream_(web_stream),
139 ready_callback_(callback),
140 live_(true) {
141 }
142
143 void AddSource(webrtc::MediaSourceInterface* source) {
144 DCHECK(CalledOnValidThread());
145 switch (source->state()) {
146 case webrtc::MediaSourceInterface::kInitializing:
147 sources_.push_back(source);
148 source->RegisterObserver(this);
149 break;
150 case webrtc::MediaSourceInterface::kLive:
151 // The source is already live so we don't need to wait for it.
152 break;
153 case webrtc::MediaSourceInterface::kEnded:
154 // The source has already failed.
155 live_ = false;
156 break;
157 default:
158 NOTREACHED();
159 }
160 }
161
162 void StartObservering() {
163 DCHECK(CalledOnValidThread());
164 CheckIfSourcesAreLive();
165 }
166
167 virtual void OnChanged() OVERRIDE {
168 DCHECK(CalledOnValidThread());
169 CheckIfSourcesAreLive();
170 }
171
172 private:
173 void CheckIfSourcesAreLive() {
174 ObservedSources::iterator it = sources_.begin();
175 while (it != sources_.end()) {
176 if ((*it)->state() != webrtc::MediaSourceInterface::kInitializing) {
177 live_ &= (*it)->state() == webrtc::MediaSourceInterface::kLive;
178 (*it)->UnregisterObserver(this);
179 it = sources_.erase(it);
180 } else {
181 ++it;
182 }
183 }
184 if (sources_.empty()) {
185 ready_callback_.Run(web_stream_, live_);
186 delete this;
187 }
188 }
189
190 blink::WebMediaStream* web_stream_;
191 MediaStreamDependencyFactory::MediaSourcesCreatedCallback ready_callback_;
192 bool live_;
193 typedef std::vector<scoped_refptr<webrtc::MediaSourceInterface> >
194 ObservedSources;
195 ObservedSources sources_;
196 };
197
198 MediaStreamDependencyFactory::MediaStreamDependencyFactory( 123 MediaStreamDependencyFactory::MediaStreamDependencyFactory(
199 P2PSocketDispatcher* p2p_socket_dispatcher) 124 P2PSocketDispatcher* p2p_socket_dispatcher)
200 : network_manager_(NULL), 125 : network_manager_(NULL),
201 p2p_socket_dispatcher_(p2p_socket_dispatcher), 126 p2p_socket_dispatcher_(p2p_socket_dispatcher),
202 signaling_thread_(NULL), 127 signaling_thread_(NULL),
203 worker_thread_(NULL), 128 worker_thread_(NULL),
204 chrome_worker_thread_("Chrome_libJingle_WorkerThread"), 129 chrome_worker_thread_("Chrome_libJingle_WorkerThread"),
205 aec_dump_file_(base::kInvalidPlatformFileValue) { 130 aec_dump_file_(base::kInvalidPlatformFileValue) {
206 } 131 }
207 132
(...skipping 10 matching lines...) Expand all
218 // The histogram counts the number of calls to the JS API 143 // The histogram counts the number of calls to the JS API
219 // webKitRTCPeerConnection. 144 // webKitRTCPeerConnection.
220 UpdateWebRTCMethodCount(WEBKIT_RTC_PEER_CONNECTION); 145 UpdateWebRTCMethodCount(WEBKIT_RTC_PEER_CONNECTION);
221 146
222 if (!EnsurePeerConnectionFactory()) 147 if (!EnsurePeerConnectionFactory())
223 return NULL; 148 return NULL;
224 149
225 return new RTCPeerConnectionHandler(client, this); 150 return new RTCPeerConnectionHandler(client, this);
226 } 151 }
227 152
228 void MediaStreamDependencyFactory::CreateNativeMediaSources( 153 bool MediaStreamDependencyFactory::InitializeMediaStreamAudioSource(
229 int render_view_id, 154 int render_view_id,
230 const blink::WebMediaConstraints& audio_constraints, 155 const blink::WebMediaConstraints& audio_constraints,
231 const blink::WebMediaConstraints& video_constraints, 156 const blink::WebMediaStreamSource& audio_source) {
232 blink::WebMediaStream* web_stream, 157 DVLOG(1) << "InitializeMediaStreamAudioSources()";
233 const MediaSourcesCreatedCallback& sources_created) {
234 DVLOG(1) << "MediaStreamDependencyFactory::CreateNativeMediaSources()";
235 if (!EnsurePeerConnectionFactory()) {
236 sources_created.Run(web_stream, false);
237 return;
238 }
239
240 // |source_observer| clean up itself when it has completed
241 // source_observer->StartObservering.
242 SourceStateObserver* source_observer =
243 new SourceStateObserver(web_stream, sources_created);
244
245 // Create local video sources.
246 RTCMediaConstraints native_video_constraints(video_constraints);
247 blink::WebVector<blink::WebMediaStreamTrack> video_tracks;
248 web_stream->videoTracks(video_tracks);
249 for (size_t i = 0; i < video_tracks.size(); ++i) {
250 const blink::WebMediaStreamSource& source = video_tracks[i].source();
251 MediaStreamSourceExtraData* source_data =
252 static_cast<MediaStreamSourceExtraData*>(source.extraData());
253
254 // Check if the source has already been created. This happens when the same
255 // source is used in multiple MediaStreams as a result of calling
256 // getUserMedia.
257 if (source_data->video_source())
258 continue;
259
260 const bool is_screencast =
261 source_data->device_info().device.type == MEDIA_TAB_VIDEO_CAPTURE ||
262 source_data->device_info().device.type == MEDIA_DESKTOP_VIDEO_CAPTURE;
263 source_data->SetVideoSource(
264 CreateLocalVideoSource(source_data->device_info().session_id,
265 is_screencast,
266 &native_video_constraints).get());
267 source_observer->AddSource(source_data->video_source());
268 }
269 158
270 // Do additional source initialization if the audio source is a valid 159 // Do additional source initialization if the audio source is a valid
271 // microphone or tab audio. 160 // microphone or tab audio.
272 RTCMediaConstraints native_audio_constraints(audio_constraints); 161 RTCMediaConstraints native_audio_constraints(audio_constraints);
273 ApplyFixedAudioConstraints(&native_audio_constraints); 162 ApplyFixedAudioConstraints(&native_audio_constraints);
274 blink::WebVector<blink::WebMediaStreamTrack> audio_tracks;
275 web_stream->audioTracks(audio_tracks);
276 for (size_t i = 0; i < audio_tracks.size(); ++i) {
277 const blink::WebMediaStreamSource& source = audio_tracks[i].source();
278 MediaStreamSourceExtraData* source_data =
279 static_cast<MediaStreamSourceExtraData*>(source.extraData());
280 163
281 // Check if the source has already been created. This happens when the same 164 MediaStreamAudioSource* source_data =
282 // source is used in multiple MediaStreams as a result of calling 165 static_cast<MediaStreamAudioSource*>(audio_source.extraData());
283 // getUserMedia.
284 if (source_data->local_audio_source())
285 continue;
286 166
287 // TODO(xians): Create a new capturer for different microphones when we 167 // TODO(xians): Create a new capturer for different microphones when we
288 // support multiple microphones. See issue crbug/262117 . 168 // support multiple microphones. See issue crbug/262117 .
289 StreamDeviceInfo device_info = source_data->device_info(); 169 StreamDeviceInfo device_info = source_data->device_info();
290 RTCMediaConstraints constraints = native_audio_constraints; 170 RTCMediaConstraints constraints = native_audio_constraints;
291 171
292 // If any platform effects are available, check them against the 172 // If any platform effects are available, check them against the
293 // constraints. Disable effects to match false constraints, but if a 173 // constraints. Disable effects to match false constraints, but if a
294 // constraint is true, set the constraint to false to later disable the 174 // constraint is true, set the constraint to false to later disable the
295 // software effect. 175 // software effect.
296 int effects = device_info.device.input.effects; 176 int effects = device_info.device.input.effects;
297 if (effects != media::AudioParameters::NO_EFFECTS) { 177 if (effects != media::AudioParameters::NO_EFFECTS) {
298 for (size_t i = 0; i < ARRAYSIZE_UNSAFE(kConstraintEffectMap); ++i) { 178 for (size_t i = 0; i < ARRAYSIZE_UNSAFE(kConstraintEffectMap); ++i) {
299 bool value; 179 bool value;
300 if (!webrtc::FindConstraint(&constraints, 180 if (!webrtc::FindConstraint(&constraints,
301 kConstraintEffectMap[i].constraint, &value, NULL) || !value) { 181 kConstraintEffectMap[i].constraint, &value,
302 // If the constraint is false, or does not exist, disable the platform 182 NULL) || !value) {
303 // effect. 183 // If the constraint is false, or does not exist, disable the platform
304 effects &= ~kConstraintEffectMap[i].effect; 184 // effect.
305 DVLOG(1) << "Disabling constraint: " 185 effects &= ~kConstraintEffectMap[i].effect;
306 << kConstraintEffectMap[i].constraint; 186 DVLOG(1) << "Disabling constraint: "
307 } else if (effects & kConstraintEffectMap[i].effect) { 187 << kConstraintEffectMap[i].constraint;
308 // If the constraint is true, leave the platform effect enabled, and 188 } else if (effects & kConstraintEffectMap[i].effect) {
309 // set the constraint to false to later disable the software effect. 189 // If the constraint is true, leave the platform effect enabled, and
310 constraints.AddMandatory(kConstraintEffectMap[i].constraint, 190 // set the constraint to false to later disable the software effect.
311 webrtc::MediaConstraintsInterface::kValueFalse, true); 191 constraints.AddMandatory(kConstraintEffectMap[i].constraint,
312 DVLOG(1) << "Disabling platform effect: " 192 webrtc::MediaConstraintsInterface::kValueFalse,
313 << kConstraintEffectMap[i].constraint; 193 true);
314 } 194 DVLOG(1) << "Disabling platform effect: "
195 << kConstraintEffectMap[i].constraint;
315 } 196 }
316 device_info.device.input.effects = effects;
317 } 197 }
318 198 device_info.device.input.effects = effects;
319 scoped_refptr<WebRtcAudioCapturer> capturer(
320 CreateAudioCapturer(render_view_id, device_info, audio_constraints));
321 if (!capturer.get()) {
322 DLOG(WARNING) << "Failed to create the capturer for device "
323 << device_info.device.id;
324 sources_created.Run(web_stream, false);
325 // TODO(xians): Don't we need to check if source_observer is observing
326 // something? If not, then it looks like we have a leak here.
327 // OTOH, if it _is_ observing something, then the callback might
328 // be called multiple times which is likely also a bug.
329 return;
330 }
331 source_data->SetAudioCapturer(capturer);
332
333 // Creates a LocalAudioSource object which holds audio options.
334 // TODO(xians): The option should apply to the track instead of the source.
335 source_data->SetLocalAudioSource(
336 CreateLocalAudioSource(&constraints).get());
337 source_observer->AddSource(source_data->local_audio_source());
338 } 199 }
339 200
340 source_observer->StartObservering(); 201 scoped_refptr<WebRtcAudioCapturer> capturer(
202 CreateAudioCapturer(render_view_id, device_info, audio_constraints));
203 if (!capturer.get()) {
204 DLOG(WARNING) << "Failed to create the capturer for device "
205 << device_info.device.id;
206 // TODO(xians): Don't we need to check if source_observer is observing
207 // something? If not, then it looks like we have a leak here.
208 // OTOH, if it _is_ observing something, then the callback might
209 // be called multiple times which is likely also a bug.
210 return false;
211 }
212 source_data->SetAudioCapturer(capturer);
213
214 // Creates a LocalAudioSource object which holds audio options.
215 // TODO(xians): The option should apply to the track instead of the source.
216 source_data->SetLocalAudioSource(
217 CreateLocalAudioSource(&constraints).get());
218
219 return true;
220 }
221
222 cricket::VideoCapturer* MediaStreamDependencyFactory::CreateVideoCapturer(
223 const StreamDeviceInfo& info) {
224 bool is_screeencast =
225 info.device.type == MEDIA_TAB_VIDEO_CAPTURE ||
226 info.device.type == MEDIA_DESKTOP_VIDEO_CAPTURE;
227 return new RtcVideoCapturer(info.session_id, is_screeencast);
341 } 228 }
342 229
343 void MediaStreamDependencyFactory::CreateNativeLocalMediaStream( 230 void MediaStreamDependencyFactory::CreateNativeLocalMediaStream(
344 blink::WebMediaStream* web_stream) { 231 blink::WebMediaStream* web_stream) {
345 DVLOG(1) << "MediaStreamDependencyFactory::CreateNativeLocalMediaStream()"; 232 DVLOG(1) << "MediaStreamDependencyFactory::CreateNativeLocalMediaStream()";
346 if (!EnsurePeerConnectionFactory()) { 233 if (!EnsurePeerConnectionFactory()) {
347 DVLOG(1) << "EnsurePeerConnectionFactory() failed!"; 234 DVLOG(1) << "EnsurePeerConnectionFactory() failed!";
348 return; 235 return;
349 } 236 }
350 237
(...skipping 27 matching lines...) Expand all
378 MediaStreamExtraData* extra_data = 265 MediaStreamExtraData* extra_data =
379 static_cast<MediaStreamExtraData*>(web_stream->extraData()); 266 static_cast<MediaStreamExtraData*>(web_stream->extraData());
380 extra_data->SetLocalStreamStopCallback(stream_stop); 267 extra_data->SetLocalStreamStopCallback(stream_stop);
381 } 268 }
382 269
383 scoped_refptr<webrtc::AudioTrackInterface> 270 scoped_refptr<webrtc::AudioTrackInterface>
384 MediaStreamDependencyFactory::CreateNativeAudioMediaStreamTrack( 271 MediaStreamDependencyFactory::CreateNativeAudioMediaStreamTrack(
385 const blink::WebMediaStreamTrack& track) { 272 const blink::WebMediaStreamTrack& track) {
386 blink::WebMediaStreamSource source = track.source(); 273 blink::WebMediaStreamSource source = track.source();
387 DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeAudio); 274 DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeAudio);
388 MediaStreamSourceExtraData* source_data = 275 MediaStreamAudioSource* source_data =
389 static_cast<MediaStreamSourceExtraData*>(source.extraData()); 276 static_cast<MediaStreamAudioSource*>(source.extraData());
390 277
391 // In the future the constraints will belong to the track itself, but 278 // In the future the constraints will belong to the track itself, but
392 // right now they're on the source, so we fetch them from there. 279 // right now they're on the source, so we fetch them from there.
393 RTCMediaConstraints track_constraints(source.constraints()); 280 RTCMediaConstraints track_constraints(source.constraints());
394 281
395 // Apply default audio constraints that enable echo cancellation, 282 // Apply default audio constraints that enable echo cancellation,
396 // automatic gain control, noise suppression and high-pass filter. 283 // automatic gain control, noise suppression and high-pass filter.
397 ApplyFixedAudioConstraints(&track_constraints); 284 ApplyFixedAudioConstraints(&track_constraints);
398 285
399 scoped_refptr<WebAudioCapturerSource> webaudio_source; 286 scoped_refptr<WebAudioCapturerSource> webaudio_source;
400 if (!source_data) { 287 if (!source_data) {
401 if (source.requiresAudioConsumer()) { 288 if (source.requiresAudioConsumer()) {
402 // We're adding a WebAudio MediaStream. 289 // We're adding a WebAudio MediaStream.
403 // Create a specific capturer for each WebAudio consumer. 290 // Create a specific capturer for each WebAudio consumer.
404 webaudio_source = CreateWebAudioSource(&source, track_constraints); 291 webaudio_source = CreateWebAudioSource(&source, track_constraints);
405 source_data = 292 source_data =
406 static_cast<MediaStreamSourceExtraData*>(source.extraData()); 293 static_cast<MediaStreamAudioSource*>(source.extraData());
407 } else { 294 } else {
408 // TODO(perkj): Implement support for sources from 295 // TODO(perkj): Implement support for sources from
409 // remote MediaStreams. 296 // remote MediaStreams.
410 NOTIMPLEMENTED(); 297 NOTIMPLEMENTED();
411 return NULL; 298 return NULL;
412 } 299 }
413 } 300 }
414 301
415 scoped_refptr<webrtc::AudioTrackInterface> audio_track( 302 scoped_refptr<webrtc::AudioTrackInterface> audio_track(
416 CreateLocalAudioTrack(track.id().utf8(), 303 CreateLocalAudioTrack(track.id().utf8(),
417 source_data->GetAudioCapturer(), 304 source_data->GetAudioCapturer(),
418 webaudio_source.get(), 305 webaudio_source.get(),
419 source_data->local_audio_source())); 306 source_data->local_audio_source()));
420 AddNativeTrackToBlinkTrack(audio_track.get(), track, true); 307 AddNativeTrackToBlinkTrack(audio_track.get(), track, true);
421 308
422 audio_track->set_enabled(track.isEnabled()); 309 audio_track->set_enabled(track.isEnabled());
423 310
424 // Pass the pointer of the source provider to the blink audio track. 311 // Pass the pointer of the source provider to the blink audio track.
425 blink::WebMediaStreamTrack writable_track = track; 312 blink::WebMediaStreamTrack writable_track = track;
426 writable_track.setSourceProvider(static_cast<WebRtcLocalAudioTrack*>( 313 writable_track.setSourceProvider(static_cast<WebRtcLocalAudioTrack*>(
427 audio_track.get())->audio_source_provider()); 314 audio_track.get())->audio_source_provider());
428 315
429 return audio_track; 316 return audio_track;
430 } 317 }
431 318
432 scoped_refptr<webrtc::VideoTrackInterface> 319 scoped_refptr<webrtc::VideoTrackInterface>
433 MediaStreamDependencyFactory::CreateNativeVideoMediaStreamTrack( 320 MediaStreamDependencyFactory::CreateNativeVideoMediaStreamTrack(
434 const blink::WebMediaStreamTrack& track) { 321 const blink::WebMediaStreamTrack& track) {
322 DCHECK(track.extraData() == NULL);
435 blink::WebMediaStreamSource source = track.source(); 323 blink::WebMediaStreamSource source = track.source();
436 DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeVideo); 324 DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeVideo);
437 MediaStreamSourceExtraData* source_data = 325
438 static_cast<MediaStreamSourceExtraData*>(source.extraData()); 326 MediaStreamVideoSource* source_data =
327 static_cast<MediaStreamVideoSource*>(source.extraData());
439 328
440 if (!source_data) { 329 if (!source_data) {
441 // TODO(perkj): Implement support for sources from 330 // TODO(perkj): Implement support for sources from
442 // remote MediaStreams. 331 // remote MediaStreams.
443 NOTIMPLEMENTED(); 332 NOTIMPLEMENTED();
444 return NULL; 333 return NULL;
445 } 334 }
446 335
447 std::string track_id = base::UTF16ToUTF8(track.id()); 336 // Create native track from the source.
448 scoped_refptr<webrtc::VideoTrackInterface> video_track( 337 scoped_refptr<webrtc::VideoTrackInterface> webrtc_track =
449 CreateLocalVideoTrack(track_id, source_data->video_source())); 338 CreateLocalVideoTrack(track.id().utf8(), source_data->GetAdapter());
450 AddNativeTrackToBlinkTrack(video_track.get(), track, true);
451 339
452 video_track->set_enabled(track.isEnabled()); 340 AddNativeTrackToBlinkTrack(webrtc_track, track, true);
Ronghua Wu (Left Chromium) 2014/01/22 01:44:04 nit: bool local_track = true;
perkj_chrome 2014/01/22 16:56:37 Done.
453 341
454 return video_track; 342 webrtc_track->set_enabled(track.isEnabled());
343
344 return webrtc_track;
455 } 345 }
456 346
457 void MediaStreamDependencyFactory::CreateNativeMediaStreamTrack( 347 void MediaStreamDependencyFactory::CreateNativeMediaStreamTrack(
458 const blink::WebMediaStreamTrack& track) { 348 const blink::WebMediaStreamTrack& track) {
459 DCHECK(!track.isNull() && !track.extraData()); 349 DCHECK(!track.isNull() && !track.extraData());
460 DCHECK(!track.source().isNull()); 350 DCHECK(!track.source().isNull());
461 351
462 switch (track.source().type()) { 352 switch (track.source().type()) {
463 case blink::WebMediaStreamSource::TypeAudio: 353 case blink::WebMediaStreamSource::TypeAudio:
464 CreateNativeAudioMediaStreamTrack(track); 354 CreateNativeAudioMediaStreamTrack(track);
(...skipping 90 matching lines...) Expand 10 before | Expand all | Expand 10 after
555 return native_stream->RemoveTrack( 445 return native_stream->RemoveTrack(
556 native_stream->FindVideoTrack(track_id)); 446 native_stream->FindVideoTrack(track_id));
557 } 447 }
558 return false; 448 return false;
559 } 449 }
560 450
561 scoped_refptr<webrtc::VideoSourceInterface> 451 scoped_refptr<webrtc::VideoSourceInterface>
562 MediaStreamDependencyFactory::CreateVideoSource( 452 MediaStreamDependencyFactory::CreateVideoSource(
563 cricket::VideoCapturer* capturer, 453 cricket::VideoCapturer* capturer,
564 const webrtc::MediaConstraintsInterface* constraints) { 454 const webrtc::MediaConstraintsInterface* constraints) {
455 if (!EnsurePeerConnectionFactory()) {
Ronghua Wu (Left Chromium) 2014/01/22 01:44:04 It's unclear to me what is the rule to call or not
perkj_chrome 2014/01/22 16:56:37 good idea
456 return NULL;
457 }
565 scoped_refptr<webrtc::VideoSourceInterface> source = 458 scoped_refptr<webrtc::VideoSourceInterface> source =
566 pc_factory_->CreateVideoSource(capturer, constraints).get(); 459 pc_factory_->CreateVideoSource(capturer, constraints).get();
567 return source; 460 return source;
568 } 461 }
569 462
570 bool MediaStreamDependencyFactory::CreatePeerConnectionFactory() { 463 bool MediaStreamDependencyFactory::CreatePeerConnectionFactory() {
571 DCHECK(!pc_factory_.get()); 464 DCHECK(!pc_factory_.get());
572 DCHECK(!audio_device_.get()); 465 DCHECK(!audio_device_.get());
573 DVLOG(1) << "MediaStreamDependencyFactory::CreatePeerConnectionFactory()"; 466 DVLOG(1) << "MediaStreamDependencyFactory::CreatePeerConnectionFactory()";
574 467
(...skipping 84 matching lines...) Expand 10 before | Expand all | Expand 10 after
659 552
660 scoped_refptr<webrtc::MediaStreamInterface> 553 scoped_refptr<webrtc::MediaStreamInterface>
661 MediaStreamDependencyFactory::CreateLocalMediaStream( 554 MediaStreamDependencyFactory::CreateLocalMediaStream(
662 const std::string& label) { 555 const std::string& label) {
663 return pc_factory_->CreateLocalMediaStream(label).get(); 556 return pc_factory_->CreateLocalMediaStream(label).get();
664 } 557 }
665 558
666 scoped_refptr<webrtc::AudioSourceInterface> 559 scoped_refptr<webrtc::AudioSourceInterface>
667 MediaStreamDependencyFactory::CreateLocalAudioSource( 560 MediaStreamDependencyFactory::CreateLocalAudioSource(
668 const webrtc::MediaConstraintsInterface* constraints) { 561 const webrtc::MediaConstraintsInterface* constraints) {
562 if (!EnsurePeerConnectionFactory())
563 return NULL;
564
669 scoped_refptr<webrtc::AudioSourceInterface> source = 565 scoped_refptr<webrtc::AudioSourceInterface> source =
670 pc_factory_->CreateAudioSource(constraints).get(); 566 pc_factory_->CreateAudioSource(constraints).get();
671 return source; 567 return source;
672 } 568 }
673 569
674 scoped_refptr<webrtc::VideoSourceInterface>
675 MediaStreamDependencyFactory::CreateLocalVideoSource(
676 int video_session_id,
677 bool is_screencast,
678 const webrtc::MediaConstraintsInterface* constraints) {
679 RtcVideoCapturer* capturer = new RtcVideoCapturer(
680 video_session_id, is_screencast);
681
682 // The video source takes ownership of |capturer|.
683 scoped_refptr<webrtc::VideoSourceInterface> source =
684 CreateVideoSource(capturer, constraints);
685 return source;
686 }
687
688 scoped_refptr<WebAudioCapturerSource> 570 scoped_refptr<WebAudioCapturerSource>
689 MediaStreamDependencyFactory::CreateWebAudioSource( 571 MediaStreamDependencyFactory::CreateWebAudioSource(
690 blink::WebMediaStreamSource* source, 572 blink::WebMediaStreamSource* source,
691 const RTCMediaConstraints& constraints) { 573 const RTCMediaConstraints& constraints) {
692 DVLOG(1) << "MediaStreamDependencyFactory::CreateWebAudioSource()"; 574 DVLOG(1) << "MediaStreamDependencyFactory::CreateWebAudioSource()";
693 DCHECK(GetWebRtcAudioDevice()); 575 DCHECK(GetWebRtcAudioDevice());
694 576
695 scoped_refptr<WebAudioCapturerSource> 577 scoped_refptr<WebAudioCapturerSource>
696 webaudio_capturer_source(new WebAudioCapturerSource()); 578 webaudio_capturer_source(new WebAudioCapturerSource());
697 MediaStreamSourceExtraData* source_data = new MediaStreamSourceExtraData(); 579 MediaStreamAudioSource* source_data = new MediaStreamAudioSource();
698 580
699 // Create a LocalAudioSource object which holds audio options. 581 // Create a LocalAudioSource object which holds audio options.
700 // SetLocalAudioSource() affects core audio parts in third_party/Libjingle. 582 // SetLocalAudioSource() affects core audio parts in third_party/Libjingle.
701 source_data->SetLocalAudioSource(CreateLocalAudioSource(&constraints).get()); 583 source_data->SetLocalAudioSource(CreateLocalAudioSource(&constraints).get());
702 source->setExtraData(source_data); 584 source->setExtraData(source_data);
703 585
704 // Replace the default source with WebAudio as source instead. 586 // Replace the default source with WebAudio as source instead.
705 source->addAudioConsumer(webaudio_capturer_source.get()); 587 source->addAudioConsumer(webaudio_capturer_source.get());
706 588
707 return webaudio_capturer_source; 589 return webaudio_capturer_source;
(...skipping 254 matching lines...) Expand 10 before | Expand all | Expand 10 after
962 if (!aec_dump_file_stream) { 844 if (!aec_dump_file_stream) {
963 VLOG(1) << "Could not open AEC dump file."; 845 VLOG(1) << "Could not open AEC dump file.";
964 base::ClosePlatformFile(aec_dump_file); 846 base::ClosePlatformFile(aec_dump_file);
965 } else { 847 } else {
966 // |pc_factory_| takes ownership of |aec_dump_file_stream|. 848 // |pc_factory_| takes ownership of |aec_dump_file_stream|.
967 pc_factory_->StartAecDump(aec_dump_file_stream); 849 pc_factory_->StartAecDump(aec_dump_file_stream);
968 } 850 }
969 } 851 }
970 852
971 } // namespace content 853 } // namespace content
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698