Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(146)

Side by Side Diff: content/renderer/media/media_stream_dependency_factory.cc

Issue 11783059: Ensures that WebRTC works for device selection using a different sample rate than default (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src
Patch Set: Fixes include order Created 7 years, 11 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
OLDNEW
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "content/renderer/media/media_stream_dependency_factory.h" 5 #include "content/renderer/media/media_stream_dependency_factory.h"
6 6
7 #include <vector> 7 #include <vector>
8 8
9 #include "base/synchronization/waitable_event.h" 9 #include "base/synchronization/waitable_event.h"
10 #include "base/utf_string_conversions.h" 10 #include "base/utf_string_conversions.h"
(...skipping 172 matching lines...) Expand 10 before | Expand all | Expand 10 after
183 return NULL; 183 return NULL;
184 184
185 return new RTCPeerConnectionHandler(client, this); 185 return new RTCPeerConnectionHandler(client, this);
186 } 186 }
187 187
188 void MediaStreamDependencyFactory::CreateNativeMediaSources( 188 void MediaStreamDependencyFactory::CreateNativeMediaSources(
189 const WebKit::WebMediaConstraints& audio_constraints, 189 const WebKit::WebMediaConstraints& audio_constraints,
190 const WebKit::WebMediaConstraints& video_constraints, 190 const WebKit::WebMediaConstraints& video_constraints,
191 WebKit::WebMediaStreamDescriptor* description, 191 WebKit::WebMediaStreamDescriptor* description,
192 const MediaSourcesCreatedCallback& sources_created) { 192 const MediaSourcesCreatedCallback& sources_created) {
193 DVLOG(1) << "MediaStreamDependencyFactory::CreateNativeMediaSources()";
193 if (!EnsurePeerConnectionFactory()) { 194 if (!EnsurePeerConnectionFactory()) {
194 sources_created.Run(description, false); 195 sources_created.Run(description, false);
195 return; 196 return;
196 } 197 }
197 198
198 // |source_observer| clean up itself when it has completed 199 // |source_observer| clean up itself when it has completed
199 // source_observer->StartObservering. 200 // source_observer->StartObservering.
200 SourceStateObserver* source_observer = 201 SourceStateObserver* source_observer =
201 new SourceStateObserver(description, sources_created); 202 new SourceStateObserver(description, sources_created);
202 203
(...skipping 13 matching lines...) Expand all
216 continue; 217 continue;
217 } 218 }
218 const bool is_screencast = (source_data->device_info().device.type == 219 const bool is_screencast = (source_data->device_info().device.type ==
219 content::MEDIA_TAB_VIDEO_CAPTURE); 220 content::MEDIA_TAB_VIDEO_CAPTURE);
220 source_data->SetVideoSource( 221 source_data->SetVideoSource(
221 CreateVideoSource(source_data->device_info().session_id, 222 CreateVideoSource(source_data->device_info().session_id,
222 is_screencast, 223 is_screencast,
223 &native_video_constraints)); 224 &native_video_constraints));
224 source_observer->AddSource(source_data->video_source()); 225 source_observer->AddSource(source_data->video_source());
225 } 226 }
227
228 // Do additional source initialization if the audio source is a valid
229 // microphone.
230 WebKit::WebVector<WebKit::WebMediaStreamComponent> audio_components;
231 description->audioSources(audio_components);
232 for (size_t i = 0; i < audio_components.size(); ++i) {
233 const WebKit::WebMediaStreamSource& source = audio_components[i].source();
234 MediaStreamSourceExtraData* source_data =
235 static_cast<MediaStreamSourceExtraData*>(source.extraData());
perkj_chrome 2013/01/15 09:00:19 Will WebAudio also end up here? Fix comment in tha
henrika (OOO until Aug 14) 2013/01/16 16:49:00 Do you mean for the MediaStreamDestination case (n
236 if (!source_data) {
237 // TODO(henrika): Implement support for sources from remote MediaStreams.
238 NOTIMPLEMENTED();
239 continue;
240 }
241
242 const MediaStreamDevice device = source_data->device_info().device;
243 if (device.type == content::MEDIA_DEVICE_AUDIO_CAPTURE) {
244 // Initialize the source using audio parameters for the selected
245 // capture device.
246 WebRtcAudioCapturer* capturer = GetWebRtcAudioDevice()->capturer();
247 // TODO(henrika): refactor \content\public\common\media_stream_request.h
248 // to allow dependency of media::ChannelLayout and avoid static_cast.
249 if (!capturer->Initialize(
250 static_cast<media::ChannelLayout>(device.channel_layout),
251 device.sample_rate)) {
252 // The capturer does not support all possible sample rates.
perkj_chrome 2013/01/15 09:00:19 nit: all possible ? I don't understand the comment
henrika (OOO until Aug 14) 2013/01/16 16:49:00 Refactored + fixed.
253 sources_created.Run(description, false);
254 return;
255 }
256
257 // Specify which capture device to use. The acquired session id is used
258 // for identification.
 259 // TODO(henrika): the current design does not support a unique source
260 // for each audio track.
261 if (source_data->device_info().session_id <= 0) {
262 LOG(ERROR) << "Invalid audio session id";
263 sources_created.Run(description, false);
264 return;
265 }
266 capturer->SetDevice(source_data->device_info().session_id);
267 } else {
268 DLOG(WARNING) << "Unsupported audio source";
perkj_chrome 2013/01/15 09:00:19 nit: NOTREACHED ?
henrika (OOO until Aug 14) 2013/01/16 16:49:00 See if you like new proposal better.
269 sources_created.Run(description, false);
270 return;
271 }
272 }
273
226 source_observer->StartObservering(); 274 source_observer->StartObservering();
227 } 275 }
228 276
229 void MediaStreamDependencyFactory::CreateNativeLocalMediaStream( 277 void MediaStreamDependencyFactory::CreateNativeLocalMediaStream(
230 WebKit::WebMediaStreamDescriptor* description) { 278 WebKit::WebMediaStreamDescriptor* description) {
279 DVLOG(1) << "MediaStreamDependencyFactory::CreateNativeLocalMediaStream()";
231 DCHECK(PeerConnectionFactoryCreated()); 280 DCHECK(PeerConnectionFactoryCreated());
232 281
233 std::string label = UTF16ToUTF8(description->label()); 282 std::string label = UTF16ToUTF8(description->label());
234 scoped_refptr<webrtc::LocalMediaStreamInterface> native_stream = 283 scoped_refptr<webrtc::LocalMediaStreamInterface> native_stream =
235 CreateLocalMediaStream(label); 284 CreateLocalMediaStream(label);
236 285
237 // Add audio tracks. 286 // Add audio tracks.
238 WebKit::WebVector<WebKit::WebMediaStreamComponent> audio_components; 287 WebKit::WebVector<WebKit::WebMediaStreamComponent> audio_components;
239 description->audioSources(audio_components); 288 description->audioSources(audio_components);
240 for (size_t i = 0; i < audio_components.size(); ++i) { 289 for (size_t i = 0; i < audio_components.size(); ++i) {
241 const WebKit::WebMediaStreamSource& source = audio_components[i].source(); 290 const WebKit::WebMediaStreamSource& source = audio_components[i].source();
242 MediaStreamSourceExtraData* source_data = 291 MediaStreamSourceExtraData* source_data =
243 static_cast<MediaStreamSourceExtraData*>(source.extraData()); 292 static_cast<MediaStreamSourceExtraData*>(source.extraData());
244 if (!source_data) { 293 if (!source_data) {
245 // TODO(perkj): Implement support for sources from remote MediaStreams. 294 // TODO(perkj): Implement support for sources from remote MediaStreams.
246 NOTIMPLEMENTED(); 295 NOTIMPLEMENTED();
247 continue; 296 continue;
248 } 297 }
249 // TODO(perkj): Refactor the creation of audio tracks to use a proper 298 // TODO(perkj): Refactor the creation of audio tracks to use a proper
250 // interface for receiving audio input data. Currently NULL is passed since 299 // interface for receiving audio input data. Currently NULL is passed since
251 // the |audio_device| is the wrong class and is unused. 300 // the |audio_device| is the wrong class and is unused.
252 scoped_refptr<webrtc::LocalAudioTrackInterface> audio_track( 301 scoped_refptr<webrtc::LocalAudioTrackInterface> audio_track(
253 CreateLocalAudioTrack(UTF16ToUTF8(source.id()), NULL)); 302 CreateLocalAudioTrack(UTF16ToUTF8(source.id()), NULL));
254 native_stream->AddTrack(audio_track); 303 native_stream->AddTrack(audio_track);
255 audio_track->set_enabled(audio_components[i].isEnabled()); 304 audio_track->set_enabled(audio_components[i].isEnabled());
256 // TODO(xians): This set the source of all audio tracks to the same
257 // microphone. Implement support for setting the source per audio track
258 // instead.
259 SetAudioDeviceSessionId(source_data->device_info().session_id);
260 } 305 }
261 306
262 // Add video tracks. 307 // Add video tracks.
263 WebKit::WebVector<WebKit::WebMediaStreamComponent> video_components; 308 WebKit::WebVector<WebKit::WebMediaStreamComponent> video_components;
264 description->videoSources(video_components); 309 description->videoSources(video_components);
265 for (size_t i = 0; i < video_components.size(); ++i) { 310 for (size_t i = 0; i < video_components.size(); ++i) {
266 const WebKit::WebMediaStreamSource& source = video_components[i].source(); 311 const WebKit::WebMediaStreamSource& source = video_components[i].source();
267 MediaStreamSourceExtraData* source_data = 312 MediaStreamSourceExtraData* source_data =
268 static_cast<MediaStreamSourceExtraData*>(source.extraData()); 313 static_cast<MediaStreamSourceExtraData*>(source.extraData());
269 if (!source_data || !source_data->video_source()) { 314 if (!source_data || !source_data->video_source()) {
(...skipping 18 matching lines...) Expand all
288 WebKit::WebMediaStreamDescriptor* description, 333 WebKit::WebMediaStreamDescriptor* description,
289 const MediaStreamExtraData::StreamStopCallback& stream_stop) { 334 const MediaStreamExtraData::StreamStopCallback& stream_stop) {
290 CreateNativeLocalMediaStream(description); 335 CreateNativeLocalMediaStream(description);
291 336
292 MediaStreamExtraData* extra_data = 337 MediaStreamExtraData* extra_data =
293 static_cast<MediaStreamExtraData*>(description->extraData()); 338 static_cast<MediaStreamExtraData*>(description->extraData());
294 extra_data->SetLocalStreamStopCallback(stream_stop); 339 extra_data->SetLocalStreamStopCallback(stream_stop);
295 } 340 }
296 341
297 bool MediaStreamDependencyFactory::CreatePeerConnectionFactory() { 342 bool MediaStreamDependencyFactory::CreatePeerConnectionFactory() {
343 DVLOG(1) << "MediaStreamDependencyFactory::CreatePeerConnectionFactory()";
298 if (!pc_factory_.get()) { 344 if (!pc_factory_.get()) {
299 DCHECK(!audio_device_); 345 DCHECK(!audio_device_);
300 audio_device_ = new WebRtcAudioDeviceImpl(); 346 audio_device_ = new WebRtcAudioDeviceImpl();
301 scoped_refptr<webrtc::PeerConnectionFactoryInterface> factory( 347 scoped_refptr<webrtc::PeerConnectionFactoryInterface> factory(
302 webrtc::CreatePeerConnectionFactory(worker_thread_, 348 webrtc::CreatePeerConnectionFactory(worker_thread_,
303 signaling_thread_, 349 signaling_thread_,
304 audio_device_)); 350 audio_device_));
305 if (factory.get()) 351 if (factory.get())
306 pc_factory_ = factory; 352 pc_factory_ = factory;
307 else 353 else
(...skipping 74 matching lines...) Expand 10 before | Expand all | Expand 10 after
382 int sdp_mline_index, 428 int sdp_mline_index,
383 const std::string& sdp) { 429 const std::string& sdp) {
384 return webrtc::CreateIceCandidate(sdp_mid, sdp_mline_index, sdp); 430 return webrtc::CreateIceCandidate(sdp_mid, sdp_mline_index, sdp);
385 } 431 }
386 432
387 WebRtcAudioDeviceImpl* 433 WebRtcAudioDeviceImpl*
388 MediaStreamDependencyFactory::GetWebRtcAudioDevice() { 434 MediaStreamDependencyFactory::GetWebRtcAudioDevice() {
389 return audio_device_; 435 return audio_device_;
390 } 436 }
391 437
392 void MediaStreamDependencyFactory::SetAudioDeviceSessionId(int session_id) {
393 audio_device_->SetSessionId(session_id);
394 }
395
396 void MediaStreamDependencyFactory::InitializeWorkerThread( 438 void MediaStreamDependencyFactory::InitializeWorkerThread(
397 talk_base::Thread** thread, 439 talk_base::Thread** thread,
398 base::WaitableEvent* event) { 440 base::WaitableEvent* event) {
399 jingle_glue::JingleThreadWrapper::EnsureForCurrentMessageLoop(); 441 jingle_glue::JingleThreadWrapper::EnsureForCurrentMessageLoop();
400 jingle_glue::JingleThreadWrapper::current()->set_send_allowed(true); 442 jingle_glue::JingleThreadWrapper::current()->set_send_allowed(true);
401 *thread = jingle_glue::JingleThreadWrapper::current(); 443 *thread = jingle_glue::JingleThreadWrapper::current();
402 event->Signal(); 444 event->Signal();
403 } 445 }
404 446
405 void MediaStreamDependencyFactory::CreateIpcNetworkManagerOnWorkerThread( 447 void MediaStreamDependencyFactory::CreateIpcNetworkManagerOnWorkerThread(
(...skipping 78 matching lines...) Expand 10 before | Expand all | Expand 10 after
484 // processed before returning. We wait for the above task to finish before 526 // processed before returning. We wait for the above task to finish before
 485 // letting the function continue to avoid any potential race issues. 527 // letting the function continue to avoid any potential race issues.
486 chrome_worker_thread_.Stop(); 528 chrome_worker_thread_.Stop();
487 } else { 529 } else {
488 NOTREACHED() << "Worker thread not running."; 530 NOTREACHED() << "Worker thread not running.";
489 } 531 }
490 } 532 }
491 } 533 }
492 534
493 } // namespace content 535 } // namespace content
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698