Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(2)

Side by Side Diff: content/renderer/media/media_stream_impl.cc

Issue 11783059: Ensures that WebRTC works for device selection using a sample rate different from the default (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src
Patch Set: Fixed content_unittests Created 7 years, 11 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
OLDNEW
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "content/renderer/media/media_stream_impl.h" 5 #include "content/renderer/media/media_stream_impl.h"
6 6
7 #include <utility> 7 #include <utility>
8 8
9 #include "base/logging.h" 9 #include "base/logging.h"
10 #include "base/string_number_conversions.h" 10 #include "base/string_number_conversions.h"
(...skipping 50 matching lines...) Expand 10 before | Expand all | Expand 10 after
61 GetMandatoryStreamConstraint(user_media_request.videoConstraints(), 61 GetMandatoryStreamConstraint(user_media_request.videoConstraints(),
62 kMediaStreamSource) == 62 kMediaStreamSource) ==
63 kMediaStreamSourceTab) { 63 kMediaStreamSourceTab) {
64 options->video_type = content::MEDIA_TAB_VIDEO_CAPTURE; 64 options->video_type = content::MEDIA_TAB_VIDEO_CAPTURE;
65 options->video_device_id = GetMandatoryStreamConstraint( 65 options->video_device_id = GetMandatoryStreamConstraint(
66 user_media_request.videoConstraints(), 66 user_media_request.videoConstraints(),
67 kMediaStreamSourceId); 67 kMediaStreamSourceId);
68 } 68 }
69 } 69 }
70 70
71 // Get session ID for the selected microphone to ensure that we start
72 // capturing audio using the correct input device.
73 static int GetSessionId(const WebKit::WebMediaStreamDescriptor& descriptor) {
74 WebKit::WebVector<WebKit::WebMediaStreamComponent> audio_components;
75 descriptor.audioSources(audio_components);
76 if (audio_components.size() != 1) {
77 // TODO(henrika): add support for more than one audio track.
78 NOTIMPLEMENTED();
79 return -1;
80 }
81
82 if (!audio_components[0].isEnabled()) {
83 DVLOG(1) << "audio track is disabled";
84 return -1;
85 }
86
87 const WebKit::WebMediaStreamSource& source = audio_components[0].source();
88 MediaStreamSourceExtraData* source_data =
89 static_cast<MediaStreamSourceExtraData*>(source.extraData());
90 if (!source_data) {
91 // TODO(henrika): Implement support for sources from remote MediaStreams.
92 NOTIMPLEMENTED();
93 return -1;
94 }
95 DVLOG(1) << "local audio track source name: "
96 << source_data->device_info().device.name;
97
98 return source_data->device_info().session_id;
99 }
100
101 static int g_next_request_id = 0; 71 static int g_next_request_id = 0;
102 72
103 // Creates a WebKit representation of stream sources based on 73 // Creates a WebKit representation of stream sources based on
104 // |devices| from the MediaStreamDispatcher. 74 // |devices| from the MediaStreamDispatcher.
105 void CreateWebKitSourceVector( 75 void CreateWebKitSourceVector(
106 const std::string& label, 76 const std::string& label,
107 const StreamDeviceInfoArray& devices, 77 const StreamDeviceInfoArray& devices,
108 WebKit::WebMediaStreamSource::Type type, 78 WebKit::WebMediaStreamSource::Type type,
109 WebKit::WebVector<WebKit::WebMediaStreamSource>& webkit_sources) { 79 WebKit::WebVector<WebKit::WebMediaStreamSource>& webkit_sources) {
110 CHECK_EQ(devices.size(), webkit_sources.size()); 80 CHECK_EQ(devices.size(), webkit_sources.size());
(...skipping 201 matching lines...) Expand 10 before | Expand all | Expand 10 after
312 CreateRemoteAudioRenderer(extra_data->remote_stream()); 282 CreateRemoteAudioRenderer(extra_data->remote_stream());
313 283
314 if (renderer && 284 if (renderer &&
315 dependency_factory_->GetWebRtcAudioDevice()->SetRenderer(renderer)) { 285 dependency_factory_->GetWebRtcAudioDevice()->SetRenderer(renderer)) {
316 return renderer; 286 return renderer;
317 } 287 }
318 288
319 // WebRtcAudioDeviceImpl can only support one renderer. 289 // WebRtcAudioDeviceImpl can only support one renderer.
320 return NULL; 290 return NULL;
321 } else if (extra_data->local_stream()) { 291 } else if (extra_data->local_stream()) {
322 DVLOG(1) << "creating local audio renderer for stream:" 292 // Create the local audio renderer if the stream contains audio tracks.
323 << extra_data->local_stream()->label();
324
325 // Get session ID for the local media stream.
326 int session_id = GetSessionId(descriptor);
327 if (session_id == -1)
328 return NULL;
329
330 // Create the local audio renderer using the specified session ID.
331 scoped_refptr<WebRtcLocalAudioRenderer> local_renderer = 293 scoped_refptr<WebRtcLocalAudioRenderer> local_renderer =
332 CreateLocalAudioRenderer(session_id); 294 CreateLocalAudioRenderer(extra_data->local_stream());
333 return local_renderer; 295 return local_renderer;
334 } 296 }
335 297
336 NOTREACHED(); 298 NOTREACHED();
337 return NULL; 299 return NULL;
338 } 300 }
339 301
340 // Callback from MediaStreamDispatcher. 302 // Callback from MediaStreamDispatcher.
341 // The requested stream has been generated by the MediaStreamDispatcher. 303 // The requested stream has been generated by the MediaStreamDispatcher.
342 void MediaStreamImpl::OnStreamGenerated( 304 void MediaStreamImpl::OnStreamGenerated(
343 int request_id, 305 int request_id,
344 const std::string& label, 306 const std::string& label,
345 const StreamDeviceInfoArray& audio_array, 307 const StreamDeviceInfoArray& audio_array,
346 const StreamDeviceInfoArray& video_array) { 308 const StreamDeviceInfoArray& video_array) {
347 DCHECK(CalledOnValidThread()); 309 DCHECK(CalledOnValidThread());
310 DVLOG(1) << "MediaStreamImpl::OnStreamGenerated stream:" << label;
348 311
349 UserMediaRequestInfo* request_info = FindUserMediaRequestInfo(request_id); 312 UserMediaRequestInfo* request_info = FindUserMediaRequestInfo(request_id);
350 if (!request_info) { 313 if (!request_info) {
351 // This can happen if the request is canceled or the frame reloads while 314 // This can happen if the request is canceled or the frame reloads while
352 // MediaStreamDispatcher is processing the request. 315 // MediaStreamDispatcher is processing the request.
353 // We need to tell the dispatcher to stop the stream. 316 // We need to tell the dispatcher to stop the stream.
354 media_stream_dispatcher_->StopStream(label); 317 media_stream_dispatcher_->StopStream(label);
355 DVLOG(1) << "Request ID not found"; 318 DVLOG(1) << "Request ID not found";
356 return; 319 return;
357 } 320 }
(...skipping 46 matching lines...) Expand 10 before | Expand all | Expand 10 after
404 &request_info->request, 367 &request_info->request,
405 false); 368 false);
406 DeleteUserMediaRequestInfo(request_info); 369 DeleteUserMediaRequestInfo(request_info);
407 } 370 }
408 371
409 // Callback from MediaStreamDependencyFactory when the sources in |description| 372 // Callback from MediaStreamDependencyFactory when the sources in |description|
410 // have been generated. 373 // have been generated.
411 void MediaStreamImpl::OnCreateNativeSourcesComplete( 374 void MediaStreamImpl::OnCreateNativeSourcesComplete(
412 WebKit::WebMediaStreamDescriptor* description, 375 WebKit::WebMediaStreamDescriptor* description,
413 bool request_succeeded) { 376 bool request_succeeded) {
377 DVLOG(1) << "MediaStreamImpl::OnCreateNativeSourcesComplete stream:"
378 << UTF16ToUTF8(description->label());
tommi (sloooow) - chröme 2013/01/15 17:43:49 doesn't DVLOG support utf16?
henrika (OOO until Aug 14) 2013/01/16 16:37:17 Correct. Done.
414 UserMediaRequestInfo* request_info = FindUserMediaRequestInfo(description); 379 UserMediaRequestInfo* request_info = FindUserMediaRequestInfo(description);
415 if (!request_info) { 380 if (!request_info) {
416 // This can happen if the request is canceled or the frame reloads while 381 // This can happen if the request is canceled or the frame reloads while
417 // MediaStreamDependencyFactory is creating the sources. 382 // MediaStreamDependencyFactory is creating the sources.
418 DVLOG(1) << "Request ID not found"; 383 DVLOG(1) << "Request ID not found";
419 return; 384 return;
420 } 385 }
421 386
422 // Create a native representation of the stream. 387 // Create a native representation of the stream.
423 if (request_succeeded) { 388 if (request_succeeded) {
(...skipping 166 matching lines...) Expand 10 before | Expand all | Expand 10 after
590 if (!stream->audio_tracks() || stream->audio_tracks()->count() == 0) 555 if (!stream->audio_tracks() || stream->audio_tracks()->count() == 0)
591 return NULL; 556 return NULL;
592 557
593 DVLOG(1) << "MediaStreamImpl::CreateRemoteAudioRenderer label:" 558 DVLOG(1) << "MediaStreamImpl::CreateRemoteAudioRenderer label:"
594 << stream->label(); 559 << stream->label();
595 560
596 return new WebRtcAudioRenderer(RenderViewObserver::routing_id()); 561 return new WebRtcAudioRenderer(RenderViewObserver::routing_id());
597 } 562 }
598 563
599 scoped_refptr<WebRtcLocalAudioRenderer> 564 scoped_refptr<WebRtcLocalAudioRenderer>
600 MediaStreamImpl::CreateLocalAudioRenderer(int session_id) { 565 MediaStreamImpl::CreateLocalAudioRenderer(
601 DCHECK_NE(session_id, -1); 566 webrtc::MediaStreamInterface* stream) {
602 // Ensure that the existing capturer reads data from the selected microphone. 567 if (!stream->audio_tracks() || stream->audio_tracks()->count() == 0)
568 return NULL;
569
570 DVLOG(1) << "MediaStreamImpl::CreateLocalAudioRenderer label:"
571 << stream->label();
572
603 scoped_refptr<WebRtcAudioCapturer> source = 573 scoped_refptr<WebRtcAudioCapturer> source =
604 dependency_factory_->GetWebRtcAudioDevice()->capturer(); 574 dependency_factory_->GetWebRtcAudioDevice()->capturer();
605 if (!source) { 575 if (!source) {
606 // The WebRtcAudioCapturer instance can be NULL e.g. if an unsupported
607 // sample rate is used.
608 // TODO(henrika): extend support of capture sample rates.
609 return NULL; 576 return NULL;
610 } 577 }
611 source->SetDevice(session_id);
612 578
613 // Create a new WebRtcLocalAudioRenderer instance and connect it to the 579 // Create a new WebRtcLocalAudioRenderer instance and connect it to the
614 // existing WebRtcAudioCapturer so that the renderer can use it as source. 580 // existing WebRtcAudioCapturer so that the renderer can use it as source.
615 return new WebRtcLocalAudioRenderer(source, RenderViewObserver::routing_id()); 581 return new WebRtcLocalAudioRenderer(source, RenderViewObserver::routing_id());
616 } 582 }
617 583
618 MediaStreamSourceExtraData::MediaStreamSourceExtraData( 584 MediaStreamSourceExtraData::MediaStreamSourceExtraData(
619 const StreamDeviceInfo& device_info) 585 const StreamDeviceInfo& device_info)
620 : device_info_(device_info) { 586 : device_info_(device_info) {
621 } 587 }
(...skipping 17 matching lines...) Expand all
639 const StreamStopCallback& stop_callback) { 605 const StreamStopCallback& stop_callback) {
640 stream_stop_callback_ = stop_callback; 606 stream_stop_callback_ = stop_callback;
641 } 607 }
642 608
643 void MediaStreamExtraData::OnLocalStreamStop() { 609 void MediaStreamExtraData::OnLocalStreamStop() {
644 if (!stream_stop_callback_.is_null()) 610 if (!stream_stop_callback_.is_null())
645 stream_stop_callback_.Run(local_stream_->label()); 611 stream_stop_callback_.Run(local_stream_->label());
646 } 612 }
647 613
648 } // namespace content 614 } // namespace content
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698