Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(49)

Side by Side Diff: content/renderer/media/media_stream_impl.cc

Issue 11669004: Add chromium support for MediaStreamAudioDestinationNode - part II (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src
Patch Set: Nit Created 7 years, 11 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
OLDNEW
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "content/renderer/media/media_stream_impl.h" 5 #include "content/renderer/media/media_stream_impl.h"
6 6
7 #include <utility> 7 #include <utility>
8 8
9 #include "base/logging.h" 9 #include "base/logging.h"
10 #include "base/string_number_conversions.h" 10 #include "base/string_number_conversions.h"
(...skipping 52 matching lines...) Expand 10 before | Expand all | Expand 10 after
63 kMediaStreamSourceTab) { 63 kMediaStreamSourceTab) {
64 options->video_type = content::MEDIA_TAB_VIDEO_CAPTURE; 64 options->video_type = content::MEDIA_TAB_VIDEO_CAPTURE;
65 options->video_device_id = GetMandatoryStreamConstraint( 65 options->video_device_id = GetMandatoryStreamConstraint(
66 user_media_request.videoConstraints(), 66 user_media_request.videoConstraints(),
67 kMediaStreamSourceId); 67 kMediaStreamSourceId);
68 } 68 }
69 } 69 }
70 70
71 // Get session ID for the selected microphone to ensure that we start 71 // Get session ID for the selected microphone to ensure that we start
72 // capturing audio using the correct input device. 72 // capturing audio using the correct input device.
73 static int GetSessionId(const WebKit::WebMediaStreamDescriptor& descriptor) { 73 static int GetSessionId(const WebKit::WebMediaStreamSource& source) {
74 WebKit::WebVector<WebKit::WebMediaStreamComponent> audio_components;
75 descriptor.audioSources(audio_components);
76 if (audio_components.size() != 1) {
77 // TODO(henrika): add support for more than one audio track.
78 NOTIMPLEMENTED();
79 return -1;
80 }
81
82 if (!audio_components[0].isEnabled()) {
83 DVLOG(1) << "audio track is disabled";
84 return -1;
85 }
86
87 const WebKit::WebMediaStreamSource& source = audio_components[0].source();
88 MediaStreamSourceExtraData* source_data = 74 MediaStreamSourceExtraData* source_data =
89 static_cast<MediaStreamSourceExtraData*>(source.extraData()); 75 static_cast<MediaStreamSourceExtraData*>(source.extraData());
90 if (!source_data) { 76 if (!source_data) {
91 // TODO(henrika): Implement support for sources from remote MediaStreams. 77 // TODO(henrika): Implement support for sources from remote MediaStreams.
92 NOTIMPLEMENTED(); 78 NOTIMPLEMENTED();
93 return -1; 79 return -1;
94 } 80 }
95 DVLOG(1) << "local audio track source name: " 81 DVLOG(1) << "local audio track source name: "
96 << source_data->device_info().device.name; 82 << source_data->device_info().device.name;
97 83
(...skipping 217 matching lines...) Expand 10 before | Expand all | Expand 10 after
315 dependency_factory_->GetWebRtcAudioDevice()->SetRenderer(renderer)) { 301 dependency_factory_->GetWebRtcAudioDevice()->SetRenderer(renderer)) {
316 return renderer; 302 return renderer;
317 } 303 }
318 304
319 // WebRtcAudioDeviceImpl can only support one renderer. 305 // WebRtcAudioDeviceImpl can only support one renderer.
320 return NULL; 306 return NULL;
321 } else if (extra_data->local_stream()) { 307 } else if (extra_data->local_stream()) {
322 DVLOG(1) << "creating local audio renderer for stream:" 308 DVLOG(1) << "creating local audio renderer for stream:"
323 << extra_data->local_stream()->label(); 309 << extra_data->local_stream()->label();
324 310
325 // Get session ID for the local media stream. 311 WebKit::WebVector<WebKit::WebMediaStreamComponent> audio_components;
326 int session_id = GetSessionId(descriptor); 312 descriptor.audioSources(audio_components);
327 if (session_id == -1) 313 if (audio_components.size() != 1) {
314 // TODO(henrika): add support for more than one audio track.
315 NOTIMPLEMENTED();
328 return NULL; 316 return NULL;
317 }
318
319 if (!audio_components[0].isEnabled()) {
320 DVLOG(1) << "audio track is disabled";
321 return NULL;
322 }
323
324 int session_id = 0;
325 const WebKit::WebMediaStreamSource& source = audio_components[0].source();
326 if (!source.requiresAudioConsumer()) {
327 session_id = GetSessionId(source);
328 if (session_id == -1) {
329 return NULL;
330 }
331 } else {
332 DVLOG(1) << "WebAudio MediaStream is detected";
333 session_id = -1;
334 }
329 335
330 // Create the local audio renderer using the specified session ID. 336 // Create the local audio renderer using the specified session ID.
331 scoped_refptr<WebRtcLocalAudioRenderer> local_renderer = 337 scoped_refptr<WebRtcLocalAudioRenderer> local_renderer =
332 CreateLocalAudioRenderer(session_id); 338 CreateLocalAudioRenderer(session_id);
333 return local_renderer; 339 return local_renderer;
334 } 340 }
335 341
336 NOTREACHED(); 342 NOTREACHED();
337 return NULL; 343 return NULL;
338 } 344 }
339 345
340 // Callback from MediaStreamDispatcher. 346 // Callback from MediaStreamDispatcher.
341 // The requested stream has been generated by the MediaStreamDispatcher. 347 // The requested stream has been generated by the MediaStreamDispatcher.
342 void MediaStreamImpl::OnStreamGenerated( 348 void MediaStreamImpl::OnStreamGenerated(
343 int request_id, 349 int request_id,
344 const std::string& label, 350 const std::string& label,
345 const StreamDeviceInfoArray& audio_array, 351 const StreamDeviceInfoArray& audio_array,
346 const StreamDeviceInfoArray& video_array) { 352 const StreamDeviceInfoArray& video_array) {
347 DCHECK(CalledOnValidThread()); 353 DCHECK(CalledOnValidThread());
354 DVLOG(1) << "MediaStreamImpl::OnStreamGenerated("
355 << request_id << "," << label << ")";
348 356
349 UserMediaRequestInfo* request_info = FindUserMediaRequestInfo(request_id); 357 UserMediaRequestInfo* request_info = FindUserMediaRequestInfo(request_id);
350 if (!request_info) { 358 if (!request_info) {
351 // This can happen if the request is canceled or the frame reloads while 359 // This can happen if the request is canceled or the frame reloads while
352 // MediaStreamDispatcher is processing the request. 360 // MediaStreamDispatcher is processing the request.
353 // We need to tell the dispatcher to stop the stream. 361 // We need to tell the dispatcher to stop the stream.
354 media_stream_dispatcher_->StopStream(label); 362 media_stream_dispatcher_->StopStream(label);
355 DVLOG(1) << "Request ID not found"; 363 DVLOG(1) << "Request ID not found";
356 return; 364 return;
357 } 365 }
(...skipping 211 matching lines...) Expand 10 before | Expand all | Expand 10 after
569 error_cb, 577 error_cb,
570 repaint_cb); 578 repaint_cb);
571 } 579 }
572 580
573 scoped_refptr<media::VideoDecoder> MediaStreamImpl::CreateVideoDecoder( 581 scoped_refptr<media::VideoDecoder> MediaStreamImpl::CreateVideoDecoder(
574 webrtc::MediaStreamInterface* stream, 582 webrtc::MediaStreamInterface* stream,
575 const scoped_refptr<base::MessageLoopProxy>& message_loop) { 583 const scoped_refptr<base::MessageLoopProxy>& message_loop) {
576 if (!stream->video_tracks() || stream->video_tracks()->count() == 0) 584 if (!stream->video_tracks() || stream->video_tracks()->count() == 0)
577 return NULL; 585 return NULL;
578 586
579 DVLOG(1) << "MediaStreamImpl::CreateRemoteVideoDecoder label:" 587 DVLOG(1) << "MediaStreamImpl::CreateRemoteVideoDecoder label:"
580 << stream->label(); 588 << stream->label();
581 589
582 return new RTCVideoDecoder( 590 return new RTCVideoDecoder(
583 message_loop, 591 message_loop,
584 base::MessageLoopProxy::current(), 592 base::MessageLoopProxy::current(),
585 stream->video_tracks()->at(0)); 593 stream->video_tracks()->at(0));
586 } 594 }
587 595
588 scoped_refptr<WebRtcAudioRenderer> MediaStreamImpl::CreateRemoteAudioRenderer( 596 scoped_refptr<WebRtcAudioRenderer> MediaStreamImpl::CreateRemoteAudioRenderer(
589 webrtc::MediaStreamInterface* stream) { 597 webrtc::MediaStreamInterface* stream) {
590 if (!stream->audio_tracks() || stream->audio_tracks()->count() == 0) 598 if (!stream->audio_tracks() || stream->audio_tracks()->count() == 0)
591 return NULL; 599 return NULL;
592 600
593 DVLOG(1) << "MediaStreamImpl::CreateRemoteAudioRenderer label:" 601 DVLOG(1) << "MediaStreamImpl::CreateRemoteAudioRenderer label:"
594 << stream->label(); 602 << stream->label();
595 603
596 return new WebRtcAudioRenderer(RenderViewObserver::routing_id()); 604 return new WebRtcAudioRenderer(RenderViewObserver::routing_id());
597 } 605 }
598 606
599 scoped_refptr<WebRtcLocalAudioRenderer> 607 scoped_refptr<WebRtcLocalAudioRenderer>
600 MediaStreamImpl::CreateLocalAudioRenderer(int session_id) { 608 MediaStreamImpl::CreateLocalAudioRenderer(int session_id) {
601 DCHECK_NE(session_id, -1);
602 // Ensure that the existing capturer reads data from the selected microphone. 609 // Ensure that the existing capturer reads data from the selected microphone.
603 scoped_refptr<WebRtcAudioCapturer> source = 610 scoped_refptr<WebRtcAudioCapturer> source =
604 dependency_factory_->GetWebRtcAudioDevice()->capturer(); 611 dependency_factory_->GetWebRtcAudioDevice()->capturer();
605 if (!source) { 612 if (!source) {
606 // The WebRtcAudioCapturer instance can be NULL e.g. if an unsupported 613 // The WebRtcAudioCapturer instance can be NULL e.g. if an unsupported
607 // sample rate is used. 614 // sample rate is used.
608 // TODO(henrika): extend support of capture sample rates. 615 // TODO(henrika): extend support of capture sample rates.
609 return NULL; 616 return NULL;
610 } 617 }
611 source->SetDevice(session_id); 618
619 if (session_id != -1)
620 source->SetDevice(session_id);
612 621
613 // Create a new WebRtcLocalAudioRenderer instance and connect it to the 622 // Create a new WebRtcLocalAudioRenderer instance and connect it to the
614 // existing WebRtcAudioCapturer so that the renderer can use it as source. 623 // existing WebRtcAudioCapturer so that the renderer can use it as source.
615 return new WebRtcLocalAudioRenderer(source, RenderViewObserver::routing_id()); 624 return new WebRtcLocalAudioRenderer(source, RenderViewObserver::routing_id());
616 } 625 }
617 626
618 MediaStreamSourceExtraData::MediaStreamSourceExtraData( 627 MediaStreamSourceExtraData::MediaStreamSourceExtraData(
619 const StreamDeviceInfo& device_info) 628 const StreamDeviceInfo& device_info)
620 : device_info_(device_info) { 629 : device_info_(device_info) {
621 } 630 }
(...skipping 17 matching lines...) Expand all
639 const StreamStopCallback& stop_callback) { 648 const StreamStopCallback& stop_callback) {
640 stream_stop_callback_ = stop_callback; 649 stream_stop_callback_ = stop_callback;
641 } 650 }
642 651
643 void MediaStreamExtraData::OnLocalStreamStop() { 652 void MediaStreamExtraData::OnLocalStreamStop() {
644 if (!stream_stop_callback_.is_null()) 653 if (!stream_stop_callback_.is_null())
645 stream_stop_callback_.Run(local_stream_->label()); 654 stream_stop_callback_.Run(local_stream_->label());
646 } 655 }
647 656
648 } // namespace content 657 } // namespace content
OLDNEW
« no previous file with comments | « content/renderer/media/media_stream_dependency_factory.cc ('k') | content/renderer/media/webaudio_capturer_source.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698