Chromium Code Reviews

Side by Side Diff: content/renderer/media/webrtc/peer_connection_dependency_factory.cc

Issue 1721273002: MediaStream audio object graph untangling and clean-ups. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: REBASE Created 4 years, 9 months ago
 // Copyright 2014 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "content/renderer/media/webrtc/peer_connection_dependency_factory.h"

 #include <stddef.h>

 #include <utility>
 #include <vector>

+#include "base/bind.h"
+#include "base/bind_helpers.h"
 #include "base/command_line.h"
 #include "base/location.h"
 #include "base/logging.h"
 #include "base/macros.h"
 #include "base/metrics/field_trial.h"
 #include "base/strings/string_util.h"
 #include "base/strings/utf_string_conversions.h"
 #include "base/synchronization/waitable_event.h"
 #include "build/build_config.h"
 #include "content/common/media/media_stream_messages.h"
(...skipping 159 matching lines...)
   // microphone or tab audio.
   RTCMediaConstraints native_audio_constraints(audio_constraints);
   MediaAudioConstraints::ApplyFixedAudioConstraints(&native_audio_constraints);

   StreamDeviceInfo device_info = source_data->device_info();
   RTCMediaConstraints constraints = native_audio_constraints;
   // May modify both |constraints| and |effects|.
   HarmonizeConstraintsAndEffects(&constraints,
                                  &device_info.device.input.effects);

-  scoped_refptr<WebRtcAudioCapturer> capturer(CreateAudioCapturer(
-      render_frame_id, device_info, audio_constraints, source_data));
+  scoped_ptr<WebRtcAudioCapturer> capturer = CreateAudioCapturer(
+      render_frame_id, device_info, audio_constraints, source_data);
   if (!capturer.get()) {
     const std::string log_string =
         "PCDF::InitializeMediaStreamAudioSource: fails to create capturer";
     WebRtcLogMessage(log_string);
     DVLOG(1) << log_string;
     // TODO(xians): Don't we need to check if source_observer is observing
     // something? If not, then it looks like we have a leak here.
     // OTOH, if it _is_ observing something, then the callback might
     // be called multiple times which is likely also a bug.
     return false;
   }
-  source_data->SetAudioCapturer(capturer.get());
+  source_data->SetAudioCapturer(std::move(capturer));

   // Creates a LocalAudioSource object which holds audio options.
   // TODO(xians): The option should apply to the track instead of the source.
   // TODO(perkj): Move audio constraints parsing to Chrome.
   // Currently there are a few constraints that are parsed by libjingle and
   // the state is set to ended if parsing fails.
   scoped_refptr<webrtc::AudioSourceInterface> rtc_source(
       CreateLocalAudioSource(&constraints).get());
   if (rtc_source->state() != webrtc::MediaSourceInterface::kLive) {
     DLOG(WARNING) << "Failed to create rtc LocalAudioSource.";
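
A minimal standalone sketch (not Chromium code) of the ownership pattern this hunk adopts: AudioCapturer and AudioSource below are hypothetical stand-ins for WebRtcAudioCapturer and MediaStreamAudioSource, and std::unique_ptr stands in for scoped_ptr. The point is that the capturer now has a single owner, the source, instead of being shared through ref-counting.

// Hypothetical stand-ins; not the Chromium classes.
#include <memory>
#include <utility>

class AudioCapturer {};  // stands in for WebRtcAudioCapturer

class AudioSource {      // stands in for MediaStreamAudioSource
 public:
  // The source takes exclusive ownership of the capturer.
  void SetAudioCapturer(std::unique_ptr<AudioCapturer> capturer) {
    capturer_ = std::move(capturer);
  }

 private:
  std::unique_ptr<AudioCapturer> capturer_;
};

// In the real code, creation can fail and the factory returns null.
std::unique_ptr<AudioCapturer> CreateAudioCapturer() {
  return std::make_unique<AudioCapturer>();
}

int main() {
  AudioSource source;
  std::unique_ptr<AudioCapturer> capturer = CreateAudioCapturer();
  if (!capturer)
    return 1;  // mirrors the early return when the capturer cannot be created
  // Ownership moves into the source; no other object keeps a reference.
  source.SetAudioCapturer(std::move(capturer));
  return 0;
}

With a single owner, the capturer's lifetime is tied to the source, which appears to be what the rest of the CL relies on when it routes start/stop through MediaStreamAudioSource.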
(...skipping 312 matching lines...)
     const webrtc::MediaConstraintsInterface* constraints) {
   scoped_refptr<webrtc::AudioSourceInterface> source =
       GetPcFactory()->CreateAudioSource(constraints).get();
   return source;
 }

 void PeerConnectionDependencyFactory::CreateLocalAudioTrack(
     const blink::WebMediaStreamTrack& track) {
   blink::WebMediaStreamSource source = track.source();
   DCHECK_EQ(source.getType(), blink::WebMediaStreamSource::TypeAudio);
-  DCHECK(!source.remote());
-  MediaStreamAudioSource* source_data =
-      static_cast<MediaStreamAudioSource*>(source.getExtraData());
+  MediaStreamAudioSource* source_data = MediaStreamAudioSource::From(source);

-  scoped_refptr<WebAudioCapturerSource> webaudio_source;
   if (!source_data) {
     if (source.requiresAudioConsumer()) {
       // We're adding a WebAudio MediaStream.
       // Create a specific capturer for each WebAudio consumer.
-      webaudio_source = CreateWebAudioSource(&source);
-      source_data = static_cast<MediaStreamAudioSource*>(source.getExtraData());
+      CreateWebAudioSource(&source);
+      source_data = MediaStreamAudioSource::From(source);
+      DCHECK(source_data->webaudio_capturer());
     } else {
-      NOTREACHED() << "Local track missing source extra data.";
+      NOTREACHED() << "Local track missing MediaStreamAudioSource instance.";
       return;
     }
   }

   // Creates an adapter to hold all the libjingle objects.
   scoped_refptr<WebRtcLocalAudioTrackAdapter> adapter(
       WebRtcLocalAudioTrackAdapter::Create(track.id().utf8(),
                                            source_data->local_audio_source()));
   static_cast<webrtc::AudioTrackInterface*>(adapter.get())->set_enabled(
       track.isEnabled());

   // TODO(xians): Merge |source| to the capturer(). We can't do this today
   // because only one capturer() is supported while one |source| is created
   // for each audio track.
-  scoped_ptr<WebRtcLocalAudioTrack> audio_track(new WebRtcLocalAudioTrack(
-      adapter.get(), source_data->GetAudioCapturer(), webaudio_source.get()));
+  scoped_ptr<WebRtcLocalAudioTrack> audio_track(
+      new WebRtcLocalAudioTrack(adapter.get()));

-  StartLocalAudioTrack(audio_track.get());
+  // Start the source and connect the audio data flow to the track.
+  //
+  // TODO(miu): This logic will be moved to MediaStreamAudioSource (or a
+  // subclass of it) in soon-upcoming changes.
+  audio_track->Start(base::Bind(&MediaStreamAudioSource::StopAudioDeliveryTo,
+                                source_data->GetWeakPtr(),
+                                audio_track.get()));
+  if (source_data->webaudio_capturer())
+    source_data->webaudio_capturer()->Start(audio_track.get());
+  else if (source_data->audio_capturer())
+    source_data->audio_capturer()->AddTrack(audio_track.get());
+  else
+    NOTREACHED();

   // Pass the ownership of the native local audio track to the blink track.
   blink::WebMediaStreamTrack writable_track = track;
   writable_track.setExtraData(audio_track.release());
 }

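A standalone sketch (not Chromium code) of the start/stop wiring added above. LocalAudioTrack and AudioSource are hypothetical stand-ins, and std::function plus std::weak_ptr approximate the base::Bind(..., GetWeakPtr(), ...) callback: the track remembers how to tell its source to stop delivering audio to it, and a source that is already gone is simply skipped.

// Hypothetical stand-ins; not the Chromium classes.
#include <functional>
#include <iostream>
#include <memory>
#include <utility>

class LocalAudioTrack;  // stands in for WebRtcLocalAudioTrack

class AudioSource {     // stands in for MediaStreamAudioSource
 public:
  // Disconnects one track from the source's audio delivery.
  void StopAudioDeliveryTo(LocalAudioTrack* track) {
    std::cout << "stopped delivery to track "
              << static_cast<const void*>(track) << "\n";
  }
};

class LocalAudioTrack {
 public:
  // The track stores the stop callback handed to it at start time.
  void Start(std::function<void()> stop_callback) {
    stop_callback_ = std::move(stop_callback);
  }
  void Stop() {
    if (stop_callback_)
      stop_callback_();
  }

 private:
  std::function<void()> stop_callback_;
};

int main() {
  auto source = std::make_shared<AudioSource>();
  LocalAudioTrack track;
  // Bind a weak reference to the source, so stopping a track after the source
  // has been destroyed is a no-op rather than a use-after-free.
  std::weak_ptr<AudioSource> weak_source = source;
  track.Start([weak_source, &track]() {
    if (auto strong_source = weak_source.lock())
      strong_source->StopAudioDeliveryTo(&track);
  });
  track.Stop();
  return 0;
}
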
 void PeerConnectionDependencyFactory::CreateRemoteAudioTrack(
     const blink::WebMediaStreamTrack& track) {
   blink::WebMediaStreamSource source = track.source();
   DCHECK_EQ(source.getType(), blink::WebMediaStreamSource::TypeAudio);
   DCHECK(source.remote());
-  DCHECK(source.getExtraData());
+  DCHECK(MediaStreamAudioSource::From(source));

   blink::WebMediaStreamTrack writable_track = track;
   writable_track.setExtraData(
       new MediaStreamRemoteAudioTrack(source, track.isEnabled()));
 }

-void PeerConnectionDependencyFactory::StartLocalAudioTrack(
-    WebRtcLocalAudioTrack* audio_track) {
-  // Start the audio track. This will hook the |audio_track| to the capturer
-  // as the sink of the audio, and only start the source of the capturer if
-  // it is the first audio track connecting to the capturer.
-  audio_track->Start();
-}
-
-scoped_refptr<WebAudioCapturerSource>
-PeerConnectionDependencyFactory::CreateWebAudioSource(
+void PeerConnectionDependencyFactory::CreateWebAudioSource(
     blink::WebMediaStreamSource* source) {
   DVLOG(1) << "PeerConnectionDependencyFactory::CreateWebAudioSource()";

-  scoped_refptr<WebAudioCapturerSource>
-      webaudio_capturer_source(new WebAudioCapturerSource(*source));
   MediaStreamAudioSource* source_data = new MediaStreamAudioSource();
-
-  // Use the current default capturer for the WebAudio track so that the
-  // WebAudio track can pass a valid delay value and |need_audio_processing|
-  // flag to PeerConnection.
-  // TODO(xians): Remove this after moving APM to Chrome.
-  if (GetWebRtcAudioDevice()) {
-    source_data->SetAudioCapturer(
-        GetWebRtcAudioDevice()->GetDefaultCapturer());
-  }
+  source_data->SetWebAudioCapturer(
+      make_scoped_ptr(new WebAudioCapturerSource(source)));

   // Create a LocalAudioSource object which holds audio options.
   // SetLocalAudioSource() affects core audio parts in third_party/Libjingle.
   source_data->SetLocalAudioSource(CreateLocalAudioSource(NULL).get());
   source->setExtraData(source_data);
-
-  // Replace the default source with WebAudio as source instead.
-  source->addAudioConsumer(webaudio_capturer_source.get());
-
-  return webaudio_capturer_source;
 }
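
A standalone sketch (not Chromium code) of the reshaped CreateWebAudioSource() contract, with hypothetical stand-ins: instead of returning a ref-counted capturer for the caller to keep alive, the factory now stores the WebAudio capturer on the source it sets up, and callers reach it through the source (as CreateLocalAudioTrack() above does via webaudio_capturer()).

// Hypothetical stand-ins; not the Chromium classes.
#include <memory>
#include <utility>

class WebAudioCapturer {};  // stands in for WebAudioCapturerSource

class AudioSource {         // stands in for MediaStreamAudioSource
 public:
  void SetWebAudioCapturer(std::unique_ptr<WebAudioCapturer> capturer) {
    webaudio_capturer_ = std::move(capturer);
  }
  WebAudioCapturer* webaudio_capturer() const {
    return webaudio_capturer_.get();
  }

 private:
  std::unique_ptr<WebAudioCapturer> webaudio_capturer_;
};

// After the change the factory returns nothing; the capturer lives on the
// source, so there is no second ownership path for callers to manage.
std::unique_ptr<AudioSource> CreateWebAudioSource() {
  auto source = std::make_unique<AudioSource>();
  source->SetWebAudioCapturer(std::make_unique<WebAudioCapturer>());
  return source;
}

int main() {
  std::unique_ptr<AudioSource> source = CreateWebAudioSource();
  return source->webaudio_capturer() ? 0 : 1;
}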

 scoped_refptr<webrtc::VideoTrackInterface>
 PeerConnectionDependencyFactory::CreateLocalVideoTrack(
     const std::string& id,
     webrtc::VideoTrackSourceInterface* source) {
   return GetPcFactory()->CreateVideoTrack(id, source).get();
 }

 scoped_refptr<webrtc::VideoTrackInterface>
(...skipping 120 matching lines...)
       // Stopping the thread will wait until all tasks have been
       // processed before returning. We wait for the above task to finish before
       // letting the function continue to avoid any potential race issues.
       chrome_worker_thread_.Stop();
     } else {
       NOTREACHED() << "Worker thread not running.";
     }
   }
 }

-scoped_refptr<WebRtcAudioCapturer>
+scoped_ptr<WebRtcAudioCapturer>
 PeerConnectionDependencyFactory::CreateAudioCapturer(
     int render_frame_id,
     const StreamDeviceInfo& device_info,
     const blink::WebMediaConstraints& constraints,
     MediaStreamAudioSource* audio_source) {
   // TODO(xians): Handle the cases when gUM is called without a proper render
   // view, for example, by an extension.
   DCHECK_GE(render_frame_id, 0);

   EnsureWebRtcAudioDeviceImpl();
(...skipping 24 matching lines...)
 }

 void PeerConnectionDependencyFactory::EnsureWebRtcAudioDeviceImpl() {
   if (audio_device_.get())
     return;

   audio_device_ = new WebRtcAudioDeviceImpl();
 }

 }  // namespace content