OLD | NEW |
1 // Copyright 2014 The Chromium Authors. All rights reserved. | 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/renderer/media/webrtc/peer_connection_dependency_factory.h" | 5 #include "content/renderer/media/webrtc/peer_connection_dependency_factory.h" |
6 | 6 |
7 #include <stddef.h> | 7 #include <stddef.h> |
8 | 8 |
9 #include <utility> | 9 #include <utility> |
10 #include <vector> | 10 #include <vector> |
11 | 11 |
| 12 #include "base/bind.h" |
| 13 #include "base/bind_helpers.h" |
12 #include "base/command_line.h" | 14 #include "base/command_line.h" |
13 #include "base/location.h" | 15 #include "base/location.h" |
14 #include "base/logging.h" | 16 #include "base/logging.h" |
15 #include "base/macros.h" | 17 #include "base/macros.h" |
16 #include "base/metrics/field_trial.h" | 18 #include "base/metrics/field_trial.h" |
17 #include "base/strings/string_util.h" | 19 #include "base/strings/string_util.h" |
18 #include "base/strings/utf_string_conversions.h" | 20 #include "base/strings/utf_string_conversions.h" |
19 #include "base/synchronization/waitable_event.h" | 21 #include "base/synchronization/waitable_event.h" |
20 #include "build/build_config.h" | 22 #include "build/build_config.h" |
21 #include "content/common/media/media_stream_messages.h" | 23 #include "content/common/media/media_stream_messages.h" |
(...skipping 158 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
180 // microphone or tab audio. | 182 // microphone or tab audio. |
181 RTCMediaConstraints native_audio_constraints(audio_constraints); | 183 RTCMediaConstraints native_audio_constraints(audio_constraints); |
182 MediaAudioConstraints::ApplyFixedAudioConstraints(&native_audio_constraints); | 184 MediaAudioConstraints::ApplyFixedAudioConstraints(&native_audio_constraints); |
183 | 185 |
184 StreamDeviceInfo device_info = source_data->device_info(); | 186 StreamDeviceInfo device_info = source_data->device_info(); |
185 RTCMediaConstraints constraints = native_audio_constraints; | 187 RTCMediaConstraints constraints = native_audio_constraints; |
186 // May modify both |constraints| and |effects|. | 188 // May modify both |constraints| and |effects|. |
187 HarmonizeConstraintsAndEffects(&constraints, | 189 HarmonizeConstraintsAndEffects(&constraints, |
188 &device_info.device.input.effects); | 190 &device_info.device.input.effects); |
189 | 191 |
190 scoped_refptr<WebRtcAudioCapturer> capturer(CreateAudioCapturer( | 192 scoped_ptr<WebRtcAudioCapturer> capturer = CreateAudioCapturer( |
191 render_frame_id, device_info, audio_constraints, source_data)); | 193 render_frame_id, device_info, audio_constraints, source_data); |
192 if (!capturer.get()) { | 194 if (!capturer.get()) { |
193 const std::string log_string = | 195 const std::string log_string = |
194 "PCDF::InitializeMediaStreamAudioSource: fails to create capturer"; | 196 "PCDF::InitializeMediaStreamAudioSource: fails to create capturer"; |
195 WebRtcLogMessage(log_string); | 197 WebRtcLogMessage(log_string); |
196 DVLOG(1) << log_string; | 198 DVLOG(1) << log_string; |
197 // TODO(xians): Don't we need to check if source_observer is observing | 199 // TODO(xians): Don't we need to check if source_observer is observing |
198 // something? If not, then it looks like we have a leak here. | 200 // something? If not, then it looks like we have a leak here. |
199 // OTOH, if it _is_ observing something, then the callback might | 201 // OTOH, if it _is_ observing something, then the callback might |
200 // be called multiple times which is likely also a bug. | 202 // be called multiple times which is likely also a bug. |
201 return false; | 203 return false; |
202 } | 204 } |
203 source_data->SetAudioCapturer(capturer.get()); | 205 source_data->SetAudioCapturer(std::move(capturer)); |
204 | 206 |
205 // Creates a LocalAudioSource object which holds audio options. | 207 // Creates a LocalAudioSource object which holds audio options. |
206 // TODO(xians): The option should apply to the track instead of the source. | 208 // TODO(xians): The option should apply to the track instead of the source. |
207 // TODO(perkj): Move audio constraints parsing to Chrome. | 209 // TODO(perkj): Move audio constraints parsing to Chrome. |
208 // Currently there are a few constraints that are parsed by libjingle and | 210 // Currently there are a few constraints that are parsed by libjingle and |
209 // the state is set to ended if parsing fails. | 211 // the state is set to ended if parsing fails. |
210 scoped_refptr<webrtc::AudioSourceInterface> rtc_source( | 212 scoped_refptr<webrtc::AudioSourceInterface> rtc_source( |
211 CreateLocalAudioSource(&constraints).get()); | 213 CreateLocalAudioSource(&constraints).get()); |
212 if (rtc_source->state() != webrtc::MediaSourceInterface::kLive) { | 214 if (rtc_source->state() != webrtc::MediaSourceInterface::kLive) { |
213 DLOG(WARNING) << "Failed to create rtc LocalAudioSource."; | 215 DLOG(WARNING) << "Failed to create rtc LocalAudioSource."; |
(...skipping 316 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
530 const webrtc::MediaConstraintsInterface* constraints) { | 532 const webrtc::MediaConstraintsInterface* constraints) { |
531 scoped_refptr<webrtc::AudioSourceInterface> source = | 533 scoped_refptr<webrtc::AudioSourceInterface> source = |
532 GetPcFactory()->CreateAudioSource(constraints).get(); | 534 GetPcFactory()->CreateAudioSource(constraints).get(); |
533 return source; | 535 return source; |
534 } | 536 } |
535 | 537 |
536 void PeerConnectionDependencyFactory::CreateLocalAudioTrack( | 538 void PeerConnectionDependencyFactory::CreateLocalAudioTrack( |
537 const blink::WebMediaStreamTrack& track) { | 539 const blink::WebMediaStreamTrack& track) { |
538 blink::WebMediaStreamSource source = track.source(); | 540 blink::WebMediaStreamSource source = track.source(); |
539 DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeAudio); | 541 DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeAudio); |
540 DCHECK(!source.remote()); | 542 MediaStreamAudioSource* source_data = MediaStreamAudioSource::From(source); |
541 MediaStreamAudioSource* source_data = | |
542 static_cast<MediaStreamAudioSource*>(source.extraData()); | |
543 | 543 |
544 scoped_refptr<WebAudioCapturerSource> webaudio_source; | 544 scoped_ptr<WebAudioCapturerSource> webaudio_source; |
545 if (!source_data) { | 545 if (!source_data) { |
546 if (source.requiresAudioConsumer()) { | 546 if (source.requiresAudioConsumer()) { |
547 // We're adding a WebAudio MediaStream. | 547 // We're adding a WebAudio MediaStream. |
548 // Create a specific capturer for each WebAudio consumer. | 548 // Create a specific capturer for each WebAudio consumer. |
549 webaudio_source = CreateWebAudioSource(&source); | 549 webaudio_source = CreateWebAudioSource(&source); |
550 source_data = | 550 source_data = MediaStreamAudioSource::From(source); |
551 static_cast<MediaStreamAudioSource*>(source.extraData()); | |
552 } else { | 551 } else { |
553 NOTREACHED() << "Local track missing source extra data."; | 552 NOTREACHED() << "Local track missing MediaStreamAudioSource instance."; |
554 return; | 553 return; |
555 } | 554 } |
556 } | 555 } |
557 | 556 |
558 // Creates an adapter to hold all the libjingle objects. | 557 // Creates an adapter to hold all the libjingle objects. |
559 scoped_refptr<WebRtcLocalAudioTrackAdapter> adapter( | 558 scoped_refptr<WebRtcLocalAudioTrackAdapter> adapter( |
560 WebRtcLocalAudioTrackAdapter::Create(track.id().utf8(), | 559 WebRtcLocalAudioTrackAdapter::Create(track.id().utf8(), |
561 source_data->local_audio_source())); | 560 source_data->local_audio_source())); |
562 static_cast<webrtc::AudioTrackInterface*>(adapter.get())->set_enabled( | 561 static_cast<webrtc::AudioTrackInterface*>(adapter.get())->set_enabled( |
563 track.isEnabled()); | 562 track.isEnabled()); |
564 | 563 |
565 // TODO(xians): Merge |source| to the capturer(). We can't do this today | 564 // TODO(xians): Merge |source| to the capturer(). We can't do this today |
566 // because only one capturer() is supported while one |source| is created | 565 // because only one capturer() is supported while one |source| is created |
567 // for each audio track. | 566 // for each audio track. |
568 scoped_ptr<WebRtcLocalAudioTrack> audio_track(new WebRtcLocalAudioTrack( | 567 scoped_ptr<WebRtcLocalAudioTrack> audio_track( |
569 adapter.get(), source_data->GetAudioCapturer(), webaudio_source.get())); | 568 new WebRtcLocalAudioTrack(adapter.get())); |
570 | 569 |
571 StartLocalAudioTrack(audio_track.get()); | 570 // Start the source and connect the audio data flow to the track. |
| 571 if (webaudio_source.get()) { |
| 572 webaudio_source->Start(audio_track.get()); |
| 573 // The stop callback takes ownership of the |webaudio_source|, which will |
| 574 // cause it to be auto-destroyed when the track is stopped. |
| 575 // |
| 576 // TODO(miu): In a future change, WebAudioCapturerSource will become a |
| 577 // subclass of MediaStreamAudioSource, and this will allow it to be owned by |
| 578 // the blink::WebMediaStreamSource so we don't need this hacky thing here. |
| 579 audio_track->AddStopObserver(base::Bind( |
| 580 &WebAudioCapturerSource::Stop, base::Owned(webaudio_source.release()))); |
| 581 } else if (WebRtcAudioCapturer* capturer = source_data->audio_capturer()) { |
| 582 capturer->AddTrack(audio_track.get()); |
| 583 } else { |
| 584 NOTREACHED(); |
| 585 } |
572 | 586 |
573 // Pass the ownership of the native local audio track to the blink track. | 587 // Pass the ownership of the native local audio track to the blink track. |
574 blink::WebMediaStreamTrack writable_track = track; | 588 blink::WebMediaStreamTrack writable_track = track; |
575 writable_track.setExtraData(audio_track.release()); | 589 writable_track.setExtraData(audio_track.release()); |
576 } | 590 } |
577 | 591 |
578 void PeerConnectionDependencyFactory::CreateRemoteAudioTrack( | 592 void PeerConnectionDependencyFactory::CreateRemoteAudioTrack( |
579 const blink::WebMediaStreamTrack& track) { | 593 const blink::WebMediaStreamTrack& track) { |
580 blink::WebMediaStreamSource source = track.source(); | 594 blink::WebMediaStreamSource source = track.source(); |
581 DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeAudio); | 595 DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeAudio); |
582 DCHECK(source.remote()); | 596 DCHECK(source.remote()); |
583 DCHECK(source.extraData()); | 597 DCHECK(MediaStreamAudioSource::From(source)); |
584 | 598 |
585 blink::WebMediaStreamTrack writable_track = track; | 599 blink::WebMediaStreamTrack writable_track = track; |
586 writable_track.setExtraData( | 600 writable_track.setExtraData( |
587 new MediaStreamRemoteAudioTrack(source, track.isEnabled())); | 601 new MediaStreamRemoteAudioTrack(source, track.isEnabled())); |
588 } | 602 } |
589 | 603 |
590 void PeerConnectionDependencyFactory::StartLocalAudioTrack( | 604 scoped_ptr<WebAudioCapturerSource> |
591 WebRtcLocalAudioTrack* audio_track) { | |
592 // Start the audio track. This will hook the |audio_track| to the capturer | |
593 // as the sink of the audio, and only start the source of the capturer if | |
594 // it is the first audio track connecting to the capturer. | |
595 audio_track->Start(); | |
596 } | |
597 | |
598 scoped_refptr<WebAudioCapturerSource> | |
599 PeerConnectionDependencyFactory::CreateWebAudioSource( | 605 PeerConnectionDependencyFactory::CreateWebAudioSource( |
600 blink::WebMediaStreamSource* source) { | 606 blink::WebMediaStreamSource* source) { |
601 DVLOG(1) << "PeerConnectionDependencyFactory::CreateWebAudioSource()"; | 607 DVLOG(1) << "PeerConnectionDependencyFactory::CreateWebAudioSource()"; |
602 | 608 |
603 scoped_refptr<WebAudioCapturerSource> | |
604 webaudio_capturer_source(new WebAudioCapturerSource(*source)); | |
605 MediaStreamAudioSource* source_data = new MediaStreamAudioSource(); | 609 MediaStreamAudioSource* source_data = new MediaStreamAudioSource(); |
606 | 610 |
607 // Use the current default capturer for the WebAudio track so that the | |
608 // WebAudio track can pass a valid delay value and |need_audio_processing| | |
609 // flag to PeerConnection. | |
610 // TODO(xians): Remove this after moving APM to Chrome. | |
611 if (GetWebRtcAudioDevice()) { | |
612 source_data->SetAudioCapturer( | |
613 GetWebRtcAudioDevice()->GetDefaultCapturer()); | |
614 } | |
615 | |
616 // Create a LocalAudioSource object which holds audio options. | 611 // Create a LocalAudioSource object which holds audio options. |
617 // SetLocalAudioSource() affects core audio parts in third_party/Libjingle. | 612 // SetLocalAudioSource() affects core audio parts in third_party/Libjingle. |
618 source_data->SetLocalAudioSource(CreateLocalAudioSource(NULL).get()); | 613 source_data->SetLocalAudioSource(CreateLocalAudioSource(NULL).get()); |
619 source->setExtraData(source_data); | 614 source->setExtraData(source_data); |
620 | 615 |
621 // Replace the default source with WebAudio as source instead. | 616 return make_scoped_ptr(new WebAudioCapturerSource(source)); |
622 source->addAudioConsumer(webaudio_capturer_source.get()); | |
623 | |
624 return webaudio_capturer_source; | |
625 } | 617 } |
626 | 618 |
627 scoped_refptr<webrtc::VideoTrackInterface> | 619 scoped_refptr<webrtc::VideoTrackInterface> |
628 PeerConnectionDependencyFactory::CreateLocalVideoTrack( | 620 PeerConnectionDependencyFactory::CreateLocalVideoTrack( |
629 const std::string& id, | 621 const std::string& id, |
630 webrtc::VideoSourceInterface* source) { | 622 webrtc::VideoSourceInterface* source) { |
631 return GetPcFactory()->CreateVideoTrack(id, source).get(); | 623 return GetPcFactory()->CreateVideoTrack(id, source).get(); |
632 } | 624 } |
633 | 625 |
634 scoped_refptr<webrtc::VideoTrackInterface> | 626 scoped_refptr<webrtc::VideoTrackInterface> |
(...skipping 120 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
755 // Stopping the thread will wait until all tasks have been | 747 // Stopping the thread will wait until all tasks have been |
756 // processed before returning. We wait for the above task to finish before | 748 // processed before returning. We wait for the above task to finish before |
757 // letting the function continue to avoid any potential race issues. | 749 // letting the function continue to avoid any potential race issues. |
758 chrome_worker_thread_.Stop(); | 750 chrome_worker_thread_.Stop(); |
759 } else { | 751 } else { |
760 NOTREACHED() << "Worker thread not running."; | 752 NOTREACHED() << "Worker thread not running."; |
761 } | 753 } |
762 } | 754 } |
763 } | 755 } |
764 | 756 |
765 scoped_refptr<WebRtcAudioCapturer> | 757 scoped_ptr<WebRtcAudioCapturer> |
766 PeerConnectionDependencyFactory::CreateAudioCapturer( | 758 PeerConnectionDependencyFactory::CreateAudioCapturer( |
767 int render_frame_id, | 759 int render_frame_id, |
768 const StreamDeviceInfo& device_info, | 760 const StreamDeviceInfo& device_info, |
769 const blink::WebMediaConstraints& constraints, | 761 const blink::WebMediaConstraints& constraints, |
770 MediaStreamAudioSource* audio_source) { | 762 MediaStreamAudioSource* audio_source) { |
771 // TODO(xians): Handle the cases when gUM is called without a proper render | 763 // TODO(xians): Handle the cases when gUM is called without a proper render |
772 // view, for example, by an extension. | 764 // view, for example, by an extension. |
773 DCHECK_GE(render_frame_id, 0); | 765 DCHECK_GE(render_frame_id, 0); |
774 | 766 |
775 EnsureWebRtcAudioDeviceImpl(); | 767 EnsureWebRtcAudioDeviceImpl(); |
(...skipping 24 matching lines...) Expand all Loading... |
800 } | 792 } |
801 | 793 |
802 void PeerConnectionDependencyFactory::EnsureWebRtcAudioDeviceImpl() { | 794 void PeerConnectionDependencyFactory::EnsureWebRtcAudioDeviceImpl() { |
803 if (audio_device_.get()) | 795 if (audio_device_.get()) |
804 return; | 796 return; |
805 | 797 |
806 audio_device_ = new WebRtcAudioDeviceImpl(); | 798 audio_device_ = new WebRtcAudioDeviceImpl(); |
807 } | 799 } |
808 | 800 |
809 } // namespace content | 801 } // namespace content |
OLD | NEW |