Index: content/renderer/media/webrtc/webrtc_media_stream_adapter.cc |
diff --git a/content/renderer/media/webrtc/webrtc_media_stream_adapter.cc b/content/renderer/media/webrtc/webrtc_media_stream_adapter.cc |
index 3c34d794dded57cd712436cbad564dca909e531b..7708b50ccff296332a857b6cf8729cdb11cc9d13 100644 |
--- a/content/renderer/media/webrtc/webrtc_media_stream_adapter.cc |
+++ b/content/renderer/media/webrtc/webrtc_media_stream_adapter.cc |
@@ -5,11 +5,12 @@ |
#include "content/renderer/media/webrtc/webrtc_media_stream_adapter.h" |
#include "base/logging.h" |
-#include "content/renderer/media/media_stream_audio_source.h" |
#include "content/renderer/media/media_stream_audio_track.h" |
#include "content/renderer/media/media_stream_track.h" |
#include "content/renderer/media/webrtc/media_stream_video_webrtc_sink.h" |
#include "content/renderer/media/webrtc/peer_connection_dependency_factory.h" |
+#include "content/renderer/media/webrtc/processed_local_audio_source.h" |
+#include "content/renderer/media/webrtc/webrtc_audio_sink.h" |
#include "third_party/WebKit/public/platform/WebMediaStreamSource.h" |
#include "third_party/WebKit/public/platform/WebMediaStreamTrack.h" |
#include "third_party/WebKit/public/platform/WebString.h" |
@@ -27,12 +28,12 @@ WebRtcMediaStreamAdapter::WebRtcMediaStreamAdapter( |
blink::WebVector<blink::WebMediaStreamTrack> audio_tracks; |
web_stream_.audioTracks(audio_tracks); |
for (blink::WebMediaStreamTrack& audio_track : audio_tracks) |
- CreateAudioTrack(audio_track); |
+ AddAudioSinkToTrack(audio_track); |
blink::WebVector<blink::WebMediaStreamTrack> video_tracks; |
web_stream_.videoTracks(video_tracks); |
for (blink::WebMediaStreamTrack& video_track : video_tracks) |
- CreateVideoTrack(video_track); |
+ AddVideoSinkToTrack(video_track); |
MediaStream* const native_stream = MediaStream::GetMediaStream(web_stream_); |
native_stream->AddObserver(this); |
@@ -41,72 +42,110 @@ WebRtcMediaStreamAdapter::WebRtcMediaStreamAdapter( |
WebRtcMediaStreamAdapter::~WebRtcMediaStreamAdapter() { |
MediaStream* const native_stream = MediaStream::GetMediaStream(web_stream_); |
native_stream->RemoveObserver(this); |
+ |
+ blink::WebVector<blink::WebMediaStreamTrack> audio_tracks; |
+ web_stream_.audioTracks(audio_tracks); |
+ for (blink::WebMediaStreamTrack& audio_track : audio_tracks) |
+ TrackRemoved(audio_track); |
+ DCHECK(audio_sinks_.empty()); |
+ blink::WebVector<blink::WebMediaStreamTrack> video_tracks; |
+ web_stream_.videoTracks(video_tracks); |
+ for (blink::WebMediaStreamTrack& video_track : video_tracks) |
+ TrackRemoved(video_track); |
+ DCHECK(video_sinks_.empty()); |
} |
void WebRtcMediaStreamAdapter::TrackAdded( |
const blink::WebMediaStreamTrack& track) { |
if (track.source().getType() == blink::WebMediaStreamSource::TypeAudio) |
- CreateAudioTrack(track); |
+ AddAudioSinkToTrack(track); |
else |
- CreateVideoTrack(track); |
+ AddVideoSinkToTrack(track); |
} |
void WebRtcMediaStreamAdapter::TrackRemoved( |
const blink::WebMediaStreamTrack& track) { |
const std::string track_id = track.id().utf8(); |
if (track.source().getType() == blink::WebMediaStreamSource::TypeAudio) { |
- webrtc_media_stream_->RemoveTrack( |
- webrtc_media_stream_->FindAudioTrack(track_id)); |
+ scoped_refptr<webrtc::AudioTrackInterface> webrtc_track = |
+ make_scoped_refptr( |
+ webrtc_media_stream_->FindAudioTrack(track_id).get()); |
+ if (!webrtc_track) |
+ return; |
+ webrtc_media_stream_->RemoveTrack(webrtc_track.get()); |
+ |
+ for (auto it = audio_sinks_.begin(); it != audio_sinks_.end(); ++it) { |
+ if ((*it)->webrtc_audio_track() == webrtc_track.get()) { |
+ if (auto* media_stream_track = MediaStreamAudioTrack::From(track)) |
+ media_stream_track->RemoveSink(it->get()); |
+ audio_sinks_.erase(it); |
+ break; |
+ } |
+ } |
} else { |
DCHECK_EQ(track.source().getType(), blink::WebMediaStreamSource::TypeVideo); |
scoped_refptr<webrtc::VideoTrackInterface> webrtc_track = |
- webrtc_media_stream_->FindVideoTrack(track_id).get(); |
+ make_scoped_refptr( |
+ webrtc_media_stream_->FindVideoTrack(track_id).get()); |
+ if (!webrtc_track) |
+ return; |
webrtc_media_stream_->RemoveTrack(webrtc_track.get()); |
- for (ScopedVector<MediaStreamVideoWebRtcSink>::iterator it = |
- video_adapters_.begin(); it != video_adapters_.end(); ++it) { |
+ for (auto it = video_sinks_.begin(); it != video_sinks_.end(); ++it) { |
if ((*it)->webrtc_video_track() == webrtc_track.get()) { |
- video_adapters_.erase(it); |
+ video_sinks_.erase(it); |
break; |
} |
} |
} |
} |
-void WebRtcMediaStreamAdapter::CreateAudioTrack( |
+void WebRtcMediaStreamAdapter::AddAudioSinkToTrack( |
const blink::WebMediaStreamTrack& track) { |
- DCHECK_EQ(track.source().getType(), blink::WebMediaStreamSource::TypeAudio); |
- // A media stream is connected to a peer connection, enable the |
- // peer connection mode for the sources. |
MediaStreamAudioTrack* native_track = MediaStreamAudioTrack::From(track); |
if (!native_track) { |
DLOG(ERROR) << "No native track for blink audio track."; |
return; |
} |
- webrtc::AudioTrackInterface* audio_track = native_track->GetAudioAdapter(); |
- if (!audio_track) { |
- DLOG(ERROR) << "Audio track doesn't support webrtc."; |
- return; |
- } |
- |
- if (native_track->is_local_track()) { |
- const blink::WebMediaStreamSource& source = track.source(); |
- MediaStreamAudioSource* audio_source = MediaStreamAudioSource::From(source); |
- if (audio_source && audio_source->audio_capturer()) |
- audio_source->audio_capturer()->EnablePeerConnectionMode(); |
+ WebRtcAudioSink* audio_sink; |
+ if (auto* media_stream_source = ProcessedLocalAudioSource::From( |
+ MediaStreamAudioSource::From(track.source()))) { |
+ audio_sink = new WebRtcAudioSink( |
+ track.id().utf8(), media_stream_source->rtc_source(), |
+ factory_->GetWebRtcSignalingThread()); |
+ audio_sink->SetLevel(media_stream_source->audio_level()); |
+ // The sink only grabs stats from the audio processor. Stats are only |
+ // available if audio processing is turned on. Therefore, only provide the |
+ // sink a reference to the processor if audio processing is turned on. |
+ if (auto processor = media_stream_source->audio_processor()) { |
+ if (processor->has_audio_processing()) |
+ audio_sink->SetAudioProcessor(processor); |
+ } |
+ } else { |
+ // Remote sources and other non-WebRtc local sources do not provide an |
+ // instance of the webrtc::AudioSourceInterface, and also do not need |
+ // references to the audio level calculator or audio processor passed to the |
+ // sink. |
+ webrtc::AudioSourceInterface* source_interface = nullptr; |
+ audio_sink = new WebRtcAudioSink( |
+ track.id().utf8(), source_interface, |
+ factory_->GetWebRtcSignalingThread()); |
} |
- webrtc_media_stream_->AddTrack(audio_track); |
+ audio_sinks_.push_back(std::unique_ptr<WebRtcAudioSink>(audio_sink)); |
+ native_track->AddSink(audio_sink); |
+ webrtc_media_stream_->AddTrack(audio_sink->webrtc_audio_track()); |
} |
-void WebRtcMediaStreamAdapter::CreateVideoTrack( |
+void WebRtcMediaStreamAdapter::AddVideoSinkToTrack( |
const blink::WebMediaStreamTrack& track) { |
DCHECK_EQ(track.source().getType(), blink::WebMediaStreamSource::TypeVideo); |
- MediaStreamVideoWebRtcSink* adapter = |
+ MediaStreamVideoWebRtcSink* video_sink = |
new MediaStreamVideoWebRtcSink(track, factory_); |
- video_adapters_.push_back(adapter); |
- webrtc_media_stream_->AddTrack(adapter->webrtc_video_track()); |
+ video_sinks_.push_back( |
+ std::unique_ptr<MediaStreamVideoWebRtcSink>(video_sink)); |
+ webrtc_media_stream_->AddTrack(video_sink->webrtc_video_track()); |
} |
} // namespace content |