OLD | NEW |
1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/renderer/media/webrtc/media_stream_remote_audio_track.h" | 5 #include "content/renderer/media/webrtc/media_stream_remote_audio_track.h" |
6 | 6 |
7 #include <stddef.h> | 7 #include <stddef.h> |
8 | 8 |
9 #include <list> | 9 #include <list> |
10 | 10 |
| 11 #include "base/bind.h" |
| 12 #include "base/bind_helpers.h" |
11 #include "base/logging.h" | 13 #include "base/logging.h" |
12 #include "content/public/renderer/media_stream_audio_sink.h" | 14 #include "content/public/renderer/media_stream_audio_sink.h" |
13 #include "third_party/webrtc/api/mediastreaminterface.h" | 15 #include "third_party/webrtc/api/mediastreaminterface.h" |
14 | 16 |
15 namespace content { | 17 namespace content { |
16 | 18 |
17 class MediaStreamRemoteAudioSource::AudioSink | 19 class MediaStreamRemoteAudioSource::AudioSink |
18 : public webrtc::AudioTrackSinkInterface { | 20 : public webrtc::AudioTrackSinkInterface { |
19 public: | 21 public: |
20 AudioSink() { | 22 AudioSink() { |
(...skipping 87 matching lines...) |
108 }; | 110 }; |
109 std::list<SinkInfo> sinks_; | 111 std::list<SinkInfo> sinks_; |
110 base::ThreadChecker thread_checker_; | 112 base::ThreadChecker thread_checker_; |
111 media::AudioParameters params_; // Only used on the callback thread. | 113 media::AudioParameters params_; // Only used on the callback thread. |
112 scoped_ptr<media::AudioBus> audio_bus_; // Only used on the callback thread. | 114 scoped_ptr<media::AudioBus> audio_bus_; // Only used on the callback thread. |
113 }; | 115 }; |
114 | 116 |
115 MediaStreamRemoteAudioTrack::MediaStreamRemoteAudioTrack( | 117 MediaStreamRemoteAudioTrack::MediaStreamRemoteAudioTrack( |
116 const blink::WebMediaStreamSource& source, bool enabled) | 118 const blink::WebMediaStreamSource& source, bool enabled) |
117 : MediaStreamAudioTrack(false), source_(source), enabled_(enabled) { | 119 : MediaStreamAudioTrack(false), source_(source), enabled_(enabled) { |
118 DCHECK(source.extraData()); // Make sure the source has a native source. | 120 DCHECK(source.extraData()); |
| 121 |
| 122 // Stop means that a track should be stopped permanently. But |
| 123 // since there is no proper way of doing that on a remote track, we can |
| 124 // at least disable the track. Blink will not call down to the content layer |
| 125 // after a track has been stopped. |
| 126 MediaStreamAudioTrack::AddStopObserver(base::Bind( |
| 127 &MediaStreamRemoteAudioTrack::SetEnabled, base::Unretained(this), false)); |
119 } | 128 } |
120 | 129 |
121 MediaStreamRemoteAudioTrack::~MediaStreamRemoteAudioTrack() { | 130 MediaStreamRemoteAudioTrack::~MediaStreamRemoteAudioTrack() { |
122 DCHECK(main_render_thread_checker_.CalledOnValidThread()); | 131 DCHECK(main_render_thread_checker_.CalledOnValidThread()); |
123 source()->RemoveAll(this); | 132 source()->RemoveAll(this); |
| 133 |
| 134 // Even though the base class calls Stop(), do it here because the stop |
| 135 // callback added in this class's constructor needs to be run before the data |
| 136 // members of this class are destroyed. |
| 137 Stop(); |
124 } | 138 } |
125 | 139 |
126 void MediaStreamRemoteAudioTrack::SetEnabled(bool enabled) { | 140 void MediaStreamRemoteAudioTrack::SetEnabled(bool enabled) { |
127 DCHECK(main_render_thread_checker_.CalledOnValidThread()); | 141 DCHECK(main_render_thread_checker_.CalledOnValidThread()); |
128 | 142 |
129 // This affects the shared state of the source for whether or not it's a part | 143 // This affects the shared state of the source for whether or not it's a part |
130 // of the mixed audio that's rendered for remote tracks from WebRTC. | 144 // of the mixed audio that's rendered for remote tracks from WebRTC. |
131 // All tracks from the same source will share this state and thus can step | 145 // All tracks from the same source will share this state and thus can step |
132 // on each other's toes. | 146 // on each other's toes. |
133 // This is also why we can't check the |enabled_| state for equality with | 147 // This is also why we can't check the |enabled_| state for equality with |
134 // |enabled| before setting the mixing enabled state. |enabled_| and the | 148 // |enabled| before setting the mixing enabled state. |enabled_| and the |
135 // shared state might not be the same. | 149 // shared state might not be the same. |
136 source()->SetEnabledForMixing(enabled); | 150 source()->SetEnabledForMixing(enabled); |
137 | 151 |
138 enabled_ = enabled; | 152 enabled_ = enabled; |
139 source()->SetSinksEnabled(this, enabled); | 153 source()->SetSinksEnabled(this, enabled); |
140 } | 154 } |
141 | 155 |
142 void MediaStreamRemoteAudioTrack::Stop() { | |
143 DCHECK(main_render_thread_checker_.CalledOnValidThread()); | |
144 // Stop means that a track should be stopped permanently. But | |
145 // since there is no proper way of doing that on a remote track, we can | |
146 // at least disable the track. Blink will not call down to the content layer | |
147 // after a track has been stopped. | |
148 SetEnabled(false); | |
149 } | |
150 | |
151 void MediaStreamRemoteAudioTrack::AddSink(MediaStreamAudioSink* sink) { | 156 void MediaStreamRemoteAudioTrack::AddSink(MediaStreamAudioSink* sink) { |
152 DCHECK(main_render_thread_checker_.CalledOnValidThread()); | 157 DCHECK(main_render_thread_checker_.CalledOnValidThread()); |
153 return source()->AddSink(sink, this, enabled_); | 158 return source()->AddSink(sink, this, enabled_); |
154 } | 159 } |
155 | 160 |
156 void MediaStreamRemoteAudioTrack::RemoveSink(MediaStreamAudioSink* sink) { | 161 void MediaStreamRemoteAudioTrack::RemoveSink(MediaStreamAudioSink* sink) { |
157 DCHECK(main_render_thread_checker_.CalledOnValidThread()); | 162 DCHECK(main_render_thread_checker_.CalledOnValidThread()); |
158 return source()->RemoveSink(sink, this); | 163 return source()->RemoveSink(sink, this); |
159 } | 164 } |
160 | 165 |
(...skipping 61 matching lines...) |
222 if (sink_) | 227 if (sink_) |
223 sink_->RemoveAll(track); | 228 sink_->RemoveAll(track); |
224 } | 229 } |
225 | 230 |
226 webrtc::AudioTrackInterface* MediaStreamRemoteAudioSource::GetAudioAdapter() { | 231 webrtc::AudioTrackInterface* MediaStreamRemoteAudioSource::GetAudioAdapter() { |
227 DCHECK(thread_checker_.CalledOnValidThread()); | 232 DCHECK(thread_checker_.CalledOnValidThread()); |
228 return track_.get(); | 233 return track_.get(); |
229 } | 234 } |
230 | 235 |
231 } // namespace content | 236 } // namespace content |
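
The new constructor and destructor above rely on a stop-observer callback: the constructor registers a callback that disables the track (since a remote track cannot be stopped permanently), and the destructor calls Stop() explicitly so that callback runs while this class's data members are still alive. Below is a minimal, standalone sketch of that pattern, assuming the base class simply stores its stop callbacks in a list and runs them once on Stop(); std::function stands in for base::Closure, and the class names are illustrative, not the actual Chromium types.

// Minimal sketch of the stop-observer pattern; illustrative names only.
#include <functional>
#include <iostream>
#include <utility>
#include <vector>

class AudioTrackBase {
 public:
  virtual ~AudioTrackBase() { Stop(); }

  // Registers a callback that runs exactly once when the track is stopped.
  void AddStopObserver(std::function<void()> observer) {
    stop_observers_.push_back(std::move(observer));
  }

  // Runs all stop observers the first time it is called; later calls no-op.
  void Stop() {
    if (stopped_)
      return;
    stopped_ = true;
    for (auto& observer : stop_observers_)
      observer();
    stop_observers_.clear();
  }

 private:
  bool stopped_ = false;
  std::vector<std::function<void()>> stop_observers_;
};

class RemoteAudioTrack : public AudioTrackBase {
 public:
  RemoteAudioTrack() {
    // A remote track cannot be stopped permanently, so "stop" degrades to
    // disabling the track, mirroring the constructor change in the diff.
    AddStopObserver([this] { SetEnabled(false); });
  }

  ~RemoteAudioTrack() override {
    // Run the stop callback while this object's members are still alive,
    // mirroring the explicit Stop() added to the destructor above.
    Stop();
  }

  void SetEnabled(bool enabled) {
    enabled_ = enabled;
    std::cout << "track enabled: " << std::boolalpha << enabled << "\n";
  }

 private:
  bool enabled_ = true;
};

int main() {
  RemoteAudioTrack track;
  track.Stop();  // Prints "track enabled: false"; the destructor's Stop() is a no-op.
}

The explicit Stop() in the derived destructor matters because the base-class destructor runs last: by the time it would call Stop(), the derived members that the registered callback touches would already be destroyed, which is the hazard the new destructor comment in the patch describes.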