OLD | NEW |
| (Empty) |
1 // Copyright 2015 The Chromium Authors. All rights reserved. | |
2 // Use of this source code is governed by a BSD-style license that can be | |
3 // found in the LICENSE file. | |
4 | |
5 #include "content/renderer/media/webrtc/media_stream_remote_audio_track.h" | |
6 | |
7 #include <stddef.h> | |
8 | |
9 #include <list> | |
10 | |
11 #include "base/logging.h" | |
12 #include "content/public/renderer/media_stream_audio_sink.h" | |
13 #include "third_party/webrtc/api/mediastreaminterface.h" | |
14 | |
15 namespace content { | |
16 | |
// Implements webrtc::AudioTrackSinkInterface and fans audio delivered by
// WebRTC out to every content-layer MediaStreamAudioSink registered via the
// tracks of this source.  Add()/Remove()/SetEnabled()/IsNeeded() run on the
// main render thread (asserted via |thread_checker_|) while OnData() arrives
// on a WebRTC audio callback thread; |lock_| protects |sinks_| across the
// two threads.
class MediaStreamRemoteAudioSource::AudioSink
    : public webrtc::AudioTrackSinkInterface {
 public:
  AudioSink() {
  }
  ~AudioSink() override {
    // All entries must have been removed (Remove()/RemoveAll()) before the
    // fan-out adapter is destroyed.
    DCHECK(sinks_.empty());
  }

  // Registers |sink| on behalf of |track|.  |enabled| is the track's current
  // enabled state; disabled entries remain in the list but are skipped when
  // delivering audio in OnData().
  void Add(MediaStreamAudioSink* sink, MediaStreamAudioTrack* track,
           bool enabled) {
    DCHECK(thread_checker_.CalledOnValidThread());
    // Construct the entry outside the lock to keep the critical section
    // minimal; OnData() may be contending for |lock_|.
    SinkInfo info(sink, track, enabled);
    base::AutoLock lock(lock_);
    sinks_.push_back(info);
  }

  // Unregisters the exact (|sink|, |track|) pair previously passed to Add().
  void Remove(MediaStreamAudioSink* sink, MediaStreamAudioTrack* track) {
    DCHECK(thread_checker_.CalledOnValidThread());
    base::AutoLock lock(lock_);
    sinks_.remove_if([&sink, &track](const SinkInfo& info) {
      return info.sink == sink && info.track == track;
    });
  }

  // Updates the enabled flag of every entry registered for |track|.
  void SetEnabled(MediaStreamAudioTrack* track, bool enabled) {
    DCHECK(thread_checker_.CalledOnValidThread());
    base::AutoLock lock(lock_);
    for (SinkInfo& info : sinks_) {
      if (info.track == track)
        info.enabled = enabled;
    }
  }

  // Drops every entry registered for |track| (used when the track is
  // destroyed).  NOTE(review): unlike the other mutators this does not
  // DCHECK |thread_checker_| — presumably it is still main-thread only;
  // confirm against callers.
  void RemoveAll(MediaStreamAudioTrack* track) {
    base::AutoLock lock(lock_);
    sinks_.remove_if([&track](const SinkInfo& info) {
      return info.track == track;
    });
  }

  // True while at least one sink is registered.  The owning source unhooks
  // and deletes this adapter once it returns false.
  bool IsNeeded() const {
    DCHECK(thread_checker_.CalledOnValidThread());
    return !sinks_.empty();
  }

 private:
  // webrtc::AudioTrackSinkInterface implementation.  Called on a WebRTC
  // audio callback thread with interleaved PCM.
  void OnData(const void* audio_data, int bits_per_sample, int sample_rate,
              size_t number_of_channels, size_t number_of_frames) override {
    // (Re)allocate the deinterleave buffer whenever the channel/frame
    // configuration changes.
    if (!audio_bus_ ||
        static_cast<size_t>(audio_bus_->channels()) != number_of_channels ||
        static_cast<size_t>(audio_bus_->frames()) != number_of_frames) {
      audio_bus_ = media::AudioBus::Create(number_of_channels,
                                           number_of_frames);
    }

    audio_bus_->FromInterleaved(audio_data, number_of_frames,
                                bits_per_sample / 8);

    // Detect format changes so sinks get OnSetFormat() before receiving any
    // data in the new format.  |params_| is only touched on this thread.
    bool format_changed = false;
    if (params_.format() != media::AudioParameters::AUDIO_PCM_LOW_LATENCY ||
        static_cast<size_t>(params_.channels()) != number_of_channels ||
        params_.sample_rate() != sample_rate ||
        static_cast<size_t>(params_.frames_per_buffer()) != number_of_frames) {
      params_ = media::AudioParameters(
          media::AudioParameters::AUDIO_PCM_LOW_LATENCY,
          media::GuessChannelLayout(number_of_channels),
          sample_rate, 16, number_of_frames);
      format_changed = true;
    }

    // TODO(tommi): We should get the timestamp from WebRTC.
    base::TimeTicks estimated_capture_time(base::TimeTicks::Now());

    // Hold the lock only for delivery; entries disabled via SetEnabled()
    // are skipped.
    base::AutoLock lock(lock_);
    for (const SinkInfo& info : sinks_) {
      if (info.enabled) {
        if (format_changed)
          info.sink->OnSetFormat(params_);
        info.sink->OnData(*audio_bus_.get(), estimated_capture_time);
      }
    }
  }

  // Guards |sinks_|: mutated on the main render thread, read in OnData() on
  // the WebRTC audio callback thread.
  mutable base::Lock lock_;
  struct SinkInfo {
    SinkInfo(MediaStreamAudioSink* sink, MediaStreamAudioTrack* track,
             bool enabled) : sink(sink), track(track), enabled(enabled) {}
    MediaStreamAudioSink* sink;    // Not owned.
    MediaStreamAudioTrack* track;  // Not owned.
    bool enabled;
  };
  std::list<SinkInfo> sinks_;
  base::ThreadChecker thread_checker_;
  media::AudioParameters params_;  // Only used on the callback thread.
  scoped_ptr<media::AudioBus> audio_bus_;  // Only used on the callback thread.
};
114 | |
// |source| must already carry the native MediaStreamRemoteAudioSource as its
// extra data (see source()).  |enabled| is the track's initial enabled state.
// The |false| passed to MediaStreamAudioTrack presumably marks this as a
// non-local (remote) track — confirm against the base class.
MediaStreamRemoteAudioTrack::MediaStreamRemoteAudioTrack(
    const blink::WebMediaStreamSource& source, bool enabled)
    : MediaStreamAudioTrack(false), source_(source), enabled_(enabled) {
  DCHECK(source.extraData());  // Make sure the source has a native source.
}
120 | |
MediaStreamRemoteAudioTrack::~MediaStreamRemoteAudioTrack() {
  DCHECK(main_render_thread_checker_.CalledOnValidThread());
  // Drop every sink this track registered with the shared source so the
  // source's fan-out adapter holds no stale pointers to them.
  source()->RemoveAll(this);
}
125 | |
// Enables or disables the track, updating both the shared source-level
// mixing state and the per-track sink delivery state.
void MediaStreamRemoteAudioTrack::SetEnabled(bool enabled) {
  DCHECK(main_render_thread_checker_.CalledOnValidThread());

  // This affects the shared state of the source for whether or not it's a part
  // of the mixed audio that's rendered for remote tracks from WebRTC.
  // All tracks from the same source will share this state and thus can step
  // on each other's toes.
  // This is also why we can't check the |enabled_| state for equality with
  // |enabled| before setting the mixing enabled state. |enabled_| and the
  // shared state might not be the same.
  source()->SetEnabledForMixing(enabled);

  // Per-track state: remember the new value and flip delivery for this
  // track's sinks only.
  enabled_ = enabled;
  source()->SetSinksEnabled(this, enabled);
}
141 | |
void MediaStreamRemoteAudioTrack::Stop() {
  DCHECK(main_render_thread_checker_.CalledOnValidThread());
  // Stop means that a track should be stopped permanently. But
  // since there is no proper way of doing that on a remote track, we can
  // at least disable the track. Blink will not call down to the content layer
  // after a track has been stopped.
  SetEnabled(false);
}
150 | |
151 void MediaStreamRemoteAudioTrack::AddSink(MediaStreamAudioSink* sink) { | |
152 DCHECK(main_render_thread_checker_.CalledOnValidThread()); | |
153 return source()->AddSink(sink, this, enabled_); | |
154 } | |
155 | |
156 void MediaStreamRemoteAudioTrack::RemoveSink(MediaStreamAudioSink* sink) { | |
157 DCHECK(main_render_thread_checker_.CalledOnValidThread()); | |
158 return source()->RemoveSink(sink, this); | |
159 } | |
160 | |
161 media::AudioParameters MediaStreamRemoteAudioTrack::GetOutputFormat() const { | |
162 DCHECK(main_render_thread_checker_.CalledOnValidThread()); | |
163 // This method is not implemented on purpose and should be removed. | |
164 // TODO(tommi): See comment for GetOutputFormat in MediaStreamAudioTrack. | |
165 NOTIMPLEMENTED(); | |
166 return media::AudioParameters(); | |
167 } | |
168 | |
169 webrtc::AudioTrackInterface* MediaStreamRemoteAudioTrack::GetAudioAdapter() { | |
170 DCHECK(main_render_thread_checker_.CalledOnValidThread()); | |
171 return source()->GetAudioAdapter(); | |
172 } | |
173 | |
174 MediaStreamRemoteAudioSource* MediaStreamRemoteAudioTrack::source() const { | |
175 return static_cast<MediaStreamRemoteAudioSource*>(source_.extraData()); | |
176 } | |
177 | |
// Wraps |track|, the remote webrtc audio track this source represents.
MediaStreamRemoteAudioSource::MediaStreamRemoteAudioSource(
    const scoped_refptr<webrtc::AudioTrackInterface>& track) : track_(track) {}
180 | |
MediaStreamRemoteAudioSource::~MediaStreamRemoteAudioSource() {
  DCHECK(thread_checker_.CalledOnValidThread());
}
184 | |
// Sets the webrtc-level enabled state of the wrapped track, which controls
// whether this source contributes to the mixed remote audio.  This state is
// shared by all content-layer tracks created from this source (see the
// comment in MediaStreamRemoteAudioTrack::SetEnabled).
void MediaStreamRemoteAudioSource::SetEnabledForMixing(bool enabled) {
  DCHECK(thread_checker_.CalledOnValidThread());
  track_->set_enabled(enabled);
}
189 | |
190 void MediaStreamRemoteAudioSource::AddSink(MediaStreamAudioSink* sink, | |
191 MediaStreamAudioTrack* track, | |
192 bool enabled) { | |
193 DCHECK(thread_checker_.CalledOnValidThread()); | |
194 if (!sink_) { | |
195 sink_.reset(new AudioSink()); | |
196 track_->AddSink(sink_.get()); | |
197 } | |
198 | |
199 sink_->Add(sink, track, enabled); | |
200 } | |
201 | |
202 void MediaStreamRemoteAudioSource::RemoveSink(MediaStreamAudioSink* sink, | |
203 MediaStreamAudioTrack* track) { | |
204 DCHECK(thread_checker_.CalledOnValidThread()); | |
205 DCHECK(sink_); | |
206 | |
207 sink_->Remove(sink, track); | |
208 | |
209 if (!sink_->IsNeeded()) { | |
210 track_->RemoveSink(sink_.get()); | |
211 sink_.reset(); | |
212 } | |
213 } | |
214 | |
215 void MediaStreamRemoteAudioSource::SetSinksEnabled(MediaStreamAudioTrack* track, | |
216 bool enabled) { | |
217 if (sink_) | |
218 sink_->SetEnabled(track, enabled); | |
219 } | |
220 | |
// Removes every sink registered for |track|; called from the track's
// destructor.  NOTE(review): unlike the other mutators this omits the
// |thread_checker_| DCHECK — presumably still main-thread only; confirm
// before adding the assert.
void MediaStreamRemoteAudioSource::RemoveAll(MediaStreamAudioTrack* track) {
  if (sink_)
    sink_->RemoveAll(track);
}
225 | |
226 webrtc::AudioTrackInterface* MediaStreamRemoteAudioSource::GetAudioAdapter() { | |
227 DCHECK(thread_checker_.CalledOnValidThread()); | |
228 return track_.get(); | |
229 } | |
230 | |
231 } // namespace content | |
OLD | NEW |