Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(335)

Side by Side Diff: content/renderer/media/webrtc/peer_connection_remote_audio_source.cc

Issue 1834323002: MediaStream audio: Refactor 3 separate "glue" implementations into one. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Addressed comments from PS2: AudioInputDevice --> AudioCapturerSource, and refptr foo in WebRtcMedi… Created 4 years, 8 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 // Copyright 2015 The Chromium Authors. All rights reserved. 1 // Copyright 2015 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "content/renderer/media/webrtc/media_stream_remote_audio_track.h" 5 #include "content/renderer/media/webrtc/peer_connection_remote_audio_source.h"
6
7 #include <stddef.h>
8
9 #include <list>
10 6
11 #include "base/logging.h" 7 #include "base/logging.h"
12 #include "content/public/renderer/media_stream_audio_sink.h" 8 #include "base/time/time.h"
13 #include "third_party/webrtc/api/mediastreaminterface.h" 9 #include "media/base/audio_bus.h"
14 10
15 namespace content { 11 namespace content {
16 12
namespace {

// Sentinel whose unique address identifies PeerConnectionRemoteAudioTrack
// instances, enabling the safe down-cast performed by From() (see
// GetClassIdentifier()).
void* const kClassIdentifier = const_cast<void**>(&kClassIdentifier);

}  // namespace
26 void Add(MediaStreamAudioSink* sink, MediaStreamAudioTrack* track, 18 PeerConnectionRemoteAudioTrack::PeerConnectionRemoteAudioTrack(
27 bool enabled) { 19 scoped_refptr<webrtc::AudioTrackInterface> track_interface)
28 DCHECK(thread_checker_.CalledOnValidThread()); 20 : MediaStreamAudioTrack(false /* is_local_track */),
29 SinkInfo info(sink, track, enabled); 21 track_interface_(std::move(track_interface)) {
30 base::AutoLock lock(lock_); 22 DVLOG(1)
31 sinks_.push_back(info); 23 << "PeerConnectionRemoteAudioTrack::PeerConnectionRemoteAudioTrack()";
32 }
33
34 void Remove(MediaStreamAudioSink* sink, MediaStreamAudioTrack* track) {
35 DCHECK(thread_checker_.CalledOnValidThread());
36 base::AutoLock lock(lock_);
37 sinks_.remove_if([&sink, &track](const SinkInfo& info) {
38 return info.sink == sink && info.track == track;
39 });
40 }
41
42 void SetEnabled(MediaStreamAudioTrack* track, bool enabled) {
43 DCHECK(thread_checker_.CalledOnValidThread());
44 base::AutoLock lock(lock_);
45 for (SinkInfo& info : sinks_) {
46 if (info.track == track)
47 info.enabled = enabled;
48 }
49 }
50
51 void RemoveAll(MediaStreamAudioTrack* track) {
52 base::AutoLock lock(lock_);
53 sinks_.remove_if([&track](const SinkInfo& info) {
54 return info.track == track;
55 });
56 }
57
58 bool IsNeeded() const {
59 DCHECK(thread_checker_.CalledOnValidThread());
60 return !sinks_.empty();
61 }
62
63 private:
64 void OnData(const void* audio_data, int bits_per_sample, int sample_rate,
65 size_t number_of_channels, size_t number_of_frames) override {
66 if (!audio_bus_ ||
67 static_cast<size_t>(audio_bus_->channels()) != number_of_channels ||
68 static_cast<size_t>(audio_bus_->frames()) != number_of_frames) {
69 audio_bus_ = media::AudioBus::Create(number_of_channels,
70 number_of_frames);
71 }
72
73 audio_bus_->FromInterleaved(audio_data, number_of_frames,
74 bits_per_sample / 8);
75
76 bool format_changed = false;
77 if (params_.format() != media::AudioParameters::AUDIO_PCM_LOW_LATENCY ||
78 static_cast<size_t>(params_.channels()) != number_of_channels ||
79 params_.sample_rate() != sample_rate ||
80 static_cast<size_t>(params_.frames_per_buffer()) != number_of_frames) {
81 params_ = media::AudioParameters(
82 media::AudioParameters::AUDIO_PCM_LOW_LATENCY,
83 media::GuessChannelLayout(number_of_channels),
84 sample_rate, 16, number_of_frames);
85 format_changed = true;
86 }
87
88 // TODO(tommi): We should get the timestamp from WebRTC.
89 base::TimeTicks estimated_capture_time(base::TimeTicks::Now());
90
91 base::AutoLock lock(lock_);
92 for (const SinkInfo& info : sinks_) {
93 if (info.enabled) {
94 if (format_changed)
95 info.sink->OnSetFormat(params_);
96 info.sink->OnData(*audio_bus_.get(), estimated_capture_time);
97 }
98 }
99 }
100
101 mutable base::Lock lock_;
102 struct SinkInfo {
103 SinkInfo(MediaStreamAudioSink* sink, MediaStreamAudioTrack* track,
104 bool enabled) : sink(sink), track(track), enabled(enabled) {}
105 MediaStreamAudioSink* sink;
106 MediaStreamAudioTrack* track;
107 bool enabled;
108 };
109 std::list<SinkInfo> sinks_;
110 base::ThreadChecker thread_checker_;
111 media::AudioParameters params_; // Only used on the callback thread.
112 scoped_ptr<media::AudioBus> audio_bus_; // Only used on the callback thread.
113 };
114
115 MediaStreamRemoteAudioTrack::MediaStreamRemoteAudioTrack(
116 const blink::WebMediaStreamSource& source, bool enabled)
117 : MediaStreamAudioTrack(false), source_(source), enabled_(enabled) {
118 DCHECK(source.getExtraData()); // Make sure the source has a native source.
119 } 24 }
120 25
121 MediaStreamRemoteAudioTrack::~MediaStreamRemoteAudioTrack() { 26 PeerConnectionRemoteAudioTrack::~PeerConnectionRemoteAudioTrack() {
122 DCHECK(main_render_thread_checker_.CalledOnValidThread()); 27 DVLOG(1)
28 << "PeerConnectionRemoteAudioTrack::~PeerConnectionRemoteAudioTrack()";
123 // Ensure the track is stopped. 29 // Ensure the track is stopped.
124 MediaStreamAudioTrack::Stop(); 30 MediaStreamAudioTrack::Stop();
125 } 31 }
126 32
127 void MediaStreamRemoteAudioTrack::SetEnabled(bool enabled) { 33 // static
128 DCHECK(main_render_thread_checker_.CalledOnValidThread()); 34 PeerConnectionRemoteAudioTrack* PeerConnectionRemoteAudioTrack::From(
35 MediaStreamAudioTrack* track) {
36 if (track && track->GetClassIdentifier() == kClassIdentifier)
37 return static_cast<PeerConnectionRemoteAudioTrack*>(track);
38 return nullptr;
39 }
40
41 void PeerConnectionRemoteAudioTrack::SetEnabled(bool enabled) {
42 DCHECK(thread_checker_.CalledOnValidThread());
129 43
130 // This affects the shared state of the source for whether or not it's a part 44 // This affects the shared state of the source for whether or not it's a part
131 // of the mixed audio that's rendered for remote tracks from WebRTC. 45 // of the mixed audio that's rendered for remote tracks from WebRTC.
132 // All tracks from the same source will share this state and thus can step 46 // All tracks from the same source will share this state and thus can step
133 // on each other's toes. 47 // on each other's toes.
134 // This is also why we can't check the |enabled_| state for equality with 48 // This is also why we can't check the enabled state for equality with
135 // |enabled| before setting the mixing enabled state. |enabled_| and the 49 // |enabled| before setting the mixing enabled state. This track's enabled
136 // shared state might not be the same. 50 // state and the shared state might not be the same.
137 source()->SetEnabledForMixing(enabled); 51 track_interface_->set_enabled(enabled);
138 52
139 enabled_ = enabled; 53 MediaStreamAudioTrack::SetEnabled(enabled);
140 source()->SetSinksEnabled(this, enabled);
141 } 54 }
142 55
143 void MediaStreamRemoteAudioTrack::OnStop() { 56 void* PeerConnectionRemoteAudioTrack::GetClassIdentifier() const {
144 DCHECK(main_render_thread_checker_.CalledOnValidThread()); 57 return kClassIdentifier;
145 DVLOG(1) << "MediaStreamRemoteAudioTrack::OnStop()"; 58 }
146 59
147 source()->RemoveAll(this); 60 void PeerConnectionRemoteAudioTrack::OnStop() {
61 DCHECK(thread_checker_.CalledOnValidThread());
62 DVLOG(1) << "PeerConnectionRemoteAudioTrack::OnStop()";
148 63
149 // Stop means that a track should be stopped permanently. But 64 // Stop means that a track should be stopped permanently. But
150 // since there is no proper way of doing that on a remote track, we can 65 // since there is no proper way of doing that on a remote track, we can
151 // at least disable the track. Blink will not call down to the content layer 66 // at least disable the track. Blink will not call down to the content layer
152 // after a track has been stopped. 67 // after a track has been stopped.
153 SetEnabled(false); 68 SetEnabled(false);
154 } 69 }
155 70
156 void MediaStreamRemoteAudioTrack::AddSink(MediaStreamAudioSink* sink) { 71 PeerConnectionRemoteAudioSource::PeerConnectionRemoteAudioSource(
157 DCHECK(main_render_thread_checker_.CalledOnValidThread()); 72 scoped_refptr<webrtc::AudioTrackInterface> track_interface)
158 return source()->AddSink(sink, this, enabled_); 73 : MediaStreamAudioSource(false /* is_local_source */),
74 track_interface_(std::move(track_interface)),
75 is_sink_of_peer_connection_(false) {
76 DCHECK(track_interface_);
77 DVLOG(1)
78 << "PeerConnectionRemoteAudioSource::PeerConnectionRemoteAudioSource()";
159 } 79 }
160 80
161 void MediaStreamRemoteAudioTrack::RemoveSink(MediaStreamAudioSink* sink) { 81 PeerConnectionRemoteAudioSource::~PeerConnectionRemoteAudioSource() {
162 DCHECK(main_render_thread_checker_.CalledOnValidThread()); 82 DVLOG(1)
163 return source()->RemoveSink(sink, this); 83 << "PeerConnectionRemoteAudioSource::~PeerConnectionRemoteAudioSource()";
84 EnsureSourceIsStopped();
164 } 85 }
165 86
166 media::AudioParameters MediaStreamRemoteAudioTrack::GetOutputFormat() const { 87 scoped_ptr<MediaStreamAudioTrack>
167 DCHECK(main_render_thread_checker_.CalledOnValidThread()); 88 PeerConnectionRemoteAudioSource::CreateMediaStreamAudioTrack(
168 // This method is not implemented on purpose and should be removed. 89 const std::string& id) {
169 // TODO(tommi): See comment for GetOutputFormat in MediaStreamAudioTrack. 90 DCHECK(thread_checker_.CalledOnValidThread());
170 NOTIMPLEMENTED(); 91 return scoped_ptr<MediaStreamAudioTrack>(
171 return media::AudioParameters(); 92 new PeerConnectionRemoteAudioTrack(track_interface_));
172 } 93 }
173 94
174 webrtc::AudioTrackInterface* MediaStreamRemoteAudioTrack::GetAudioAdapter() { 95 bool PeerConnectionRemoteAudioSource::EnsureSourceIsStarted() {
175 DCHECK(main_render_thread_checker_.CalledOnValidThread()); 96 DCHECK(thread_checker_.CalledOnValidThread());
176 return source()->GetAudioAdapter(); 97 if (is_sink_of_peer_connection_)
98 return true;
99 VLOG(1) << "Starting PeerConnection remote audio source with id="
100 << track_interface_->id();
101 track_interface_->AddSink(this);
102 is_sink_of_peer_connection_ = true;
103 return true;
177 } 104 }
178 105
179 MediaStreamRemoteAudioSource* MediaStreamRemoteAudioTrack::source() const { 106 void PeerConnectionRemoteAudioSource::EnsureSourceIsStopped() {
180 return static_cast<MediaStreamRemoteAudioSource*>(source_.getExtraData());
181 }
182
183 MediaStreamRemoteAudioSource::MediaStreamRemoteAudioSource(
184 const scoped_refptr<webrtc::AudioTrackInterface>& track) : track_(track) {}
185
186 MediaStreamRemoteAudioSource::~MediaStreamRemoteAudioSource() {
187 DCHECK(thread_checker_.CalledOnValidThread()); 107 DCHECK(thread_checker_.CalledOnValidThread());
188 } 108 if (is_sink_of_peer_connection_) {
189 109 track_interface_->RemoveSink(this);
190 void MediaStreamRemoteAudioSource::SetEnabledForMixing(bool enabled) { 110 is_sink_of_peer_connection_ = false;
191 DCHECK(thread_checker_.CalledOnValidThread()); 111 VLOG(1) << "Stopped PeerConnection remote audio source with id="
192 track_->set_enabled(enabled); 112 << track_interface_->id();
193 }
194
195 void MediaStreamRemoteAudioSource::AddSink(MediaStreamAudioSink* sink,
196 MediaStreamAudioTrack* track,
197 bool enabled) {
198 DCHECK(thread_checker_.CalledOnValidThread());
199 if (!sink_) {
200 sink_.reset(new AudioSink());
201 track_->AddSink(sink_.get());
202 }
203
204 sink_->Add(sink, track, enabled);
205 }
206
207 void MediaStreamRemoteAudioSource::RemoveSink(MediaStreamAudioSink* sink,
208 MediaStreamAudioTrack* track) {
209 DCHECK(thread_checker_.CalledOnValidThread());
210 DCHECK(sink_);
211
212 sink_->Remove(sink, track);
213
214 if (!sink_->IsNeeded()) {
215 track_->RemoveSink(sink_.get());
216 sink_.reset();
217 } 113 }
218 } 114 }
219 115
220 void MediaStreamRemoteAudioSource::SetSinksEnabled(MediaStreamAudioTrack* track, 116 void PeerConnectionRemoteAudioSource::OnData(const void* audio_data,
221 bool enabled) { 117 int bits_per_sample,
222 if (sink_) 118 int sample_rate,
223 sink_->SetEnabled(track, enabled); 119 size_t number_of_channels,
224 } 120 size_t number_of_frames) {
121 // TODO(tommi): We should get the timestamp from WebRTC.
122 base::TimeTicks playout_time(base::TimeTicks::Now());
o1ka 2016/04/01 15:11:41 Probably have paranoid thread checks in all those
miu 2016/04/19 00:40:22 Done. But, it's not that they should be coming fr
o1ka 2016/04/21 18:51:22 Acknowledged.
225 123
226 void MediaStreamRemoteAudioSource::RemoveAll(MediaStreamAudioTrack* track) { 124 if (!audio_bus_ ||
227 if (sink_) 125 static_cast<size_t>(audio_bus_->channels()) != number_of_channels ||
228 sink_->RemoveAll(track); 126 static_cast<size_t>(audio_bus_->frames()) != number_of_frames) {
229 } 127 audio_bus_ = media::AudioBus::Create(number_of_channels, number_of_frames);
128 }
230 129
231 webrtc::AudioTrackInterface* MediaStreamRemoteAudioSource::GetAudioAdapter() { 130 audio_bus_->FromInterleaved(audio_data, number_of_frames,
232 DCHECK(thread_checker_.CalledOnValidThread()); 131 bits_per_sample / 8);
233 return track_.get(); 132
133 media::AudioParameters params = MediaStreamAudioSource::GetAudioParameters();
134 if (!params.IsValid() ||
135 params.format() != media::AudioParameters::AUDIO_PCM_LOW_LATENCY ||
136 static_cast<size_t>(params.channels()) != number_of_channels ||
137 params.sample_rate() != sample_rate ||
138 static_cast<size_t>(params.frames_per_buffer()) != number_of_frames) {
139 MediaStreamAudioSource::SetFormat(
140 media::AudioParameters(media::AudioParameters::AUDIO_PCM_LOW_LATENCY,
141 media::GuessChannelLayout(number_of_channels),
142 sample_rate, bits_per_sample, number_of_frames));
143 }
144
145 MediaStreamAudioSource::DeliverDataToTracks(*audio_bus_, playout_time);
234 } 146 }
235 147
236 } // namespace content 148 } // namespace content
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698