Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "content/renderer/media/webrtc/media_stream_remote_audio_track.h" | 5 #include "content/renderer/media/webrtc/peer_connection_remote_audio_source.h" |
| 6 | |
| 7 #include <stddef.h> | |
| 8 | |
| 9 #include <list> | |
| 10 | 6 |
| 11 #include "base/logging.h" | 7 #include "base/logging.h" |
| 12 #include "content/public/renderer/media_stream_audio_sink.h" | 8 #include "base/time/time.h" |
| 13 #include "third_party/webrtc/api/mediastreaminterface.h" | 9 #include "media/base/audio_bus.h" |
| 14 | 10 |
| 15 namespace content { | 11 namespace content { |
| 16 | 12 |
| 17 class MediaStreamRemoteAudioSource::AudioSink | 13 namespace { |
| 18 : public webrtc::AudioTrackSinkInterface { | 14 // Used as an identifier for the down-casters. |
| 19 public: | 15 void* const kClassIdentifier = const_cast<void**>(&kClassIdentifier); |
| 20 AudioSink() { | 16 } // namespace |
| 21 } | |
| 22 ~AudioSink() override { | |
| 23 DCHECK(sinks_.empty()); | |
| 24 } | |
| 25 | 17 |
| 26 void Add(MediaStreamAudioSink* sink, MediaStreamAudioTrack* track, | 18 PeerConnectionRemoteAudioTrack::PeerConnectionRemoteAudioTrack( |
| 27 bool enabled) { | 19 scoped_refptr<webrtc::AudioTrackInterface> track_interface) |
| 28 DCHECK(thread_checker_.CalledOnValidThread()); | 20 : MediaStreamAudioTrack(false /* is_local_track */), |
| 29 SinkInfo info(sink, track, enabled); | 21 track_interface_(std::move(track_interface)) { |
| 30 base::AutoLock lock(lock_); | 22 DVLOG(1) |
| 31 sinks_.push_back(info); | 23 << "PeerConnectionRemoteAudioTrack::PeerConnectionRemoteAudioTrack()"; |
| 32 } | |
| 33 | |
| 34 void Remove(MediaStreamAudioSink* sink, MediaStreamAudioTrack* track) { | |
| 35 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 36 base::AutoLock lock(lock_); | |
| 37 sinks_.remove_if([&sink, &track](const SinkInfo& info) { | |
| 38 return info.sink == sink && info.track == track; | |
| 39 }); | |
| 40 } | |
| 41 | |
| 42 void SetEnabled(MediaStreamAudioTrack* track, bool enabled) { | |
| 43 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 44 base::AutoLock lock(lock_); | |
| 45 for (SinkInfo& info : sinks_) { | |
| 46 if (info.track == track) | |
| 47 info.enabled = enabled; | |
| 48 } | |
| 49 } | |
| 50 | |
| 51 void RemoveAll(MediaStreamAudioTrack* track) { | |
| 52 base::AutoLock lock(lock_); | |
| 53 sinks_.remove_if([&track](const SinkInfo& info) { | |
| 54 return info.track == track; | |
| 55 }); | |
| 56 } | |
| 57 | |
| 58 bool IsNeeded() const { | |
| 59 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 60 return !sinks_.empty(); | |
| 61 } | |
| 62 | |
| 63 private: | |
| 64 void OnData(const void* audio_data, int bits_per_sample, int sample_rate, | |
| 65 size_t number_of_channels, size_t number_of_frames) override { | |
| 66 if (!audio_bus_ || | |
| 67 static_cast<size_t>(audio_bus_->channels()) != number_of_channels || | |
| 68 static_cast<size_t>(audio_bus_->frames()) != number_of_frames) { | |
| 69 audio_bus_ = media::AudioBus::Create(number_of_channels, | |
| 70 number_of_frames); | |
| 71 } | |
| 72 | |
| 73 audio_bus_->FromInterleaved(audio_data, number_of_frames, | |
| 74 bits_per_sample / 8); | |
| 75 | |
| 76 bool format_changed = false; | |
| 77 if (params_.format() != media::AudioParameters::AUDIO_PCM_LOW_LATENCY || | |
| 78 static_cast<size_t>(params_.channels()) != number_of_channels || | |
| 79 params_.sample_rate() != sample_rate || | |
| 80 static_cast<size_t>(params_.frames_per_buffer()) != number_of_frames) { | |
| 81 params_ = media::AudioParameters( | |
| 82 media::AudioParameters::AUDIO_PCM_LOW_LATENCY, | |
| 83 media::GuessChannelLayout(number_of_channels), | |
| 84 sample_rate, 16, number_of_frames); | |
| 85 format_changed = true; | |
| 86 } | |
| 87 | |
| 88 // TODO(tommi): We should get the timestamp from WebRTC. | |
| 89 base::TimeTicks estimated_capture_time(base::TimeTicks::Now()); | |
| 90 | |
| 91 base::AutoLock lock(lock_); | |
| 92 for (const SinkInfo& info : sinks_) { | |
| 93 if (info.enabled) { | |
| 94 if (format_changed) | |
| 95 info.sink->OnSetFormat(params_); | |
| 96 info.sink->OnData(*audio_bus_.get(), estimated_capture_time); | |
| 97 } | |
| 98 } | |
| 99 } | |
| 100 | |
| 101 mutable base::Lock lock_; | |
| 102 struct SinkInfo { | |
| 103 SinkInfo(MediaStreamAudioSink* sink, MediaStreamAudioTrack* track, | |
| 104 bool enabled) : sink(sink), track(track), enabled(enabled) {} | |
| 105 MediaStreamAudioSink* sink; | |
| 106 MediaStreamAudioTrack* track; | |
| 107 bool enabled; | |
| 108 }; | |
| 109 std::list<SinkInfo> sinks_; | |
| 110 base::ThreadChecker thread_checker_; | |
| 111 media::AudioParameters params_; // Only used on the callback thread. | |
| 112 std::unique_ptr<media::AudioBus> | |
| 113 audio_bus_; // Only used on the callback thread. | |
| 114 }; | |
| 115 | |
| 116 MediaStreamRemoteAudioTrack::MediaStreamRemoteAudioTrack( | |
| 117 const blink::WebMediaStreamSource& source, bool enabled) | |
| 118 : MediaStreamAudioTrack(false), source_(source), enabled_(enabled) { | |
| 119 DCHECK(source.getExtraData()); // Make sure the source has a native source. | |
| 120 } | 24 } |
| 121 | 25 |
| 122 MediaStreamRemoteAudioTrack::~MediaStreamRemoteAudioTrack() { | 26 PeerConnectionRemoteAudioTrack::~PeerConnectionRemoteAudioTrack() { |
| 123 DCHECK(main_render_thread_checker_.CalledOnValidThread()); | 27 DVLOG(1) |
| 28 << "PeerConnectionRemoteAudioTrack::~PeerConnectionRemoteAudioTrack()"; | |
| 124 // Ensure the track is stopped. | 29 // Ensure the track is stopped. |
| 125 MediaStreamAudioTrack::Stop(); | 30 MediaStreamAudioTrack::Stop(); |
| 126 } | 31 } |
| 127 | 32 |
| 128 void MediaStreamRemoteAudioTrack::SetEnabled(bool enabled) { | 33 // static |
| 129 DCHECK(main_render_thread_checker_.CalledOnValidThread()); | 34 PeerConnectionRemoteAudioTrack* PeerConnectionRemoteAudioTrack::From( |
| 35 MediaStreamAudioTrack* track) { | |
| 36 if (track && track->GetClassIdentifier() == kClassIdentifier) | |
| 37 return static_cast<PeerConnectionRemoteAudioTrack*>(track); | |
| 38 return nullptr; | |
| 39 } | |
| 40 | |
| 41 void PeerConnectionRemoteAudioTrack::SetEnabled(bool enabled) { | |
| 42 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 130 | 43 |
| 131 // This affects the shared state of the source for whether or not it's a part | 44 // This affects the shared state of the source for whether or not it's a part |
| 132 // of the mixed audio that's rendered for remote tracks from WebRTC. | 45 // of the mixed audio that's rendered for remote tracks from WebRTC. |
| 133 // All tracks from the same source will share this state and thus can step | 46 // All tracks from the same source will share this state and thus can step |
| 134 // on each other's toes. | 47 // on each other's toes. |
| 135 // This is also why we can't check the |enabled_| state for equality with | 48 // This is also why we can't check the enabled state for equality with |
| 136 // |enabled| before setting the mixing enabled state. |enabled_| and the | 49 // |enabled| before setting the mixing enabled state. This track's enabled |
| 137 // shared state might not be the same. | 50 // state and the shared state might not be the same. |
| 138 source()->SetEnabledForMixing(enabled); | 51 track_interface_->set_enabled(enabled); |
| 139 | 52 |
| 140 enabled_ = enabled; | 53 MediaStreamAudioTrack::SetEnabled(enabled); |
| 141 source()->SetSinksEnabled(this, enabled); | |
| 142 } | 54 } |
| 143 | 55 |
| 144 void MediaStreamRemoteAudioTrack::OnStop() { | 56 void* PeerConnectionRemoteAudioTrack::GetClassIdentifier() const { |
| 145 DCHECK(main_render_thread_checker_.CalledOnValidThread()); | 57 return kClassIdentifier; |
| 146 DVLOG(1) << "MediaStreamRemoteAudioTrack::OnStop()"; | 58 } |
| 147 | 59 |
| 148 source()->RemoveAll(this); | 60 void PeerConnectionRemoteAudioTrack::OnStop() { |
| 61 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 62 DVLOG(1) << "PeerConnectionRemoteAudioTrack::OnStop()"; | |
| 149 | 63 |
| 150 // Stop means that a track should be stopped permanently. But | 64 // Stop means that a track should be stopped permanently. But |
| 151 // since there is no proper way of doing that on a remote track, we can | 65 // since there is no proper way of doing that on a remote track, we can |
| 152 // at least disable the track. Blink will not call down to the content layer | 66 // at least disable the track. Blink will not call down to the content layer |
| 153 // after a track has been stopped. | 67 // after a track has been stopped. |
| 154 SetEnabled(false); | 68 SetEnabled(false); |
| 155 } | 69 } |
| 156 | 70 |
| 157 void MediaStreamRemoteAudioTrack::AddSink(MediaStreamAudioSink* sink) { | 71 PeerConnectionRemoteAudioSource::PeerConnectionRemoteAudioSource( |
| 158 DCHECK(main_render_thread_checker_.CalledOnValidThread()); | 72 scoped_refptr<webrtc::AudioTrackInterface> track_interface) |
| 159 return source()->AddSink(sink, this, enabled_); | 73 : MediaStreamAudioSource(false /* is_local_source */), |
| 74 track_interface_(std::move(track_interface)), | |
| 75 is_sink_of_peer_connection_(false) { | |
| 76 DCHECK(track_interface_); | |
| 77 DVLOG(1) | |
| 78 << "PeerConnectionRemoteAudioSource::PeerConnectionRemoteAudioSource()"; | |
| 160 } | 79 } |
| 161 | 80 |
| 162 void MediaStreamRemoteAudioTrack::RemoveSink(MediaStreamAudioSink* sink) { | 81 PeerConnectionRemoteAudioSource::~PeerConnectionRemoteAudioSource() { |
| 163 DCHECK(main_render_thread_checker_.CalledOnValidThread()); | 82 DVLOG(1) |
| 164 return source()->RemoveSink(sink, this); | 83 << "PeerConnectionRemoteAudioSource::~PeerConnectionRemoteAudioSource()"; |
| 84 EnsureSourceIsStopped(); | |
| 165 } | 85 } |
| 166 | 86 |
| 167 media::AudioParameters MediaStreamRemoteAudioTrack::GetOutputFormat() const { | 87 std::unique_ptr<MediaStreamAudioTrack> |
| 168 DCHECK(main_render_thread_checker_.CalledOnValidThread()); | 88 PeerConnectionRemoteAudioSource::CreateMediaStreamAudioTrack( |
| 169 // This method is not implemented on purpose and should be removed. | 89 const std::string& id) { |
| 170 // TODO(tommi): See comment for GetOutputFormat in MediaStreamAudioTrack. | 90 DCHECK(thread_checker_.CalledOnValidThread()); |
| 171 NOTIMPLEMENTED(); | 91 return std::unique_ptr<MediaStreamAudioTrack>( |
| 172 return media::AudioParameters(); | 92 new PeerConnectionRemoteAudioTrack(track_interface_)); |
| 173 } | 93 } |
| 174 | 94 |
| 175 webrtc::AudioTrackInterface* MediaStreamRemoteAudioTrack::GetAudioAdapter() { | 95 bool PeerConnectionRemoteAudioSource::EnsureSourceIsStarted() { |
| 176 DCHECK(main_render_thread_checker_.CalledOnValidThread()); | 96 DCHECK(thread_checker_.CalledOnValidThread()); |
| 177 return source()->GetAudioAdapter(); | 97 if (is_sink_of_peer_connection_) |
| 98 return true; | |
| 99 VLOG(1) << "Starting PeerConnection remote audio source with id=" | |
| 100 << track_interface_->id(); | |
| 101 track_interface_->AddSink(this); | |
| 102 is_sink_of_peer_connection_ = true; | |
| 103 return true; | |
| 178 } | 104 } |
| 179 | 105 |
| 180 MediaStreamRemoteAudioSource* MediaStreamRemoteAudioTrack::source() const { | 106 void PeerConnectionRemoteAudioSource::EnsureSourceIsStopped() { |
| 181 return static_cast<MediaStreamRemoteAudioSource*>(source_.getExtraData()); | |
| 182 } | |
| 183 | |
| 184 MediaStreamRemoteAudioSource::MediaStreamRemoteAudioSource( | |
| 185 const scoped_refptr<webrtc::AudioTrackInterface>& track) : track_(track) {} | |
| 186 | |
| 187 MediaStreamRemoteAudioSource::~MediaStreamRemoteAudioSource() { | |
| 188 DCHECK(thread_checker_.CalledOnValidThread()); | 107 DCHECK(thread_checker_.CalledOnValidThread()); |
| 189 } | 108 if (is_sink_of_peer_connection_) { |
| 190 | 109 track_interface_->RemoveSink(this); |
| 191 void MediaStreamRemoteAudioSource::SetEnabledForMixing(bool enabled) { | 110 is_sink_of_peer_connection_ = false; |
| 192 DCHECK(thread_checker_.CalledOnValidThread()); | 111 VLOG(1) << "Stopped PeerConnection remote audio source with id=" |
| 193 track_->set_enabled(enabled); | 112 << track_interface_->id(); |
| 194 } | |
| 195 | |
| 196 void MediaStreamRemoteAudioSource::AddSink(MediaStreamAudioSink* sink, | |
| 197 MediaStreamAudioTrack* track, | |
| 198 bool enabled) { | |
| 199 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 200 if (!sink_) { | |
| 201 sink_.reset(new AudioSink()); | |
| 202 track_->AddSink(sink_.get()); | |
| 203 } | |
| 204 | |
| 205 sink_->Add(sink, track, enabled); | |
| 206 } | |
| 207 | |
| 208 void MediaStreamRemoteAudioSource::RemoveSink(MediaStreamAudioSink* sink, | |
| 209 MediaStreamAudioTrack* track) { | |
| 210 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 211 DCHECK(sink_); | |
| 212 | |
| 213 sink_->Remove(sink, track); | |
| 214 | |
| 215 if (!sink_->IsNeeded()) { | |
| 216 track_->RemoveSink(sink_.get()); | |
| 217 sink_.reset(); | |
| 218 } | 113 } |
| 219 } | 114 } |
| 220 | 115 |
| 221 void MediaStreamRemoteAudioSource::SetSinksEnabled(MediaStreamAudioTrack* track, | 116 void PeerConnectionRemoteAudioSource::OnData(const void* audio_data, |
| 222 bool enabled) { | 117 int bits_per_sample, |
| 223 if (sink_) | 118 int sample_rate, |
| 224 sink_->SetEnabled(track, enabled); | 119 size_t number_of_channels, |
| 225 } | 120 size_t number_of_frames) { |
| 121 // Debug builds: Note that this lock isn't meant to synchronize anything. | |
| 122 // Instead, it is being used as a run-time check to ensure there isn't already | |
| 123 // another thread executing this method. The reason we don't use | |
| 124 // base::ThreadChecker here is because we shouldn't be making assumptions | |
| 125 // about the private threading model of libjingle. | |
|
o1ka
2016/04/21 18:51:22
I think the "for example.." part of your review comment [remainder of comment truncated in this capture]
miu
2016/04/21 20:42:30
Done.
o1ka
2016/04/22 11:29:25
Thanks!
| |
| 126 #ifndef NDEBUG | |
| 127 const bool is_only_thread_here = single_audio_thread_guard_.Try(); | |
| 128 DCHECK(is_only_thread_here); | |
| 129 #endif | |
| 226 | 130 |
| 227 void MediaStreamRemoteAudioSource::RemoveAll(MediaStreamAudioTrack* track) { | 131 // TODO(tommi): We should get the timestamp from WebRTC. |
| 228 if (sink_) | 132 base::TimeTicks playout_time(base::TimeTicks::Now()); |
| 229 sink_->RemoveAll(track); | |
| 230 } | |
| 231 | 133 |
| 232 webrtc::AudioTrackInterface* MediaStreamRemoteAudioSource::GetAudioAdapter() { | 134 if (!audio_bus_ || |
| 233 DCHECK(thread_checker_.CalledOnValidThread()); | 135 static_cast<size_t>(audio_bus_->channels()) != number_of_channels || |
| 234 return track_.get(); | 136 static_cast<size_t>(audio_bus_->frames()) != number_of_frames) { |
| 137 audio_bus_ = media::AudioBus::Create(number_of_channels, number_of_frames); | |
| 138 } | |
| 139 | |
| 140 audio_bus_->FromInterleaved(audio_data, number_of_frames, | |
| 141 bits_per_sample / 8); | |
| 142 | |
| 143 media::AudioParameters params = MediaStreamAudioSource::GetAudioParameters(); | |
| 144 if (!params.IsValid() || | |
| 145 params.format() != media::AudioParameters::AUDIO_PCM_LOW_LATENCY || | |
| 146 static_cast<size_t>(params.channels()) != number_of_channels || | |
| 147 params.sample_rate() != sample_rate || | |
| 148 static_cast<size_t>(params.frames_per_buffer()) != number_of_frames) { | |
| 149 MediaStreamAudioSource::SetFormat( | |
| 150 media::AudioParameters(media::AudioParameters::AUDIO_PCM_LOW_LATENCY, | |
| 151 media::GuessChannelLayout(number_of_channels), | |
| 152 sample_rate, bits_per_sample, number_of_frames)); | |
| 153 } | |
| 154 | |
| 155 MediaStreamAudioSource::DeliverDataToTracks(*audio_bus_, playout_time); | |
| 156 | |
| 157 #ifndef NDEBUG | |
| 158 single_audio_thread_guard_.Release(); | |
| 159 #endif | |
| 235 } | 160 } |
| 236 | 161 |
| 237 } // namespace content | 162 } // namespace content |
| OLD | NEW |