| Index: content/renderer/media/media_stream_audio_track.cc
| diff --git a/content/renderer/media/media_stream_audio_track.cc b/content/renderer/media/media_stream_audio_track.cc
| index 9eee57d61428d6146bda28f8c930294a8f84857e..14d29baa48bcc1489723fed72abbe7374ba600a2 100644
| --- a/content/renderer/media/media_stream_audio_track.cc
| +++ b/content/renderer/media/media_stream_audio_track.cc
| @@ -6,20 +6,21 @@
|
|  #include "base/callback_helpers.h"
|  #include "base/logging.h"
| +#include "content/public/renderer/media_stream_audio_sink.h"
| +#include "media/base/audio_bus.h"
|  #include "third_party/WebKit/public/platform/WebMediaStreamSource.h"
| -#include "third_party/webrtc/api/mediastreaminterface.h"
|
|  namespace content {
|
|  MediaStreamAudioTrack::MediaStreamAudioTrack(bool is_local_track)
| -    : MediaStreamTrack(is_local_track) {
| -  DVLOG(1) << "MediaStreamAudioTrack::MediaStreamAudioTrack(is a "
| +    : MediaStreamTrack(is_local_track), is_enabled_(1), weak_factory_(this) {
| +  DVLOG(1) << "MediaStreamAudioTrack@" << this << "::MediaStreamAudioTrack("
|             << (is_local_track ? "local" : "remote") << " track)";
|  }
|
|  MediaStreamAudioTrack::~MediaStreamAudioTrack() {
| -  DCHECK(main_render_thread_checker_.CalledOnValidThread());
| -  DVLOG(1) << "MediaStreamAudioTrack::~MediaStreamAudioTrack()";
| +  DCHECK(thread_checker_.CalledOnValidThread());
| +  DVLOG(1) << "MediaStreamAudioTrack@" << this << " is being destroyed.";
|    DCHECK(stop_callback_.is_null())
|        << "BUG: Subclass must ensure Stop() is called.";
|  }
| @@ -34,27 +35,99 @@ MediaStreamAudioTrack* MediaStreamAudioTrack::From(
|    return static_cast<MediaStreamAudioTrack*>(track.getExtraData());
|  }
|
| +void MediaStreamAudioTrack::AddSink(MediaStreamAudioSink* sink) {
| +  DCHECK(thread_checker_.CalledOnValidThread());
| +
| +  // If the track has already stopped, just notify the sink of this fact
| +  // without adding it.
| +  if (stop_callback_.is_null()) {
| +    sink->OnReadyStateChanged(blink::WebMediaStreamSource::ReadyStateEnded);
| +    return;
| +  }
| +
| +  DVLOG(1) << "Adding MediaStreamAudioSink@" << sink
| +           << " to MediaStreamAudioTrack@" << this << '.';
| +  deliverer_.AddConsumer(sink);
| +}
| +
| +void MediaStreamAudioTrack::RemoveSink(MediaStreamAudioSink* sink) {
| +  DCHECK(thread_checker_.CalledOnValidThread());
| +  deliverer_.RemoveConsumer(sink);
| +  DVLOG(1) << "Removed MediaStreamAudioSink@" << sink
| +           << " from MediaStreamAudioTrack@" << this << '.';
| +}
| +
| +media::AudioParameters MediaStreamAudioTrack::GetOutputFormat() const {
| +  return deliverer_.GetAudioParameters();
| +}
| +
| +void MediaStreamAudioTrack::SetEnabled(bool enabled) {
| +  DCHECK(thread_checker_.CalledOnValidThread());
| +  DVLOG(1) << "MediaStreamAudioTrack@" << this << "::SetEnabled("
| +           << (enabled ? 'Y' : 'N') << ')';
| +
| +  const bool previously_enabled =
| +      !!base::subtle::NoBarrier_AtomicExchange(&is_enabled_, enabled ? 1 : 0);
| +  if (enabled == previously_enabled)
| +    return;
| +
| +  std::vector<MediaStreamAudioSink*> sinks_to_notify;
| +  deliverer_.GetConsumerList(&sinks_to_notify);
| +  for (MediaStreamAudioSink* sink : sinks_to_notify)
| +    sink->OnEnabledChanged(enabled);
| +}
| +
| +void* MediaStreamAudioTrack::GetClassIdentifier() const {
| +  return nullptr;
| +}
| +
|  void MediaStreamAudioTrack::Start(const base::Closure& stop_callback) {
| -  DCHECK(main_render_thread_checker_.CalledOnValidThread());
| +  DCHECK(thread_checker_.CalledOnValidThread());
|    DCHECK(!stop_callback.is_null());
|    DCHECK(stop_callback_.is_null());
| -  DVLOG(1) << "MediaStreamAudioTrack::Start()";
| +  DVLOG(1) << "Starting MediaStreamAudioTrack@" << this << '.';
|    stop_callback_ = stop_callback;
|  }
|
|  void MediaStreamAudioTrack::Stop() {
| -  DCHECK(main_render_thread_checker_.CalledOnValidThread());
| -  DVLOG(1) << "MediaStreamAudioTrack::Stop()";
| +  DCHECK(thread_checker_.CalledOnValidThread());
| +  DVLOG(1) << "Stopping MediaStreamAudioTrack@" << this << '.';
| +
|    if (!stop_callback_.is_null())
|      base::ResetAndReturn(&stop_callback_).Run();
| +
|    OnStop();
| +
| +  std::vector<MediaStreamAudioSink*> sinks_to_end;
| +  deliverer_.GetConsumerList(&sinks_to_end);
| +  for (MediaStreamAudioSink* sink : sinks_to_end) {
| +    deliverer_.RemoveConsumer(sink);
| +    sink->OnReadyStateChanged(blink::WebMediaStreamSource::ReadyStateEnded);
| +  }
| +
| +  weak_factory_.InvalidateWeakPtrs();
|  }
|
|  void MediaStreamAudioTrack::OnStop() {}
|
| -webrtc::AudioTrackInterface* MediaStreamAudioTrack::GetAudioAdapter() {
| -  NOTREACHED();
| -  return nullptr;
| +void MediaStreamAudioTrack::OnSetFormat(const media::AudioParameters& params) {
| +  deliverer_.OnSetFormat(params);
| +}
| +
| +void MediaStreamAudioTrack::OnData(const media::AudioBus& audio_bus,
| +                                   base::TimeTicks reference_time) {
| +  if (base::subtle::NoBarrier_Load(&is_enabled_)) {
| +    deliverer_.OnData(audio_bus, reference_time);
| +  } else {
| +    // The W3C spec requires silent audio to flow while a track is disabled.
| +    if (!silent_bus_ || silent_bus_->channels() != audio_bus.channels() ||
| +        silent_bus_->frames() != audio_bus.frames()) {
| +      silent_bus_ = media::AudioBus::Create(audio_bus.channels(),
| +                                            audio_bus.frames());
| +      silent_bus_->Zero();
| +    }
| +    deliverer_.OnData(*silent_bus_, reference_time);
| +  }
|  }
|
|  }  // namespace content