| Index: content/renderer/media/webrtc/webrtc_audio_sink.cc |
| diff --git a/content/renderer/media/webrtc/webrtc_local_audio_track_adapter.cc b/content/renderer/media/webrtc/webrtc_audio_sink.cc |
| similarity index 29% |
| rename from content/renderer/media/webrtc/webrtc_local_audio_track_adapter.cc |
| rename to content/renderer/media/webrtc/webrtc_audio_sink.cc |
| index c86881b07a90eed64bca82048846e137536a2b21..d7500b45af3bcf5d7ce8bfad90802a63e2cb6864 100644 |
| --- a/content/renderer/media/webrtc/webrtc_local_audio_track_adapter.cc |
| +++ b/content/renderer/media/webrtc/webrtc_audio_sink.cc |
| @@ -1,134 +1,154 @@ |
| -// Copyright 2014 The Chromium Authors. All rights reserved. |
| +// Copyright 2016 The Chromium Authors. All rights reserved. |
| // Use of this source code is governed by a BSD-style license that can be |
| // found in the LICENSE file. |
| -#include "content/renderer/media/webrtc/webrtc_local_audio_track_adapter.h" |
| +#include "content/renderer/media/webrtc/webrtc_audio_sink.h" |
| +#include <algorithm> |
| +#include <limits> |
| + |
| +#include "base/bind.h" |
| +#include "base/bind_helpers.h" |
| #include "base/location.h" |
| #include "base/logging.h" |
| -#include "content/renderer/media/media_stream_audio_processor.h" |
| -#include "content/renderer/media/webrtc/peer_connection_dependency_factory.h" |
| -#include "content/renderer/media/webrtc/webrtc_audio_sink_adapter.h" |
| -#include "content/renderer/media/webrtc_local_audio_track.h" |
| -#include "content/renderer/render_thread_impl.h" |
| -#include "third_party/webrtc/api/mediastreaminterface.h" |
| +#include "base/message_loop/message_loop.h" |
| namespace content { |
| -static const char kAudioTrackKind[] = "audio"; |
| - |
| -scoped_refptr<WebRtcLocalAudioTrackAdapter> |
| -WebRtcLocalAudioTrackAdapter::Create( |
| - const std::string& label, |
| - webrtc::AudioSourceInterface* track_source) { |
| - // TODO(tommi): Change this so that the signaling thread is one of the |
| - // parameters to this method. |
| - scoped_refptr<base::SingleThreadTaskRunner> signaling_task_runner; |
| - RenderThreadImpl* current = RenderThreadImpl::current(); |
| - if (current) { |
| - PeerConnectionDependencyFactory* pc_factory = |
| - current->GetPeerConnectionDependencyFactory(); |
| - signaling_task_runner = pc_factory->GetWebRtcSignalingThread(); |
| - LOG_IF(ERROR, !signaling_task_runner) << "No signaling thread!"; |
| - } else { |
| - LOG(WARNING) << "Assuming single-threaded operation for unit test."; |
| - } |
| - |
| - rtc::RefCountedObject<WebRtcLocalAudioTrackAdapter>* adapter = |
| - new rtc::RefCountedObject<WebRtcLocalAudioTrackAdapter>( |
| - label, track_source, std::move(signaling_task_runner)); |
| - return adapter; |
| -} |
| - |
| -WebRtcLocalAudioTrackAdapter::WebRtcLocalAudioTrackAdapter( |
| +WebRtcAudioSink::WebRtcAudioSink( |
| const std::string& label, |
| - webrtc::AudioSourceInterface* track_source, |
| + scoped_refptr<webrtc::AudioSourceInterface> track_source, |
| scoped_refptr<base::SingleThreadTaskRunner> signaling_task_runner) |
| - : webrtc::MediaStreamTrack<webrtc::AudioTrackInterface>(label), |
| - owner_(NULL), |
| - track_source_(track_source), |
| - signaling_task_runner_(std::move(signaling_task_runner)) {} |
| - |
| -WebRtcLocalAudioTrackAdapter::~WebRtcLocalAudioTrackAdapter() { |
| + : adapter_(new rtc::RefCountedObject<Adapter>( |
| + label, std::move(track_source), std::move(signaling_task_runner))), |
| + fifo_(base::Bind(&WebRtcAudioSink::DeliverRebufferedAudio, |
| + base::Unretained(this))) { |
| + DVLOG(1) << "WebRtcAudioSink::WebRtcAudioSink()"; |
| } |
| -void WebRtcLocalAudioTrackAdapter::Initialize(WebRtcLocalAudioTrack* owner) { |
| - DCHECK(!owner_); |
| - DCHECK(owner); |
| - owner_ = owner; |
| +WebRtcAudioSink::~WebRtcAudioSink() { |
| + DCHECK(thread_checker_.CalledOnValidThread()); |
| + DVLOG(1) << "WebRtcAudioSink::~WebRtcAudioSink()"; |
| } |
| -void WebRtcLocalAudioTrackAdapter::SetAudioProcessor( |
| +void WebRtcAudioSink::SetAudioProcessor( |
| scoped_refptr<MediaStreamAudioProcessor> processor) { |
| + DCHECK(thread_checker_.CalledOnValidThread()); |
| DCHECK(processor.get()); |
| - DCHECK(!audio_processor_); |
| - audio_processor_ = std::move(processor); |
| + adapter_->set_processor(std::move(processor)); |
| } |
| -void WebRtcLocalAudioTrackAdapter::SetLevel( |
| +void WebRtcAudioSink::SetLevel( |
| scoped_refptr<MediaStreamAudioLevelCalculator::Level> level) { |
| + DCHECK(thread_checker_.CalledOnValidThread()); |
| DCHECK(level.get()); |
| - DCHECK(!level_); |
| - level_ = std::move(level); |
| + adapter_->set_level(std::move(level)); |
| +} |
| + |
| +void WebRtcAudioSink::OnEnabledChanged(bool enabled) { |
| + DCHECK(thread_checker_.CalledOnValidThread()); |
| + adapter_->signaling_task_runner()->PostTask( |
| + FROM_HERE, |
| + base::Bind( |
| + base::IgnoreResult(&WebRtcAudioSink::Adapter::set_enabled), |
| + adapter_, enabled)); |
| +} |
| + |
| +void WebRtcAudioSink::OnData(const media::AudioBus& audio_bus, |
| + base::TimeTicks estimated_capture_time) { |
| + DCHECK(audio_thread_checker_.CalledOnValidThread()); |
| + // The following will result in zero, one, or multiple synchronous calls to |
| + // DeliverRebufferedAudio(). |
| + fifo_.Push(audio_bus); |
| +} |
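
For readers unfamiliar with the rebuffering step: |fifo_| is a push-style FIFO that slices the arbitrarily sized input bus into fixed-size blocks and synchronously invokes the callback bound in the constructor (DeliverRebufferedAudio) once per complete block, which is why a single Push() can produce zero, one, or several deliveries. A minimal standalone sketch of that idea in plain C++, with hypothetical names, mono audio, and std::function instead of base::Bind (not the Chromium media API):

#include <cstddef>
#include <functional>
#include <utility>
#include <vector>

class PushFifoSketch {
 public:
  using OutputCallback = std::function<void(const float* frames, size_t count)>;

  explicit PushFifoSketch(OutputCallback callback)
      : callback_(std::move(callback)) {}

  // Discards any pending data and sets the fixed output block size.
  void Reset(size_t frames_per_output) {
    frames_per_output_ = frames_per_output;
    buffer_.clear();
  }

  // Appends |count| mono frames; fires the callback once per complete block.
  void Push(const float* frames, size_t count) {
    buffer_.insert(buffer_.end(), frames, frames + count);
    while (frames_per_output_ > 0 && buffer_.size() >= frames_per_output_) {
      callback_(buffer_.data(), frames_per_output_);
      buffer_.erase(buffer_.begin(),
                    buffer_.begin() + static_cast<std::ptrdiff_t>(frames_per_output_));
    }
  }

 private:
  OutputCallback callback_;
  size_t frames_per_output_ = 0;
  std::vector<float> buffer_;
};
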
| + |
| +void WebRtcAudioSink::OnSetFormat(const media::AudioParameters& params) { |
| + // On a format change, the thread delivering audio might have also changed. |
| + audio_thread_checker_.DetachFromThread(); |
| + DCHECK(audio_thread_checker_.CalledOnValidThread()); |
Inline review comments attached to the lines above:
o1ka, 2016/04/21 18:51:22: I think I'm missing something: what is the guarant…
miu, 2016/04/21 20:42:30: This is a sink, so only a MediaStreamAudioTrack sh…
| + |
| + DCHECK(params.IsValid()); |
| + params_ = params; |
| + fifo_.Reset(params_.frames_per_buffer()); |
| + const int num_pcm16_data_elements = |
| + params_.frames_per_buffer() * params_.channels(); |
| + interleaved_data_.reset(new int16_t[num_pcm16_data_elements]); |
| } |
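
The DetachFromThread()/CalledOnValidThread() pair above (the subject of the inline question) relies on the thread checker's rebinding behavior: after a detach, the checker binds to whichever thread calls it next, so a format change may legitimately move delivery to a new capture thread, and every subsequent OnData() must then arrive on that same thread. A rough standalone sketch of the rebinding semantics in plain C++ (not the real base::ThreadChecker, which also handles locking and sequence details):

#include <thread>

class ThreadCheckerSketch {
 public:
  // Binds to the first calling thread; afterwards only that thread passes.
  bool CalledOnValidThread() {
    if (bound_id_ == std::thread::id())  // Unbound: bind to the current thread.
      bound_id_ = std::this_thread::get_id();
    return bound_id_ == std::this_thread::get_id();
  }

  // Unbinds so the next CalledOnValidThread() re-binds to its caller.
  void DetachFromThread() { bound_id_ = std::thread::id(); }

 private:
  std::thread::id bound_id_;  // Default-constructed id means "unbound".
};
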
| -std::string WebRtcLocalAudioTrackAdapter::kind() const { |
| - return kAudioTrackKind; |
| +void WebRtcAudioSink::DeliverRebufferedAudio(const media::AudioBus& audio_bus, |
| + int frame_delay) { |
| + DCHECK(audio_thread_checker_.CalledOnValidThread()); |
| + DCHECK(params_.IsValid()); |
| + |
| + // TODO(miu): Why doesn't a WebRTC sink care about reference time passed to |
| + // OnData(), and the |frame_delay| here? How is AV sync achieved otherwise? |
| + |
| + // TODO(henrika): Remove this conversion once the interface in libjingle |
| + // supports float vectors. |
| + audio_bus.ToInterleaved(audio_bus.frames(), |
| + sizeof(interleaved_data_[0]), |
| + interleaved_data_.get()); |
| + adapter_->DeliverPCMToWebRtcSinks(interleaved_data_.get(), |
| + params_.sample_rate(), |
| + audio_bus.channels(), |
| + audio_bus.frames()); |
| } |
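
The ToInterleaved() call above converts the planar float data held by media::AudioBus into the interleaved 16-bit PCM that the WebRTC sink interface still expects (hence the TODO). A hedged sketch of that conversion in plain C++, with hypothetical names and a simple symmetric scale factor (Chromium's actual clipping and rounding rules may differ):

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <vector>

// |planar| holds one float vector per channel, each |frames| samples long,
// in the nominal [-1.0, 1.0] range.
std::vector<int16_t> ToInterleavedPcm16(
    const std::vector<std::vector<float>>& planar, size_t frames) {
  const size_t channels = planar.size();
  std::vector<int16_t> interleaved(frames * channels);
  for (size_t frame = 0; frame < frames; ++frame) {
    for (size_t ch = 0; ch < channels; ++ch) {
      // Clamp, then scale into the int16 range.
      const float sample = std::max(-1.0f, std::min(1.0f, planar[ch][frame]));
      interleaved[frame * channels + ch] =
          static_cast<int16_t>(sample * 32767.0f);
    }
  }
  return interleaved;
}
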
| -bool WebRtcLocalAudioTrackAdapter::set_enabled(bool enable) { |
| - // If we're not called on the signaling thread, we need to post a task to |
| - // change the state on the correct thread. |
| - if (signaling_task_runner_ && |
| - !signaling_task_runner_->BelongsToCurrentThread()) { |
| - signaling_task_runner_->PostTask(FROM_HERE, |
| - base::Bind( |
| - base::IgnoreResult(&WebRtcLocalAudioTrackAdapter::set_enabled), |
| - this, enable)); |
| - return true; |
| +WebRtcAudioSink::Adapter::Adapter( |
| + const std::string& label, |
| + scoped_refptr<webrtc::AudioSourceInterface> source, |
| + scoped_refptr<base::SingleThreadTaskRunner> signaling_task_runner) |
| + : webrtc::MediaStreamTrack<webrtc::AudioTrackInterface>(label), |
| + source_(std::move(source)), |
| + signaling_task_runner_(std::move(signaling_task_runner)) { |
| + DCHECK(signaling_task_runner_); |
| +} |
| + |
| +WebRtcAudioSink::Adapter::~Adapter() {} |
| + |
| +void WebRtcAudioSink::Adapter::DeliverPCMToWebRtcSinks( |
| + const int16_t* audio_data, |
| + int sample_rate, |
| + size_t number_of_channels, |
| + size_t number_of_frames) { |
| + base::AutoLock auto_lock(lock_); |
| + for (webrtc::AudioTrackSinkInterface* sink : sinks_) { |
| + sink->OnData(audio_data, sizeof(int16_t) * 8, sample_rate, |
| + number_of_channels, number_of_frames); |
| } |
| +} |
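
AddSink()/RemoveSink() run on the WebRTC signaling thread while DeliverPCMToWebRtcSinks() runs on the audio thread, so |sinks_| is only touched under |lock_|. The same discipline, sketched standalone with std::mutex and hypothetical types:

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <mutex>
#include <vector>

struct Pcm16Sink {
  virtual ~Pcm16Sink() = default;
  virtual void OnPcm16(const int16_t* data, size_t frames) = 0;
};

class SinkRegistrySketch {
 public:
  // Called on the "signaling" thread.
  void AddSink(Pcm16Sink* sink) {
    std::lock_guard<std::mutex> lock(mutex_);
    sinks_.push_back(sink);
  }

  void RemoveSink(Pcm16Sink* sink) {
    std::lock_guard<std::mutex> lock(mutex_);
    sinks_.erase(std::remove(sinks_.begin(), sinks_.end(), sink), sinks_.end());
  }

  // Called on the "audio" thread; holds the same lock while fanning out.
  void Deliver(const int16_t* data, size_t frames) {
    std::lock_guard<std::mutex> lock(mutex_);
    for (Pcm16Sink* sink : sinks_)
      sink->OnPcm16(data, frames);
  }

 private:
  std::mutex mutex_;
  std::vector<Pcm16Sink*> sinks_;
};
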
| + |
| +std::string WebRtcAudioSink::Adapter::kind() const { |
| + return webrtc::MediaStreamTrackInterface::kAudioKind; |
| +} |
| +bool WebRtcAudioSink::Adapter::set_enabled(bool enable) { |
| + DCHECK(!signaling_task_runner_ || |
| + signaling_task_runner_->RunsTasksOnCurrentThread()); |
| return webrtc::MediaStreamTrack<webrtc::AudioTrackInterface>:: |
| set_enabled(enable); |
| } |
| -void WebRtcLocalAudioTrackAdapter::AddSink( |
| - webrtc::AudioTrackSinkInterface* sink) { |
| +void WebRtcAudioSink::Adapter::AddSink(webrtc::AudioTrackSinkInterface* sink) { |
| DCHECK(!signaling_task_runner_ || |
| signaling_task_runner_->RunsTasksOnCurrentThread()); |
| DCHECK(sink); |
| -#ifndef NDEBUG |
| - // Verify that |sink| has not been added. |
| - for (ScopedVector<WebRtcAudioSinkAdapter>::const_iterator it = |
| - sink_adapters_.begin(); |
| - it != sink_adapters_.end(); ++it) { |
| - DCHECK(!(*it)->IsEqual(sink)); |
| - } |
| -#endif |
| - |
| - std::unique_ptr<WebRtcAudioSinkAdapter> adapter( |
| - new WebRtcAudioSinkAdapter(sink)); |
| - owner_->AddSink(adapter.get()); |
| - sink_adapters_.push_back(adapter.release()); |
| + base::AutoLock auto_lock(lock_); |
| + DCHECK(std::find(sinks_.begin(), sinks_.end(), sink) == sinks_.end()); |
| + sinks_.push_back(sink); |
| } |
| -void WebRtcLocalAudioTrackAdapter::RemoveSink( |
| +void WebRtcAudioSink::Adapter::RemoveSink( |
| webrtc::AudioTrackSinkInterface* sink) { |
| DCHECK(!signaling_task_runner_ || |
| signaling_task_runner_->RunsTasksOnCurrentThread()); |
| - DCHECK(sink); |
| - for (ScopedVector<WebRtcAudioSinkAdapter>::iterator it = |
| - sink_adapters_.begin(); |
| - it != sink_adapters_.end(); ++it) { |
| - if ((*it)->IsEqual(sink)) { |
| - owner_->RemoveSink(*it); |
| - sink_adapters_.erase(it); |
| - return; |
| - } |
| - } |
| + base::AutoLock auto_lock(lock_); |
| + const auto it = std::find(sinks_.begin(), sinks_.end(), sink); |
| + if (it != sinks_.end()) |
| + sinks_.erase(it); |
| } |
| -bool WebRtcLocalAudioTrackAdapter::GetSignalLevel(int* level) { |
| +bool WebRtcAudioSink::Adapter::GetSignalLevel(int* level) { |
| DCHECK(!signaling_task_runner_ || |
| signaling_task_runner_->RunsTasksOnCurrentThread()); |
| @@ -146,16 +166,16 @@ bool WebRtcLocalAudioTrackAdapter::GetSignalLevel(int* level) { |
| } |
| rtc::scoped_refptr<webrtc::AudioProcessorInterface> |
| -WebRtcLocalAudioTrackAdapter::GetAudioProcessor() { |
| +WebRtcAudioSink::Adapter::GetAudioProcessor() { |
| DCHECK(!signaling_task_runner_ || |
| signaling_task_runner_->RunsTasksOnCurrentThread()); |
| return audio_processor_.get(); |
| } |
| -webrtc::AudioSourceInterface* WebRtcLocalAudioTrackAdapter::GetSource() const { |
| +webrtc::AudioSourceInterface* WebRtcAudioSink::Adapter::GetSource() const { |
| DCHECK(!signaling_task_runner_ || |
| signaling_task_runner_->RunsTasksOnCurrentThread()); |
| - return track_source_; |
| + return source_.get(); |
| } |
| } // namespace content |