Index: content/renderer/media/webrtc/peer_connection_remote_audio_source.cc
diff --git a/content/renderer/media/webrtc/peer_connection_remote_audio_source.cc b/content/renderer/media/webrtc/peer_connection_remote_audio_source.cc
new file mode 100644
index 0000000000000000000000000000000000000000..91800553da1ea46d22acafe2d80230b415c295a0
--- /dev/null
+++ b/content/renderer/media/webrtc/peer_connection_remote_audio_source.cc
@@ -0,0 +1,139 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "content/renderer/media/webrtc/peer_connection_remote_audio_source.h"
+
+#include "base/logging.h"
+#include "media/base/audio_bus.h"
+
+namespace content {
+
+namespace {
+// Identifier returned by GetClassIdentifier() to allow safe down-casting.
+void* const kClassIdentifier = const_cast<void**>(&kClassIdentifier);
+}  // namespace
+
+PeerConnectionRemoteAudioTrack::PeerConnectionRemoteAudioTrack(
+    const scoped_refptr<webrtc::AudioTrackInterface>& track_interface)
+    : MediaStreamAudioTrack(false /* is_local_track */),
+      track_interface_(track_interface) {
+  DVLOG(1)
+      << "PeerConnectionRemoteAudioTrack::PeerConnectionRemoteAudioTrack()";
+}
+
+PeerConnectionRemoteAudioTrack::~PeerConnectionRemoteAudioTrack() {
+  DVLOG(1)
+      << "PeerConnectionRemoteAudioTrack::~PeerConnectionRemoteAudioTrack()";
+}
+
+// static
+PeerConnectionRemoteAudioTrack* PeerConnectionRemoteAudioTrack::From(
+    MediaStreamAudioTrack* track) {
+  if (track && track->GetClassIdentifier() == kClassIdentifier)
+    return static_cast<PeerConnectionRemoteAudioTrack*>(track);
+  return nullptr;
+}
+
+void PeerConnectionRemoteAudioTrack::SetEnabled(bool enabled) {
+  DCHECK(main_render_thread_checker_.CalledOnValidThread());
+
+  // Enabling/disabling is shared state on the source: it controls whether
+  // the source is part of the mixed audio that is rendered for remote
+  // WebRTC tracks. All tracks backed by the same source share this state
+  // and can therefore step on each other's toes. This is also why we must
+  // not skip the call when |enabled| already equals this track's enabled
+  // state: the track's state and the shared source state might differ.
+  track_interface_->set_enabled(enabled);
+
+  MediaStreamAudioTrack::SetEnabled(enabled);
+}
+
+void* PeerConnectionRemoteAudioTrack::GetClassIdentifier() const {
+  return kClassIdentifier;
+}
+
+PeerConnectionRemoteAudioSource::PeerConnectionRemoteAudioSource(
+    const scoped_refptr<webrtc::AudioTrackInterface>& track_interface)
+    : MediaStreamAudioSource(false /* is_local_source */),
+      track_interface_(track_interface),
+      is_started_(false),
+      is_stopped_(false) {
+  DCHECK(track_interface_.get());
+  DVLOG(1)
+      << "PeerConnectionRemoteAudioSource::PeerConnectionRemoteAudioSource()";
+}
+
+PeerConnectionRemoteAudioSource::~PeerConnectionRemoteAudioSource() {
+  DVLOG(1)
+      << "PeerConnectionRemoteAudioSource::~PeerConnectionRemoteAudioSource()";
+  // Superclass will call StopSource() just in case.
+}
+
+scoped_ptr<MediaStreamAudioTrack>
+PeerConnectionRemoteAudioSource::CreateMediaStreamAudioTrack(
+    const std::string& id) {
+  DCHECK(thread_checker_.CalledOnValidThread());
+  return make_scoped_ptr<MediaStreamAudioTrack>(
+      new PeerConnectionRemoteAudioTrack(track_interface_));
+}
+
+void PeerConnectionRemoteAudioSource::DoStopSource() {
+  DCHECK(thread_checker_.CalledOnValidThread());
+  if (is_stopped_)
+    return;
+  if (is_started_) {
+    track_interface_->RemoveSink(this);
+    VLOG(1) << "Stopped PeerConnection remote audio source with id="
+            << track_interface_->id();
+  }
+  is_stopped_ = true;
+}
+
+bool PeerConnectionRemoteAudioSource::EnsureSourceIsStarted() {
+  DCHECK(thread_checker_.CalledOnValidThread());
+
+  if (is_stopped_)
+    return false;
+  if (is_started_)
+    return true;
+
+  VLOG(1) << "Starting PeerConnection remote audio source with id="
+          << track_interface_->id();
+  track_interface_->AddSink(this);
+  is_started_ = true;
+  return true;
+}
+
+void PeerConnectionRemoteAudioSource::OnData(
+    const void* audio_data, int bits_per_sample, int sample_rate,
+    size_t number_of_channels, size_t number_of_frames) {
+  // TODO(tommi): We should get the timestamp from WebRTC.
+  base::TimeTicks playout_time(base::TimeTicks::Now());
+
+  if (!audio_bus_ ||
+      static_cast<size_t>(audio_bus_->channels()) != number_of_channels ||
+      static_cast<size_t>(audio_bus_->frames()) != number_of_frames) {
+    audio_bus_ = media::AudioBus::Create(number_of_channels,
+                                         number_of_frames);
+  }
+
+  audio_bus_->FromInterleaved(audio_data, number_of_frames,
+                              bits_per_sample / 8);
+
+  media::AudioParameters params = MediaStreamAudioSource::GetAudioParameters();
+  if (!params.IsValid() ||
+      params.format() != media::AudioParameters::AUDIO_PCM_LOW_LATENCY ||
+      static_cast<size_t>(params.channels()) != number_of_channels ||
+      params.sample_rate() != sample_rate ||
+      static_cast<size_t>(params.frames_per_buffer()) != number_of_frames) {
+    MediaStreamAudioSource::SetFormat(media::AudioParameters(
+        media::AudioParameters::AUDIO_PCM_LOW_LATENCY,
+        media::GuessChannelLayout(number_of_channels),
+        sample_rate, 16, number_of_frames));
+  }
+
+  MediaStreamAudioSource::DeliverDataToTracks(*audio_bus_, playout_time);
+}
+
+}  // namespace content
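
A note on the GetClassIdentifier()/From() pair added above: Chromium builds with RTTI disabled, so dynamic_cast is unavailable; instead, the address of a namespace-local constant serves as a process-unique type tag that makes the static_cast checked. A minimal standalone sketch of the pattern (the Base/Derived names are hypothetical, not from this patch):

#include <cassert>

class Base {
 public:
  virtual ~Base() {}
  // Default: no identifier, so checked down-casts to subclasses fail.
  virtual void* GetClassIdentifier() const { return nullptr; }
};

namespace {
// The constant's own address is a process-unique type tag.
void* const kDerivedIdentifier = const_cast<void**>(&kDerivedIdentifier);
}  // namespace

class Derived : public Base {
 public:
  void* GetClassIdentifier() const override { return kDerivedIdentifier; }

  // Checked down-cast: returns null unless |base| really is a Derived.
  static Derived* From(Base* base) {
    if (base && base->GetClassIdentifier() == kDerivedIdentifier)
      return static_cast<Derived*>(base);
    return nullptr;
  }
};

int main() {
  Derived d;
  Base b;
  assert(Derived::From(&d) == &d);       // Tag matches: cast succeeds.
  assert(Derived::From(&b) == nullptr);  // No tag: cast safely refused.
  return 0;
}

Any subclass that does not return the tag from GetClassIdentifier() fails the comparison, so From() can never mis-cast.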
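
On the OnData() path: |audio_bus_| is lazily (re)created whenever the channel/frame geometry changes, and AudioBus::FromInterleaved() converts WebRTC's interleaved 16-bit PCM into the bus's planar float layout (bits_per_sample / 8 == 2 bytes per sample here). A standalone sketch of that conversion, approximating, not reproducing, the media::AudioBus implementation:

#include <cstddef>
#include <cstdint>
#include <vector>

// Interleaved int16 PCM (frame-major: L R L R ... for stereo) to one float
// vector per channel. The int16 -> float scaling below is an approximation
// of what media::AudioBus::FromInterleaved() does for 2-byte samples.
std::vector<std::vector<float>> Deinterleave(const int16_t* audio_data,
                                             size_t number_of_channels,
                                             size_t number_of_frames) {
  std::vector<std::vector<float>> planar(
      number_of_channels, std::vector<float>(number_of_frames));
  for (size_t frame = 0; frame < number_of_frames; ++frame) {
    for (size_t ch = 0; ch < number_of_channels; ++ch) {
      // All samples for one instant are adjacent in the interleaved input.
      planar[ch][frame] =
          audio_data[frame * number_of_channels + ch] / 32768.0f;
    }
  }
  return planar;
}

Recreating the bus only on geometry changes keeps the common case, a stable format, free of per-callback allocations.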
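
The companion header, peer_connection_remote_audio_source.h, is not part of this hunk. For review context, the declarations implied by the definitions above are roughly as follows; this is a reconstruction, and some members shown here (e.g. |thread_checker_|, |is_stopped_|) could instead live in the base classes:

// Reconstructed from the .cc; the actual header may differ in detail.
class PeerConnectionRemoteAudioTrack : public MediaStreamAudioTrack {
 public:
  explicit PeerConnectionRemoteAudioTrack(
      const scoped_refptr<webrtc::AudioTrackInterface>& track_interface);
  ~PeerConnectionRemoteAudioTrack() override;

  // Returns |track| down-cast if it is an instance of this class, else null.
  static PeerConnectionRemoteAudioTrack* From(MediaStreamAudioTrack* track);

  void SetEnabled(bool enabled) override;
  void* GetClassIdentifier() const override;

 private:
  const scoped_refptr<webrtc::AudioTrackInterface> track_interface_;
  base::ThreadChecker main_render_thread_checker_;
};

class PeerConnectionRemoteAudioSource
    : public MediaStreamAudioSource,
      protected webrtc::AudioTrackSinkInterface {
 public:
  explicit PeerConnectionRemoteAudioSource(
      const scoped_refptr<webrtc::AudioTrackInterface>& track_interface);
  ~PeerConnectionRemoteAudioSource() override;

 protected:
  // MediaStreamAudioSource implementation.
  scoped_ptr<MediaStreamAudioTrack> CreateMediaStreamAudioTrack(
      const std::string& id) override;
  void DoStopSource() override;
  bool EnsureSourceIsStarted() override;

  // webrtc::AudioTrackSinkInterface implementation. Called on a WebRTC
  // thread, which is why OnData() performs no |thread_checker_| DCHECK.
  void OnData(const void* audio_data, int bits_per_sample, int sample_rate,
              size_t number_of_channels, size_t number_of_frames) override;

 private:
  const scoped_refptr<webrtc::AudioTrackInterface> track_interface_;
  bool is_started_;
  bool is_stopped_;
  scoped_ptr<media::AudioBus> audio_bus_;
  base::ThreadChecker thread_checker_;
};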
|