OLD | NEW |
(Empty) | |
| 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. |
| 4 |
| 5 #include "content/renderer/media/webrtc/peer_connection_remote_audio_source.h" |
| 6 |
| 7 #include "base/logging.h" |
| 8 #include "media/base/audio_bus.h" |
| 9 |
| 10 namespace content { |
| 11 |
namespace {
// Used as an identifier for the down-casters.
// The constant's value is its own address: a process-unique tag that
// PeerConnectionRemoteAudioTrack::GetClassIdentifier() returns and
// PeerConnectionRemoteAudioTrack::From() compares against before casting.
void* const kClassIdentifier = const_cast<void**>(&kClassIdentifier);
}  // namespace
| 16 |
| 17 PeerConnectionRemoteAudioTrack::PeerConnectionRemoteAudioTrack( |
| 18 const scoped_refptr<webrtc::AudioTrackInterface>& track_interface) |
| 19 : MediaStreamAudioTrack(false /* is_local_track*/), |
| 20 track_interface_(track_interface) { |
| 21 DVLOG(1) |
| 22 << "PeerConnectionRemoteAudioTrack::PeerConnectionRemoteAudioTrack()"; |
| 23 } |
| 24 |
| 25 PeerConnectionRemoteAudioTrack::~PeerConnectionRemoteAudioTrack() { |
| 26 DVLOG(1) |
| 27 << "PeerConnectionRemoteAudioTrack::~PeerConnectionRemoteAudioTrack()"; |
| 28 } |
| 29 |
| 30 // static |
| 31 PeerConnectionRemoteAudioTrack* PeerConnectionRemoteAudioTrack::From( |
| 32 MediaStreamAudioTrack* track) { |
| 33 if (track && track->GetClassIdentifier() == kClassIdentifier) |
| 34 return static_cast<PeerConnectionRemoteAudioTrack*>(track); |
| 35 return nullptr; |
| 36 } |
| 37 |
void PeerConnectionRemoteAudioTrack::SetEnabled(bool enabled) {
  DCHECK(main_render_thread_checker_.CalledOnValidThread());

  // This affects the shared state of the source for whether or not it's a part
  // of the mixed audio that's rendered for remote tracks from WebRTC.
  // All tracks from the same source will share this state and thus can step
  // on each other's toes.
  // This is also why we can't check the enabled state for equality with
  // |enabled| before setting the mixing enabled state. This track's enabled
  // state and the shared state might not be the same.
  track_interface_->set_enabled(enabled);

  // NOTE(review): the superclass call presumably updates this track's own
  // enabled flag, independent of the shared WebRTC state above — confirm.
  MediaStreamAudioTrack::SetEnabled(enabled);
}
| 52 |
void* PeerConnectionRemoteAudioTrack::GetClassIdentifier() const {
  // Tag checked by From() to allow a safe static_cast down-cast.
  return kClassIdentifier;
}
| 56 |
| 57 PeerConnectionRemoteAudioSource::PeerConnectionRemoteAudioSource( |
| 58 const scoped_refptr<webrtc::AudioTrackInterface>& track_interface) |
| 59 : MediaStreamAudioSource(false /* is_local_source */), |
| 60 track_interface_(track_interface), |
| 61 is_started_(false) { |
| 62 DCHECK(track_interface_.get()); |
| 63 DVLOG(1) |
| 64 << "PeerConnectionRemoteAudioSource::PeerConnectionRemoteAudioSource()"; |
| 65 } |
| 66 |
| 67 PeerConnectionRemoteAudioSource::~PeerConnectionRemoteAudioSource() { |
| 68 DVLOG(1) |
| 69 << "PeerConnectionRemoteAudioSource::~PeerConnectionRemoteAudioSource()"; |
| 70 // Superclass will call StopSource() just in case. |
| 71 } |
| 72 |
| 73 scoped_ptr<MediaStreamAudioTrack> |
| 74 PeerConnectionRemoteAudioSource::CreateMediaStreamAudioTrack( |
| 75 const std::string& id) { |
| 76 DCHECK(thread_checker_.CalledOnValidThread()); |
| 77 return make_scoped_ptr<MediaStreamAudioTrack>( |
| 78 new PeerConnectionRemoteAudioTrack(track_interface_)); |
| 79 } |
| 80 |
| 81 void PeerConnectionRemoteAudioSource::DoStopSource() { |
| 82 DCHECK(thread_checker_.CalledOnValidThread()); |
| 83 if (is_stopped_) |
| 84 return; |
| 85 if (is_started_) { |
| 86 track_interface_->RemoveSink(this); |
| 87 VLOG(1) << "Stopped PeerConnection remote audio source with id=" |
| 88 << track_interface_->id(); |
| 89 } |
| 90 is_stopped_ = true; |
| 91 } |
| 92 |
| 93 bool PeerConnectionRemoteAudioSource::EnsureSourceIsStarted() { |
| 94 DCHECK(thread_checker_.CalledOnValidThread()); |
| 95 |
| 96 if (is_stopped_) |
| 97 return false; |
| 98 if (is_started_) |
| 99 return true; |
| 100 |
| 101 VLOG(1) << "Starting PeerConnection remote audio source with id=" |
| 102 << track_interface_->id(); |
| 103 track_interface_->AddSink(this); |
| 104 is_started_ = true; |
| 105 return true; |
| 106 } |
| 107 |
void PeerConnectionRemoteAudioSource::OnData(
    const void* audio_data, int bits_per_sample, int sample_rate,
    size_t number_of_channels, size_t number_of_frames) {
  // Receives one buffer of interleaved remote audio from WebRTC and fans it
  // out to all tracks. NOTE(review): unlike the other methods, there is no
  // thread-checker here — presumably this runs on WebRTC's audio delivery
  // thread rather than the main render thread; confirm against the
  // AudioTrackSinkInterface contract.

  // TODO(tommi): We should get the timestamp from WebRTC.
  base::TimeTicks playout_time(base::TimeTicks::Now());

  // (Re)allocate the scratch bus only when the buffer geometry changes.
  if (!audio_bus_ ||
      static_cast<size_t>(audio_bus_->channels()) != number_of_channels ||
      static_cast<size_t>(audio_bus_->frames()) != number_of_frames) {
    audio_bus_ = media::AudioBus::Create(number_of_channels,
                                         number_of_frames);
  }

  // De-interleave the incoming integer PCM into the bus.
  audio_bus_->FromInterleaved(audio_data, number_of_frames,
                              bits_per_sample / 8);

  // Push a new format to the source whenever any property of the incoming
  // buffers differs from the currently published parameters.
  // NOTE(review): the published format hard-codes 16 bits per sample while
  // the conversion above honors |bits_per_sample| — consistent only if WebRTC
  // always delivers 16-bit samples; confirm.
  media::AudioParameters params = MediaStreamAudioSource::GetAudioParameters();
  if (!params.IsValid() ||
      params.format() != media::AudioParameters::AUDIO_PCM_LOW_LATENCY ||
      static_cast<size_t>(params.channels()) != number_of_channels ||
      params.sample_rate() != sample_rate ||
      static_cast<size_t>(params.frames_per_buffer()) != number_of_frames) {
    MediaStreamAudioSource::SetFormat(media::AudioParameters(
        media::AudioParameters::AUDIO_PCM_LOW_LATENCY,
        media::GuessChannelLayout(number_of_channels),
        sample_rate, 16, number_of_frames));
  }

  MediaStreamAudioSource::DeliverDataToTracks(*audio_bus_, playout_time);
}
| 138 |
| 139 } // namespace content |
OLD | NEW |