OLD | NEW |
| (Empty) |
1 // Copyright 2016 The Chromium Authors. All rights reserved. | |
2 // Use of this source code is governed by a BSD-style license that can be | |
3 // found in the LICENSE file. | |
4 | |
5 #include "content/renderer/media/webrtc/webrtc_audio_sink.h" | |
6 | |
7 #include <algorithm> | |
8 #include <limits> | |
9 | |
10 #include "base/bind.h" | |
11 #include "base/bind_helpers.h" | |
12 #include "base/location.h" | |
13 #include "base/logging.h" | |
14 #include "base/message_loop/message_loop.h" | |
15 | |
16 namespace content { | |
17 | |
// Constructs the sink together with its ref-counted Adapter, the object that
// implements webrtc::AudioTrackInterface for the signaling layer.
//
// |label| is the track label forwarded to webrtc::MediaStreamTrack.
// |track_source| is the webrtc audio source this track wraps.
// |signaling_task_runner| runs the libjingle signaling callbacks.
//
// |fifo_| rebuffers incoming audio and invokes DeliverRebufferedAudio()
// synchronously from OnData() through the bound callback. base::Unretained
// is safe here because |fifo_| is a member and cannot outlive |this|.
WebRtcAudioSink::WebRtcAudioSink(
    const std::string& label,
    scoped_refptr<webrtc::AudioSourceInterface> track_source,
    scoped_refptr<base::SingleThreadTaskRunner> signaling_task_runner)
    : adapter_(new rtc::RefCountedObject<Adapter>(
          label, std::move(track_source), std::move(signaling_task_runner))),
      fifo_(base::Bind(&WebRtcAudioSink::DeliverRebufferedAudio,
                       base::Unretained(this))) {
  DVLOG(1) << "WebRtcAudioSink::WebRtcAudioSink()";
}
28 | |
// Must be destroyed on the same thread that constructed this instance, as
// enforced by |thread_checker_|.
WebRtcAudioSink::~WebRtcAudioSink() {
  DCHECK(thread_checker_.CalledOnValidThread());
  DVLOG(1) << "WebRtcAudioSink::~WebRtcAudioSink()";
}
33 | |
34 void WebRtcAudioSink::SetAudioProcessor( | |
35 scoped_refptr<MediaStreamAudioProcessor> processor) { | |
36 DCHECK(thread_checker_.CalledOnValidThread()); | |
37 DCHECK(processor.get()); | |
38 adapter_->set_processor(std::move(processor)); | |
39 } | |
40 | |
41 void WebRtcAudioSink::SetLevel( | |
42 scoped_refptr<MediaStreamAudioLevelCalculator::Level> level) { | |
43 DCHECK(thread_checker_.CalledOnValidThread()); | |
44 DCHECK(level.get()); | |
45 adapter_->set_level(std::move(level)); | |
46 } | |
47 | |
48 void WebRtcAudioSink::OnEnabledChanged(bool enabled) { | |
49 DCHECK(thread_checker_.CalledOnValidThread()); | |
50 adapter_->signaling_task_runner()->PostTask( | |
51 FROM_HERE, | |
52 base::Bind( | |
53 base::IgnoreResult(&WebRtcAudioSink::Adapter::set_enabled), | |
54 adapter_, enabled)); | |
55 } | |
56 | |
// Receives a buffer of captured audio on the audio delivery thread.
// |estimated_capture_time| is currently unused (see the TODO about AV sync
// in DeliverRebufferedAudio()).
void WebRtcAudioSink::OnData(const media::AudioBus& audio_bus,
                             base::TimeTicks estimated_capture_time) {
  DCHECK(audio_thread_checker_.CalledOnValidThread());
  // The following will result in zero, one, or multiple synchronous calls to
  // DeliverRebufferedAudio().
  fifo_.Push(audio_bus);
}
64 | |
65 void WebRtcAudioSink::OnSetFormat(const media::AudioParameters& params) { | |
66 // On a format change, the thread delivering audio might have also changed. | |
67 audio_thread_checker_.DetachFromThread(); | |
68 DCHECK(audio_thread_checker_.CalledOnValidThread()); | |
69 | |
70 DCHECK(params.IsValid()); | |
71 params_ = params; | |
72 fifo_.Reset(params_.frames_per_buffer()); | |
73 const int num_pcm16_data_elements = | |
74 params_.frames_per_buffer() * params_.channels(); | |
75 interleaved_data_.reset(new int16_t[num_pcm16_data_elements]); | |
76 } | |
77 | |
// FIFO callback (bound in the constructor): receives one fixed-size chunk of
// audio — sized by the fifo_.Reset() call in OnSetFormat() — converts it to
// interleaved int16_t samples in |interleaved_data_|, and fans it out to the
// registered webrtc sinks via the adapter.
void WebRtcAudioSink::DeliverRebufferedAudio(const media::AudioBus& audio_bus,
                                             int frame_delay) {
  DCHECK(audio_thread_checker_.CalledOnValidThread());
  DCHECK(params_.IsValid());

  // TODO(miu): Why doesn't a WebRTC sink care about reference time passed to
  // OnData(), and the |frame_delay| here? How is AV sync achieved otherwise?

  // TODO(henrika): Remove this conversion once the interface in libjingle
  // supports float vectors.
  audio_bus.ToInterleaved(audio_bus.frames(),
                          sizeof(interleaved_data_[0]),
                          interleaved_data_.get());
  adapter_->DeliverPCMToWebRtcSinks(interleaved_data_.get(),
                                    params_.sample_rate(),
                                    audio_bus.channels(),
                                    audio_bus.frames());
}
96 | |
namespace {
// TODO(miu): MediaStreamAudioProcessor destructor requires this nonsense.
// Intentionally empty: binding |processor| to this no-op and posting it to
// the main thread ensures the reference held by the posted task is released
// there, so the processor can be destroyed on the main thread.
void DereferenceOnMainThread(
    const scoped_refptr<MediaStreamAudioProcessor>& processor) {}
}  // namespace
102 | |
// Constructs the libjingle-facing track adapter. Must be constructed on the
// main thread: |main_task_runner_| is captured from the current message loop
// and later used to destroy |audio_processor_| there (see ~Adapter()).
WebRtcAudioSink::Adapter::Adapter(
    const std::string& label,
    scoped_refptr<webrtc::AudioSourceInterface> source,
    scoped_refptr<base::SingleThreadTaskRunner> signaling_task_runner)
    : webrtc::MediaStreamTrack<webrtc::AudioTrackInterface>(label),
      source_(std::move(source)),
      signaling_task_runner_(std::move(signaling_task_runner)),
      main_task_runner_(base::MessageLoop::current()->task_runner()) {
  DCHECK(signaling_task_runner_);
}
113 | |
// May run on any thread holding the last reference. The audio processor must
// be released on the main thread, so its final reference is moved into a
// no-op task posted to |main_task_runner_| (see DereferenceOnMainThread).
WebRtcAudioSink::Adapter::~Adapter() {
  if (audio_processor_) {
    main_task_runner_->PostTask(
        FROM_HERE,
        base::Bind(&DereferenceOnMainThread, std::move(audio_processor_)));
  }
}
121 | |
122 void WebRtcAudioSink::Adapter::DeliverPCMToWebRtcSinks( | |
123 const int16_t* audio_data, | |
124 int sample_rate, | |
125 size_t number_of_channels, | |
126 size_t number_of_frames) { | |
127 base::AutoLock auto_lock(lock_); | |
128 for (webrtc::AudioTrackSinkInterface* sink : sinks_) { | |
129 sink->OnData(audio_data, sizeof(int16_t) * 8, sample_rate, | |
130 number_of_channels, number_of_frames); | |
131 } | |
132 } | |
133 | |
// webrtc::MediaStreamTrackInterface implementation: identifies this track
// using the interface's audio-kind constant.
std::string WebRtcAudioSink::Adapter::kind() const {
  return webrtc::MediaStreamTrackInterface::kAudioKind;
}
137 | |
138 bool WebRtcAudioSink::Adapter::set_enabled(bool enable) { | |
139 DCHECK(!signaling_task_runner_ || | |
140 signaling_task_runner_->RunsTasksOnCurrentThread()); | |
141 return webrtc::MediaStreamTrack<webrtc::AudioTrackInterface>:: | |
142 set_enabled(enable); | |
143 } | |
144 | |
// webrtc::AudioTrackInterface implementation. Registers |sink| to receive
// audio from DeliverPCMToWebRtcSinks(). Double-registration is a programming
// error; the std::find lookup runs only in DCHECK-enabled builds.
void WebRtcAudioSink::Adapter::AddSink(webrtc::AudioTrackSinkInterface* sink) {
  DCHECK(!signaling_task_runner_ ||
         signaling_task_runner_->RunsTasksOnCurrentThread());
  DCHECK(sink);
  base::AutoLock auto_lock(lock_);
  DCHECK(std::find(sinks_.begin(), sinks_.end(), sink) == sinks_.end());
  sinks_.push_back(sink);
}
153 | |
154 void WebRtcAudioSink::Adapter::RemoveSink( | |
155 webrtc::AudioTrackSinkInterface* sink) { | |
156 DCHECK(!signaling_task_runner_ || | |
157 signaling_task_runner_->RunsTasksOnCurrentThread()); | |
158 base::AutoLock auto_lock(lock_); | |
159 const auto it = std::find(sinks_.begin(), sinks_.end(), sink); | |
160 if (it != sinks_.end()) | |
161 sinks_.erase(it); | |
162 } | |
163 | |
164 bool WebRtcAudioSink::Adapter::GetSignalLevel(int* level) { | |
165 DCHECK(!signaling_task_runner_ || | |
166 signaling_task_runner_->RunsTasksOnCurrentThread()); | |
167 | |
168 // |level_| is only set once, so it's safe to read without first acquiring a | |
169 // mutex. | |
170 if (!level_) | |
171 return false; | |
172 const float signal_level = level_->GetCurrent(); | |
173 DCHECK_GE(signal_level, 0.0f); | |
174 DCHECK_LE(signal_level, 1.0f); | |
175 // Convert from float in range [0.0,1.0] to an int in range [0,32767]. | |
176 *level = static_cast<int>(signal_level * std::numeric_limits<int16_t>::max() + | |
177 0.5f /* rounding to nearest int */); | |
178 return true; | |
179 } | |
180 | |
// webrtc::AudioTrackInterface implementation. Exposes the attached
// MediaStreamAudioProcessor (may be null if SetAudioProcessor() was never
// called) as a webrtc::AudioProcessorInterface.
rtc::scoped_refptr<webrtc::AudioProcessorInterface>
WebRtcAudioSink::Adapter::GetAudioProcessor() {
  DCHECK(!signaling_task_runner_ ||
         signaling_task_runner_->RunsTasksOnCurrentThread());
  return audio_processor_.get();
}
187 | |
// webrtc::AudioTrackInterface implementation. Returns the wrapped source
// supplied at construction time; the pointer remains owned by |source_|.
webrtc::AudioSourceInterface* WebRtcAudioSink::Adapter::GetSource() const {
  DCHECK(!signaling_task_runner_ ||
         signaling_task_runner_->RunsTasksOnCurrentThread());
  return source_.get();
}
193 | |
194 } // namespace content | |
OLD | NEW |