Chromium Code Reviews

Unified Diff: remoting/protocol/webrtc_audio_source_adapter.cc

Issue 2392963003: Add Audio support in Chromoting host when using WebRTC. (Closed)
Patch Set: . Created 4 years, 2 months ago
Index: remoting/protocol/webrtc_audio_source_adapter.cc
diff --git a/remoting/protocol/webrtc_audio_source_adapter.cc b/remoting/protocol/webrtc_audio_source_adapter.cc
new file mode 100644
index 0000000000000000000000000000000000000000..d9d9ca3817c602da9b5cd03b65d90bc92b1ce434
--- /dev/null
+++ b/remoting/protocol/webrtc_audio_source_adapter.cc
@@ -0,0 +1,191 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "remoting/protocol/webrtc_audio_source_adapter.h"
+
+#include "base/bind.h"
+#include "base/logging.h"
+#include "base/synchronization/lock.h"
+#include "base/threading/thread_checker.h"
+#include "remoting/proto/audio.pb.h"
+#include "remoting/protocol/audio_source.h"
+
+namespace remoting {
+namespace protocol {
+
+static const int kChannels = 2;
+static const int kBytesPerSample = 2;
+
+// Frame size expected by webrtc::AudioTrackSinkInterface.
+static constexpr base::TimeDelta kAudioFrameDuration =
+    base::TimeDelta::FromMilliseconds(10);
+
+class WebrtcAudioSourceAdapter::Core {
+ public:
+  Core();
+  ~Core();
+
+  void Start(std::unique_ptr<AudioSource> audio_source);
+  void Pause(bool pause);
+  void AddSink(webrtc::AudioTrackSinkInterface* sink);
+  void RemoveSink(webrtc::AudioTrackSinkInterface* sink);
+
+ private:
+  void OnAudioPacket(std::unique_ptr<AudioPacket> packet);
+
+  std::unique_ptr<AudioSource> audio_source_;
+
+  bool paused_ = false;
+
+  int sampling_rate_ = 0;
+
+  // webrtc::AudioTrackSinkInterface expects to get audio in 10ms frames (see
+  // kAudioFrameDuration). AudioSource may generate AudioPackets for time
+  // intervals that are not multiples of 10ms. In that case the left-over
+  // samples are kept in |partial_frame_| until the next AudioPacket is
+  // captured by the AudioSource.
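+  // For example, at 48000 Hz a 25 ms packet yields two full 10 ms frames
+  // plus 5 ms (240 samples) that stay buffered in |partial_frame_|.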
+  std::vector<uint8_t> partial_frame_;
+
+  base::ObserverList<webrtc::AudioTrackSinkInterface> audio_sinks_;
+  base::Lock audio_sinks_lock_;
+
+  base::ThreadChecker thread_checker_;
+};
+
+WebrtcAudioSourceAdapter::Core::Core() {
+  thread_checker_.DetachFromThread();
+}
+
+WebrtcAudioSourceAdapter::Core::~Core() {}
+
+void WebrtcAudioSourceAdapter::Core::Start(
+    std::unique_ptr<AudioSource> audio_source) {
+  DCHECK(thread_checker_.CalledOnValidThread());
+  audio_source_ = std::move(audio_source);
+  audio_source_->Start(
+      base::Bind(&Core::OnAudioPacket, base::Unretained(this)));
+}
+
+void WebrtcAudioSourceAdapter::Core::Pause(bool pause) {
+  DCHECK(thread_checker_.CalledOnValidThread());
+  paused_ = pause;
+}
+
+void WebrtcAudioSourceAdapter::Core::AddSink(
+    webrtc::AudioTrackSinkInterface* sink) {
+  // Can be called on any thread.
+  base::AutoLock lock(audio_sinks_lock_);
+  audio_sinks_.AddObserver(sink);
+}
+
+void WebrtcAudioSourceAdapter::Core::RemoveSink(
+    webrtc::AudioTrackSinkInterface* sink) {
+  // Can be called on any thread.
+  base::AutoLock lock(audio_sinks_lock_);
+  audio_sinks_.RemoveObserver(sink);
+}
+
+void WebrtcAudioSourceAdapter::Core::OnAudioPacket(
+    std::unique_ptr<AudioPacket> packet) {
+  DCHECK(thread_checker_.CalledOnValidThread());
+
+  if (paused_)
+    return;
+
+  DCHECK_EQ(packet->channels(), kChannels);
+  DCHECK_EQ(packet->bytes_per_sample(), kBytesPerSample);
+
+  if (sampling_rate_ != packet->sampling_rate()) {
+    sampling_rate_ = packet->sampling_rate();
+    partial_frame_.clear();
+  }
+
+  size_t samples_per_frame =
+      kAudioFrameDuration * sampling_rate_ / base::TimeDelta::FromSeconds(1);
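+  // For example, at 48000 Hz each 10 ms frame holds 480 samples, i.e.
+  // 480 * 2 channels * 2 bytes per sample = 1920 bytes per frame.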
+  size_t bytes_per_frame = kBytesPerSample * kChannels * samples_per_frame;
+
+  const std::string& data = packet->data(0);
+
+  size_t position = 0;
+
+  base::AutoLock lock(audio_sinks_lock_);
+
+  if (!partial_frame_.empty()) {
+    size_t bytes_to_append =
+        std::min(bytes_per_frame - partial_frame_.size(), data.size());
+    position += bytes_to_append;
+    partial_frame_.insert(partial_frame_.end(), data.data(),
+                          data.data() + bytes_to_append);
+    if (partial_frame_.size() < bytes_per_frame) {
+      // Still don't have a full frame.
+      return;
+    }
+
+    // Here |partial_frame_| always contains a full frame.
+    DCHECK_EQ(partial_frame_.size(), bytes_per_frame);
+
+    FOR_EACH_OBSERVER(webrtc::AudioTrackSinkInterface, audio_sinks_,
+                      OnData(&partial_frame_.front(), kBytesPerSample * 8,
+                             sampling_rate_, kChannels, samples_per_frame));
+  }
+
+  while (position + bytes_per_frame <= data.size()) {
+    FOR_EACH_OBSERVER(webrtc::AudioTrackSinkInterface, audio_sinks_,
+                      OnData(data.data() + position, kBytesPerSample * 8,
+                             sampling_rate_, kChannels, samples_per_frame));
+    position += bytes_per_frame;
+  }
+
+  partial_frame_.assign(data.data() + position, data.data() + data.size());
+}
+
+WebrtcAudioSourceAdapter::WebrtcAudioSourceAdapter(
+    scoped_refptr<base::SingleThreadTaskRunner> audio_task_runner)
+    : audio_task_runner_(audio_task_runner), core_(new Core()) {}
+
+WebrtcAudioSourceAdapter::~WebrtcAudioSourceAdapter() {
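+  // |core_| is used on |audio_task_runner_|, so it is destroyed there;
+  // DeleteSoon runs after any Start()/Pause() tasks already queued.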
+  audio_task_runner_->DeleteSoon(FROM_HERE, core_.release());
+}
+
+void WebrtcAudioSourceAdapter::Start(
+    std::unique_ptr<AudioSource> audio_source) {
+  audio_task_runner_->PostTask(
+      FROM_HERE, base::Bind(&Core::Start, base::Unretained(core_.get()),
+                            base::Passed(&audio_source)));
+}
+
+void WebrtcAudioSourceAdapter::Pause(bool pause) {
+  audio_task_runner_->PostTask(
+      FROM_HERE,
+      base::Bind(&Core::Pause, base::Unretained(core_.get()), pause));
+}
+
+WebrtcAudioSourceAdapter::SourceState WebrtcAudioSourceAdapter::state() const {
+  return kLive;
+}
+
+bool WebrtcAudioSourceAdapter::remote() const {
+  return false;
+}
+
+void WebrtcAudioSourceAdapter::RegisterAudioObserver(AudioObserver* observer) {}
+
+void WebrtcAudioSourceAdapter::UnregisterAudioObserver(
+    AudioObserver* observer) {}
+
+void WebrtcAudioSourceAdapter::AddSink(webrtc::AudioTrackSinkInterface* sink) {
+  core_->AddSink(sink);
+}
+void WebrtcAudioSourceAdapter::RemoveSink(
+    webrtc::AudioTrackSinkInterface* sink) {
+  core_->RemoveSink(sink);
+}
+
+void WebrtcAudioSourceAdapter::RegisterObserver(
+    webrtc::ObserverInterface* observer) {}
+void WebrtcAudioSourceAdapter::UnregisterObserver(
+    webrtc::ObserverInterface* observer) {}
+
+}  // namespace protocol
+}  // namespace remoting
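
For context, a minimal sketch of how this adapter might be wired into the host's WebRTC connection, assuming WebrtcAudioSourceAdapter implements webrtc::AudioSourceInterface (suggested by the state()/remote()/AddSink() overrides in this file). The CreateHostAudioSource() helper, the ref-counted construction, and the "audio" track label are illustrative and not part of this patch:

  // Illustrative only: assumes the adapter derives from
  // webrtc::AudioSourceInterface and is therefore ref-counted.
  rtc::scoped_refptr<remoting::protocol::WebrtcAudioSourceAdapter> source(
      new rtc::RefCountedObject<remoting::protocol::WebrtcAudioSourceAdapter>(
          audio_task_runner));

  // Hypothetical factory for the host-side AudioSource (e.g. an audio
  // capturer); not defined in this patch.
  source->Start(CreateHostAudioSource());

  // Hand the source to the WebRTC stack; the resulting track pulls 10 ms
  // frames through the sinks registered via AddSink().
  rtc::scoped_refptr<webrtc::AudioTrackInterface> track =
      peer_connection_factory->CreateAudioTrack("audio", source.get());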