Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(26)

Unified Diff: content/renderer/media/media_stream_audio_source.h

Issue 1647773002: MediaStream audio sourcing: Bypass audio processing for non-WebRTC cases. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: First attempt Created 4 years, 11 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View side-by-side diff with in-line comments
Download patch
Index: content/renderer/media/media_stream_audio_source.h
diff --git a/content/renderer/media/media_stream_audio_source.h b/content/renderer/media/media_stream_audio_source.h
index e332b83e0c8fcb51143f42bf8f3cfecba2b92186..b9d8942c44a3728efc2fc536c93ac55463bca709 100644
--- a/content/renderer/media/media_stream_audio_source.h
+++ b/content/renderer/media/media_stream_audio_source.h
@@ -5,60 +5,180 @@
#ifndef CONTENT_RENDERER_MEDIA_MEDIA_STREAM_AUDIO_SOURCE_H_
#define CONTENT_RENDERER_MEDIA_MEDIA_STREAM_AUDIO_SOURCE_H_
+#include <vector>
+
#include "base/compiler_specific.h"
#include "base/macros.h"
+#include "base/memory/weak_ptr.h"
+#include "base/synchronization/lock.h"
+#include "base/threading/thread_checker.h"
#include "content/common/content_export.h"
+#include "content/renderer/media/media_stream_audio_track.h"
#include "content/renderer/media/media_stream_source.h"
#include "content/renderer/media/webrtc/peer_connection_dependency_factory.h"
#include "content/renderer/media/webrtc_audio_capturer.h"
+#include "media/base/audio_capturer_source.h"
#include "third_party/libjingle/source/talk/app/webrtc/mediastreaminterface.h"
namespace content {
+// Represents a source of audio, and manages the delivery of audio data between
+// a media::AudioCapturerSource and one or more MediaStreamAudioTracks. There
+// are three main use cases, corresponding to the three available constructors:
+//
+// 1. Null source: Makes this MediaStreamAudioSource a place-holder
+// implementation that goes through all the motions, but never transports
+// any audio data.
+// 2. Local source: Uses content::AudioDeviceFactory to auto-create the
+// source, using the parameters and session ID found in StreamDeviceInfo,
+// just before the first track is connected. Automatically determines
+// whether to transport audio data directly to the tracks, or to instead
+// pass it through the WebRTC audio processing pipeline (including
+// MediaStreamAudioProcessor).
+// 3. Externally-provided local or remote source: Allows users of the public
+// content::MediaStreamApi to provide a media::AudioCapturerSource to be
+// used as the source of audio data. Audio data is transported directly to
+// the tracks (i.e., there is no audio processing).
+//
+// An instance of this class is owned by blink::WebMediaStreamSource.
class CONTENT_EXPORT MediaStreamAudioSource
hubbe 2016/01/28 21:48:13 Would it make sense to write this as an abstract class?
miu 2016/01/29 19:43:59 Good point. I'll work on this and ping when ready
- : NON_EXPORTED_BASE(public MediaStreamSource) {
+ : NON_EXPORTED_BASE(public MediaStreamSource),
+ NON_EXPORTED_BASE(public media::AudioCapturerSource::CaptureCallback) {
public:
- MediaStreamAudioSource(int render_frame_id,
- const StreamDeviceInfo& device_info,
- const SourceStoppedCallback& stop_callback,
- PeerConnectionDependencyFactory* factory);
+ // Construct a "null" source (as a place-holder, or for testing).
MediaStreamAudioSource();
- ~MediaStreamAudioSource() override;
- void AddTrack(const blink::WebMediaStreamTrack& track,
- const blink::WebMediaConstraints& constraints,
- const ConstraintsCallback& callback);
+ // Construct a local source (e.g., microphone or loopback audio capture) of
+ // audio, using the audio parameters found in |device_info|.
+ // |consumer_render_frame_id| references the RenderFrame that will consume the
+ // audio data. The source is not started until the first call to
+ // ConnectToTrack().
+ MediaStreamAudioSource(int consumer_render_frame_id,
+ const StreamDeviceInfo& device_info);
+
+ // Construct a source of audio that wraps a media::AudioCapturerSource
+ // implementation. MediaStreamAudioSource will call the source's Initialize()
+ // and Start/Stop() methods at some point in the future. Audio will be
+ // provided in the format specified by |sample_rate|, |channel_layout|, and
+ // |frames_per_buffer|. |is_remote| must be true if the content is being
+ // generated from outside of the application (e.g., audio that is being
+ // streamed from a remote device). The source is not started until the first
+ // call to ConnectToTrack().
+ MediaStreamAudioSource(
+ const scoped_refptr<media::AudioCapturerSource>& source,
+ int sample_rate,
+ media::ChannelLayout channel_layout,
+ int frames_per_buffer,
+ bool is_remote);
+
+ ~MediaStreamAudioSource() final;
+
+ // To enable WebRTC-specific audio-processing features, this must be called
+ // before the first call to ConnectToTrack().
+ void set_dependency_factory(PeerConnectionDependencyFactory* factory) {
+ pc_factory_ = factory;
+ }
+ // Connects this source to the given |track|, creating the appropriate
+ // implementation of the content::MediaStreamAudioTrack interface, which
+ // becomes associated with and owned by |track|. |constraints| is optional.
+ //
+  // Returns true if the source was successfully started and a
+  // MediaStreamAudioTrack was assigned to |track.extraData()|.
+ bool ConnectToTrack(const blink::WebMediaStreamTrack& track,
+ const blink::WebMediaConstraints& constraints);
+
+ // Getters/Setters to hold references to objects when the WebRTC audio
+ // pipeline is being used.
void SetLocalAudioSource(webrtc::AudioSourceInterface* source) {
local_audio_source_ = source;
}
-
void SetAudioCapturer(const scoped_refptr<WebRtcAudioCapturer>& capturer) {
DCHECK(!audio_capturer_.get());
audio_capturer_ = capturer;
}
-
const scoped_refptr<WebRtcAudioCapturer>& GetAudioCapturer() {
return audio_capturer_;
}
-
webrtc::AudioSourceInterface* local_audio_source() {
return local_audio_source_.get();
}
protected:
- void DoStopSource() override;
+ // Called by the superclass to stop whichever source implementation is being
+ // used.
+ void DoStopSource() final;
private:
- const int render_frame_id_;
- PeerConnectionDependencyFactory* const factory_;
-
- // This member holds an instance of webrtc::LocalAudioSource. This is used
- // as a container for audio options.
+ // Implements the MediaStreamAudioTrack interface, providing the functionality
+ // of adding and removing MediaStreamAudioSinks and delivering audio data to
+ // each; all in a thread-safe manner.
+ //
+ // An instance of this class is owned by blink::WebMediaStreamTrack, but the
+ // AudioTee holds a weak reference to |this| to notify of its destruction.
+ class AudioTee;
+
+ // Determines whether the default audio pipeline or the WebRTC audio pipeline
+ // will be used, and then starts the appropriate source for that pipeline if
+ // needed. Returns true if the source was successfully started and
+ // MediaStreamAudioTracks can be created and connected to it.
+ bool EnsureSourceIsStarted(const blink::WebMediaConstraints& constraints);
+ void StartDefaultPipeline();
+ void StartWebRtcPipeline(const blink::WebMediaConstraints& constraints);
+
+ // Removes |tee| from the list of instances that get a copy of the source
+ // audio data.
+ void StopAudioDeliveryTo(AudioTee* tee);
+
+ // media::AudioCapturerSource::CaptureCallback implementation.
+ void Capture(const media::AudioBus* audio_bus,
+ int audio_delay_milliseconds,
+ double volume,
+ bool key_pressed) final;
+ void OnCaptureError(const std::string& message) final;
+
+ // The audio parameters to use for |source_|.
+ const media::AudioParameters params_;
+
+ // True if |source_| provides audio data from a remote application.
+ const bool is_remote_;
+
+ // Used when creating AudioInputDevices via the AudioDeviceFactory.
+ const int consumer_render_frame_id_;
+
+ // The current state of this source.
+ enum {
+ NULL_SOURCE_NOT_STARTED, // This instance is a "null" audio source.
hubbe 2016/01/28 21:48:13 I'm not a big fan of using different states for different kinds of sources.
+ INPUT_DEVICE_NOT_STARTED, // AudioInputDevice not started yet.
+ SOURCE_NOT_STARTED, // Source provided via ctor not started yet.
+ STARTED_DEFAULT_PIPELINE, // Started, this instance delivers audio.
+ STARTED_WEBRTC_PIPELINE, // Started, but using WebRTC audio pipeline.
+ STOPPED, // Source stopped.
+ } current_state_;
+
+ // This is lazy-instantiated on the first call to EnsureSourceIsStarted().
+ scoped_refptr<media::AudioCapturerSource> source_;
+
+ // List of currently-connected AudioTees. This is empty when using the
+ // WebRTC audio pipeline. While MediaStreamAudioSource creates these
+ // instances, blink::WebMediaStreamTrack instances own the objects.
+ std::vector<AudioTee*> audio_tees_;
+ base::Lock lock_; // Protects concurrent access to |audio_tees_|.
+
+  // References to WebRTC audio pipeline objects. These are null if not
+  // applicable.
+ PeerConnectionDependencyFactory* pc_factory_; // May be null, if unused.
scoped_refptr<webrtc::AudioSourceInterface> local_audio_source_;
-
scoped_refptr<WebRtcAudioCapturer> audio_capturer_;
+ // In debug builds, check that all methods that could cause object graph
+ // or data flow changes are being called on the same thread.
+ base::ThreadChecker thread_checker_;
+
+ // Provides weak pointers so that AudioTees can call StopAudioDeliveryTo()
+ // safely.
+ base::WeakPtrFactory<MediaStreamAudioSource> weak_factory_;
+
DISALLOW_COPY_AND_ASSIGN(MediaStreamAudioSource);
};

Powered by Google App Engine
This is Rietveld 408576698