Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2014 The Chromium Authors. All rights reserved. | 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef CONTENT_RENDERER_MEDIA_MEDIA_STREAM_AUDIO_SOURCE_H_ | 5 #ifndef CONTENT_RENDERER_MEDIA_MEDIA_STREAM_AUDIO_SOURCE_H_ |
| 6 #define CONTENT_RENDERER_MEDIA_MEDIA_STREAM_AUDIO_SOURCE_H_ | 6 #define CONTENT_RENDERER_MEDIA_MEDIA_STREAM_AUDIO_SOURCE_H_ |
| 7 | 7 |
| 8 #include <vector> | |
| 9 | |
| 8 #include "base/compiler_specific.h" | 10 #include "base/compiler_specific.h" |
| 9 #include "base/macros.h" | 11 #include "base/macros.h" |
| 12 #include "base/memory/weak_ptr.h" | |
| 13 #include "base/synchronization/lock.h" | |
| 14 #include "base/threading/thread_checker.h" | |
| 10 #include "content/common/content_export.h" | 15 #include "content/common/content_export.h" |
| 16 #include "content/renderer/media/media_stream_audio_track.h" | |
| 11 #include "content/renderer/media/media_stream_source.h" | 17 #include "content/renderer/media/media_stream_source.h" |
| 12 #include "content/renderer/media/webrtc/peer_connection_dependency_factory.h" | 18 #include "content/renderer/media/webrtc/peer_connection_dependency_factory.h" |
| 13 #include "content/renderer/media/webrtc_audio_capturer.h" | 19 #include "content/renderer/media/webrtc_audio_capturer.h" |
| 20 #include "media/base/audio_capturer_source.h" | |
| 14 #include "third_party/libjingle/source/talk/app/webrtc/mediastreaminterface.h" | 21 #include "third_party/libjingle/source/talk/app/webrtc/mediastreaminterface.h" |
| 15 | 22 |
| 16 namespace content { | 23 namespace content { |
| 17 | 24 |
| 25 // Represents a source of audio, and manages the delivery of audio data between | |
| 26 // a media::AudioCapturerSource and one or more MediaStreamAudioTracks. There | |
| 27 // are three main use cases, corresponding to the three available constructors: | |
| 28 // | |
| 29 // 1. Null source: Makes this MediaStreamAudioSource a place-holder | |
| 30 // implementation that goes through all the motions, but never transports | |
| 31 // any audio data. | |
| 32 // 2. Local source: Uses content::AudioDeviceFactory to auto-create the | |
| 33 // source, using the parameters and session ID found in StreamDeviceInfo, | |
| 34 // just before the first track is connected. Automatically determines | |
| 35 // whether to transport audio data directly to the tracks, or to instead | |
| 36 // pass it through the WebRTC audio processing pipeline (including | |
| 37 // MediaStreamAudioProcessor). | |
| 38 // 3. Externally-provided local or remote source: Allows users of the public | |
| 39 // content::MediaStreamApi to provide a media::AudioCapturerSource to be | |
| 40 // used as the source of audio data. Audio data is transported directly to | |
| 41 // the tracks (i.e., there is no audio processing). | |
| 42 // | |
| 43 // An instance of this class is owned by blink::WebMediaStreamSource. | |
| 18 class CONTENT_EXPORT MediaStreamAudioSource | 44 class CONTENT_EXPORT MediaStreamAudioSource |
|
hubbe
2016/01/28 21:48:13
Would it make sense to write this as an abstract class?
miu
2016/01/29 19:43:59
Good point. I'll work on this and ping when ready.
| |
| 19 : NON_EXPORTED_BASE(public MediaStreamSource) { | 45 : NON_EXPORTED_BASE(public MediaStreamSource), |
| 46 NON_EXPORTED_BASE(public media::AudioCapturerSource::CaptureCallback) { | |
| 20 public: | 47 public: |
| 21 MediaStreamAudioSource(int render_frame_id, | 48 // Construct a "null" source (as a place-holder, or for testing). |
| 22 const StreamDeviceInfo& device_info, | |
| 23 const SourceStoppedCallback& stop_callback, | |
| 24 PeerConnectionDependencyFactory* factory); | |
| 25 MediaStreamAudioSource(); | 49 MediaStreamAudioSource(); |
| 26 ~MediaStreamAudioSource() override; | |
| 27 | 50 |
| 28 void AddTrack(const blink::WebMediaStreamTrack& track, | 51 // Construct a local source (e.g., microphone or loopback audio capture) of |
| 29 const blink::WebMediaConstraints& constraints, | 52 // audio, using the audio parameters found in |device_info|. |
| 30 const ConstraintsCallback& callback); | 53 // |consumer_render_frame_id| references the RenderFrame that will consume the |
| 54 // audio data. The source is not started until the first call to | |
| 55 // ConnectToTrack(). | |
| 56 MediaStreamAudioSource(int consumer_render_frame_id, | |
| 57 const StreamDeviceInfo& device_info); | |
| 31 | 58 |
| 59 // Construct a source of audio that wraps a media::AudioCapturerSource | |
| 60 // implementation. MediaStreamAudioSource will call the source's Initialize() | |
| 61 // and Start/Stop() methods at some point in the future. Audio will be | |
| 62 // provided in the format specified by |sample_rate|, |channel_layout|, and | |
| 63 // |frames_per_buffer|. |is_remote| must be true if the content is being | |
| 64 // generated from outside of the application (e.g., audio that is being | |
| 65 // streamed from a remote device). The source is not started until the first | |
| 66 // call to ConnectToTrack(). | |
| 67 MediaStreamAudioSource( | |
| 68 const scoped_refptr<media::AudioCapturerSource>& source, | |
| 69 int sample_rate, | |
| 70 media::ChannelLayout channel_layout, | |
| 71 int frames_per_buffer, | |
| 72 bool is_remote); | |
| 73 | |
| 74 ~MediaStreamAudioSource() final; | |
| 75 | |
| 76 // To enable WebRTC-specific audio-processing features, this must be called | |
| 77 // before the first call to ConnectToTrack(). | |
| 78 void set_dependency_factory(PeerConnectionDependencyFactory* factory) { | |
| 79 pc_factory_ = factory; | |
| 80 } | |
| 81 | |
| 82 // Connects this source to the given |track|, creating the appropriate | |
| 83 // implementation of the content::MediaStreamAudioTrack interface, which | |
| 84 // becomes associated with and owned by |track|. |constraints| is optional. | |
| 85 // | |
| 86 // Returns true if the source was successfully started and the | |
| 87 // MediaStreamAudioTrack assigned to |track.extraData()|. | |
| 88 bool ConnectToTrack(const blink::WebMediaStreamTrack& track, | |
| 89 const blink::WebMediaConstraints& constraints); | |
| 90 | |
| 91 // Getters/Setters to hold references to objects when the WebRTC audio | |
| 92 // pipeline is being used. | |
| 32 void SetLocalAudioSource(webrtc::AudioSourceInterface* source) { | 93 void SetLocalAudioSource(webrtc::AudioSourceInterface* source) { |
| 33 local_audio_source_ = source; | 94 local_audio_source_ = source; |
| 34 } | 95 } |
| 35 | |
| 36 void SetAudioCapturer(const scoped_refptr<WebRtcAudioCapturer>& capturer) { | 96 void SetAudioCapturer(const scoped_refptr<WebRtcAudioCapturer>& capturer) { |
| 37 DCHECK(!audio_capturer_.get()); | 97 DCHECK(!audio_capturer_.get()); |
| 38 audio_capturer_ = capturer; | 98 audio_capturer_ = capturer; |
| 39 } | 99 } |
| 40 | |
| 41 const scoped_refptr<WebRtcAudioCapturer>& GetAudioCapturer() { | 100 const scoped_refptr<WebRtcAudioCapturer>& GetAudioCapturer() { |
| 42 return audio_capturer_; | 101 return audio_capturer_; |
| 43 } | 102 } |
| 44 | |
| 45 webrtc::AudioSourceInterface* local_audio_source() { | 103 webrtc::AudioSourceInterface* local_audio_source() { |
| 46 return local_audio_source_.get(); | 104 return local_audio_source_.get(); |
| 47 } | 105 } |
| 48 | 106 |
| 49 protected: | 107 protected: |
| 50 void DoStopSource() override; | 108 // Called by the superclass to stop whichever source implementation is being |
| 109 // used. | |
| 110 void DoStopSource() final; | |
| 51 | 111 |
| 52 private: | 112 private: |
| 53 const int render_frame_id_; | 113 // Implements the MediaStreamAudioTrack interface, providing the functionality |
| 54 PeerConnectionDependencyFactory* const factory_; | 114 // of adding and removing MediaStreamAudioSinks and delivering audio data to |
| 115 // each; all in a thread-safe manner. | |
| 116 // | |
| 117 // An instance of this class is owned by blink::WebMediaStreamTrack, but the | |
| 118 // AudioTee holds a weak reference to |this| to notify of its destruction. | |
| 119 class AudioTee; | |
| 55 | 120 |
| 56 // This member holds an instance of webrtc::LocalAudioSource. This is used | 121 // Determines whether the default audio pipeline or the WebRTC audio pipeline |
| 57 // as a container for audio options. | 122 // will be used, and then starts the appropriate source for that pipeline if |
| 123 // needed. Returns true if the source was successfully started and | |
| 124 // MediaStreamAudioTracks can be created and connected to it. | |
| 125 bool EnsureSourceIsStarted(const blink::WebMediaConstraints& constraints); | |
| 126 void StartDefaultPipeline(); | |
| 127 void StartWebRtcPipeline(const blink::WebMediaConstraints& constraints); | |
| 128 | |
| 129 // Removes |tee| from the list of instances that get a copy of the source | |
| 130 // audio data. | |
| 131 void StopAudioDeliveryTo(AudioTee* tee); | |
| 132 | |
| 133 // media::AudioCapturerSource::CaptureCallback implementation. | |
| 134 void Capture(const media::AudioBus* audio_bus, | |
| 135 int audio_delay_milliseconds, | |
| 136 double volume, | |
| 137 bool key_pressed) final; | |
| 138 void OnCaptureError(const std::string& message) final; | |
| 139 | |
| 140 // The audio parameters to use for |source_|. | |
| 141 const media::AudioParameters params_; | |
| 142 | |
| 143 // True if |source_| provides audio data from a remote application. | |
| 144 const bool is_remote_; | |
| 145 | |
| 146 // Used when creating AudioInputDevices via the AudioDeviceFactory. | |
| 147 const int consumer_render_frame_id_; | |
| 148 | |
| 149 // The current state of this source. | |
| 150 enum { | |
| 151 NULL_SOURCE_NOT_STARTED, // This instance is a "null" audio source. | |
|
hubbe
2016/01/28 21:48:13
I'm not a big fan of using different states for different source types.
| |
| 152 INPUT_DEVICE_NOT_STARTED, // AudioInputDevice not started yet. | |
| 153 SOURCE_NOT_STARTED, // Source provided via ctor not started yet. | |
| 154 STARTED_DEFAULT_PIPELINE, // Started, this instance delivers audio. | |
| 155 STARTED_WEBRTC_PIPELINE, // Started, but using WebRTC audio pipeline. | |
| 156 STOPPED, // Source stopped. | |
| 157 } current_state_; | |
| 158 | |
| 159 // This is lazy-instantiated on the first call to EnsureSourceIsStarted(). | |
| 160 scoped_refptr<media::AudioCapturerSource> source_; | |
| 161 | |
| 162 // List of currently-connected AudioTees. This is empty when using the | |
| 163 // WebRTC audio pipeline. While MediaStreamAudioSource creates these | |
| 164 // instances, blink::WebMediaStreamTrack instances own the objects. | |
| 165 std::vector<AudioTee*> audio_tees_; | |
| 166 base::Lock lock_; // Protects concurrent access to |audio_tees_|. | |
| 167 | |
| 168 // References to WebRTC audio pipeline objects. These are null, if not | |
| 169 // applicable. | |
| 170 PeerConnectionDependencyFactory* pc_factory_; // May be null, if unused. | |
| 58 scoped_refptr<webrtc::AudioSourceInterface> local_audio_source_; | 171 scoped_refptr<webrtc::AudioSourceInterface> local_audio_source_; |
| 172 scoped_refptr<WebRtcAudioCapturer> audio_capturer_; | |
| 59 | 173 |
| 60 scoped_refptr<WebRtcAudioCapturer> audio_capturer_; | 174 // In debug builds, check that all methods that could cause object graph |
| 175 // or data flow changes are being called on the same thread. | |
| 176 base::ThreadChecker thread_checker_; | |
| 177 | |
| 178 // Provides weak pointers so that AudioTees can call StopAudioDeliveryTo() | |
| 179 // safely. | |
| 180 base::WeakPtrFactory<MediaStreamAudioSource> weak_factory_; | |
| 61 | 181 |
| 62 DISALLOW_COPY_AND_ASSIGN(MediaStreamAudioSource); | 182 DISALLOW_COPY_AND_ASSIGN(MediaStreamAudioSource); |
| 63 }; | 183 }; |
| 64 | 184 |
| 65 } // namespace content | 185 } // namespace content |
| 66 | 186 |
| 67 #endif // CONTENT_RENDERER_MEDIA_MEDIA_STREAM_AUDIO_SOURCE_H_ | 187 #endif // CONTENT_RENDERER_MEDIA_MEDIA_STREAM_AUDIO_SOURCE_H_ |
| OLD | NEW |