OLD | NEW |
1 // Copyright 2013 The Chromium Authors. All rights reserved. | 1 // Copyright 2013 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef CONTENT_RENDERER_MEDIA_MEDIA_STREAM_AUDIO_PROCESSOR_H_ | 5 #ifndef CONTENT_RENDERER_MEDIA_MEDIA_STREAM_AUDIO_PROCESSOR_H_ |
6 #define CONTENT_RENDERER_MEDIA_MEDIA_STREAM_AUDIO_PROCESSOR_H_ | 6 #define CONTENT_RENDERER_MEDIA_MEDIA_STREAM_AUDIO_PROCESSOR_H_ |
7 | 7 |
8 #include "base/atomicops.h" | 8 #include "base/atomicops.h" |
9 #include "base/files/file.h" | 9 #include "base/files/file.h" |
10 #include "base/synchronization/lock.h" | 10 #include "base/synchronization/lock.h" |
(...skipping 17 matching lines...) Expand all Loading... |
28 class AudioParameters; | 28 class AudioParameters; |
29 } // namespace media | 29 } // namespace media |
30 | 30 |
31 namespace webrtc { | 31 namespace webrtc { |
32 class AudioFrame; | 32 class AudioFrame; |
33 class TypingDetection; | 33 class TypingDetection; |
34 } | 34 } |
35 | 35 |
36 namespace content { | 36 namespace content { |
37 | 37 |
| 38 class MediaStreamAudioBus; |
| 39 class MediaStreamAudioFifo; |
38 class RTCMediaConstraints; | 40 class RTCMediaConstraints; |
39 | 41 |
40 using webrtc::AudioProcessorInterface; | 42 using webrtc::AudioProcessorInterface; |
41 | 43 |
42 // This class owns an object of webrtc::AudioProcessing which contains signal | 44 // This class owns an object of webrtc::AudioProcessing which contains signal |
43 // processing components like AGC, AEC and NS. It enables the components based | 45 // processing components like AGC, AEC and NS. It enables the components based |
44 // on the getUserMedia constraints, processes the data and outputs it in a unit | 46 // on the getUserMedia constraints, processes the data and outputs it in a unit |
45 // of 10 ms data chunk. | 47 // of 10 ms data chunk. |
46 class CONTENT_EXPORT MediaStreamAudioProcessor : | 48 class CONTENT_EXPORT MediaStreamAudioProcessor : |
47 NON_EXPORTED_BASE(public WebRtcPlayoutDataSource::Sink), | 49 NON_EXPORTED_BASE(public WebRtcPlayoutDataSource::Sink), |
48 NON_EXPORTED_BASE(public AudioProcessorInterface), | 50 NON_EXPORTED_BASE(public AudioProcessorInterface), |
49 NON_EXPORTED_BASE(public AecDumpMessageFilter::AecDumpDelegate) { | 51 NON_EXPORTED_BASE(public AecDumpMessageFilter::AecDumpDelegate) { |
50 public: | 52 public: |
51 // Returns false if |kDisableAudioTrackProcessing| is set to true, otherwise | 53 // Returns false if |kDisableAudioTrackProcessing| is set to true, otherwise |
52 // returns true. | 54 // returns true. |
53 static bool IsAudioTrackProcessingEnabled(); | 55 static bool IsAudioTrackProcessingEnabled(); |
54 | 56 |
55 // |playout_data_source| is used to register this class as a sink to the | 57 // |playout_data_source| is used to register this class as a sink to the |
56 // WebRtc playout data for processing AEC. If clients do not enable AEC, | 58 // WebRtc playout data for processing AEC. If clients do not enable AEC, |
57 // |playout_data_source| won't be used. | 59 // |playout_data_source| won't be used. |
58 MediaStreamAudioProcessor(const blink::WebMediaConstraints& constraints, | 60 MediaStreamAudioProcessor(const blink::WebMediaConstraints& constraints, |
59 int effects, | 61 int effects, |
60 WebRtcPlayoutDataSource* playout_data_source); | 62 WebRtcPlayoutDataSource* playout_data_source); |
61 | 63 |
62 // Called when format of the capture data has changed. | 64 // Called when the format of the capture data has changed. |
63 // Called on the main render thread. The caller is responsible for stopping | 65 // Called on the main render thread. The caller is responsible for stopping |
64 // the capture thread before calling this method. | 66 // the capture thread before calling this method. |
65 // After this method, the capture thread will be changed to a new capture | 67 // After this method, the capture thread will be changed to a new capture |
66 // thread. | 68 // thread. |
67 void OnCaptureFormatChanged(const media::AudioParameters& source_params); | 69 void OnCaptureFormatChanged(const media::AudioParameters& source_params); |
68 | 70 |
69 // Pushes capture data in |audio_source| to the internal FIFO. | 71 // Pushes capture data in |audio_source| to the internal FIFO. Each call to |
| 72 // this method should be followed by calls to ProcessAndConsumeData() while |
| 73 // it returns false, to pull out all available data. |
70 // Called on the capture audio thread. | 74 // Called on the capture audio thread. |
71 void PushCaptureData(const media::AudioBus* audio_source); | 75 void PushCaptureData(const media::AudioBus* audio_source); |
72 | 76 |
73 // Processes a block of 10 ms data from the internal FIFO and outputs it via | 77 // Processes a block of 10 ms data from the internal FIFO and outputs it via |
74 // |out|. |out| is the address of the pointer that will be pointed to | 78 // |out|. |out| is the address of the pointer that will be pointed to |
75 // the post-processed data if the method is returning a true. The lifetime | 79 // the post-processed data if the method is returning a true. The lifetime |
76 // of the data represented by |out| is guaranteed to outlive the method call. | 80 // of the data represented by |out| is guaranteed until this method is called |
77 // That also says *|out| won't change until this method is called again. | 81 // again. |
78 // |new_volume| receives the new microphone volume from the AGC. | 82 // |new_volume| receives the new microphone volume from the AGC. |
79 // The new microphoen volume range is [0, 255], and the value will be 0 if | 83 // The new microphone volume range is [0, 255], and the value will be 0 if |
80 // the microphone volume should not be adjusted. | 84 // the microphone volume should not be adjusted. |
81 // Returns true if the internal FIFO has at least 10 ms data for processing, | 85 // Returns true if the internal FIFO has at least 10 ms data for processing, |
82 // otherwise false. | 86 // otherwise false. |
83 // |capture_delay|, |volume| and |key_pressed| will be passed to | |
84 // webrtc::AudioProcessing to help processing the data. | |
85 // Called on the capture audio thread. | 87 // Called on the capture audio thread. |
| 88 // |
| 89 // TODO(ajm): Don't we want this to output float? |
86 bool ProcessAndConsumeData(base::TimeDelta capture_delay, | 90 bool ProcessAndConsumeData(base::TimeDelta capture_delay, |
87 int volume, | 91 int volume, |
88 bool key_pressed, | 92 bool key_pressed, |
89 int* new_volume, | 93 int* new_volume, |
90 int16** out); | 94 int16** out); |
91 | 95 |
92 // Stops the audio processor, no more AEC dump or render data after calling | 96 // Stops the audio processor, no more AEC dump or render data after calling |
93 // this method. | 97 // this method. |
94 void Stop(); | 98 void Stop(); |
95 | 99 |
96 // The audio format of the input to the processor. | 100 // The audio formats of the capture input to and output from the processor. |
| 101 // Must only be called on the main render or audio capture threads. |
97 const media::AudioParameters& InputFormat() const; | 102 const media::AudioParameters& InputFormat() const; |
98 | |
99 // The audio format of the output from the processor. | |
100 const media::AudioParameters& OutputFormat() const; | 103 const media::AudioParameters& OutputFormat() const; |
101 | 104 |
102 // Accessor to check if the audio processing is enabled or not. | 105 // Accessor to check if the audio processing is enabled or not. |
103 bool has_audio_processing() const { return audio_processing_ != NULL; } | 106 bool has_audio_processing() const { return audio_processing_ != NULL; } |
104 | 107 |
105 // AecDumpMessageFilter::AecDumpDelegate implementation. | 108 // AecDumpMessageFilter::AecDumpDelegate implementation. |
106 // Called on the main render thread. | 109 // Called on the main render thread. |
107 virtual void OnAecDumpFile( | 110 virtual void OnAecDumpFile( |
108 const IPC::PlatformFileForTransit& file_handle) OVERRIDE; | 111 const IPC::PlatformFileForTransit& file_handle) OVERRIDE; |
109 virtual void OnDisableAecDump() OVERRIDE; | 112 virtual void OnDisableAecDump() OVERRIDE; |
110 virtual void OnIpcClosing() OVERRIDE; | 113 virtual void OnIpcClosing() OVERRIDE; |
111 | 114 |
112 protected: | 115 protected: |
113 friend class base::RefCountedThreadSafe<MediaStreamAudioProcessor>; | 116 friend class base::RefCountedThreadSafe<MediaStreamAudioProcessor>; |
114 virtual ~MediaStreamAudioProcessor(); | 117 virtual ~MediaStreamAudioProcessor(); |
115 | 118 |
116 private: | 119 private: |
117 friend class MediaStreamAudioProcessorTest; | 120 friend class MediaStreamAudioProcessorTest; |
118 FRIEND_TEST_ALL_PREFIXES(MediaStreamAudioProcessorTest, | 121 FRIEND_TEST_ALL_PREFIXES(MediaStreamAudioProcessorTest, |
119 GetAecDumpMessageFilter); | 122 GetAecDumpMessageFilter); |
120 | 123 |
121 class MediaStreamAudioConverter; | |
122 | |
123 // WebRtcPlayoutDataSource::Sink implementation. | 124 // WebRtcPlayoutDataSource::Sink implementation. |
124 virtual void OnPlayoutData(media::AudioBus* audio_bus, | 125 virtual void OnPlayoutData(media::AudioBus* audio_bus, |
125 int sample_rate, | 126 int sample_rate, |
126 int audio_delay_milliseconds) OVERRIDE; | 127 int audio_delay_milliseconds) OVERRIDE; |
127 virtual void OnPlayoutDataSourceChanged() OVERRIDE; | 128 virtual void OnPlayoutDataSourceChanged() OVERRIDE; |
128 | 129 |
129 // webrtc::AudioProcessorInterface implementation. | 130 // webrtc::AudioProcessorInterface implementation. |
130 // This method is called on the libjingle thread. | 131 // This method is called on the libjingle thread. |
131 virtual void GetStats(AudioProcessorStats* stats) OVERRIDE; | 132 virtual void GetStats(AudioProcessorStats* stats) OVERRIDE; |
132 | 133 |
133 // Helper to initialize the WebRtc AudioProcessing. | 134 // Helper to initialize the WebRtc AudioProcessing. |
134 void InitializeAudioProcessingModule( | 135 void InitializeAudioProcessingModule( |
135 const blink::WebMediaConstraints& constraints, int effects); | 136 const blink::WebMediaConstraints& constraints, int effects); |
136 | 137 |
137 // Helper to initialize the capture converter. | 138 // Helper to initialize the capture converter. |
138 void InitializeCaptureConverter(const media::AudioParameters& source_params); | 139 void InitializeCaptureFifo(const media::AudioParameters& input_format); |
139 | 140 |
140 // Helper to initialize the render converter. | 141 // Helper to initialize the render converter. |
141 void InitializeRenderConverterIfNeeded(int sample_rate, | 142 void InitializeRenderFifoIfNeeded(int sample_rate, |
142 int number_of_channels, | 143 int number_of_channels, |
143 int frames_per_buffer); | 144 int frames_per_buffer); |
144 | 145 |
145 // Called by ProcessAndConsumeData(). | 146 // Called by ProcessAndConsumeData(). |
146 // Returns the new microphone volume in the range of [0, 255]. | 147 // Returns the new microphone volume in the range of [0, 255]. |
147 // When the volume does not need to be updated, it returns 0. | 148 // When the volume does not need to be updated, it returns 0. |
148 int ProcessData(webrtc::AudioFrame* audio_frame, | 149 int ProcessData(const float* const* process_ptrs, |
| 150 int process_frames, |
149 base::TimeDelta capture_delay, | 151 base::TimeDelta capture_delay, |
150 int volume, | 152 int volume, |
151 bool key_pressed); | 153 bool key_pressed, |
| 154 float* const* output_ptrs); |
152 | 155 |
153 // Cached value for the render delay latency. This member is accessed by | 156 // Cached value for the render delay latency. This member is accessed by |
154 // both the capture audio thread and the render audio thread. | 157 // both the capture audio thread and the render audio thread. |
155 base::subtle::Atomic32 render_delay_ms_; | 158 base::subtle::Atomic32 render_delay_ms_; |
156 | 159 |
157 // webrtc::AudioProcessing module which does AEC, AGC, NS, HighPass filter, | 160 // Module to handle processing and format conversion. |
158 // ..etc. | |
159 scoped_ptr<webrtc::AudioProcessing> audio_processing_; | 161 scoped_ptr<webrtc::AudioProcessing> audio_processing_; |
160 | 162 |
161 // Converter used for the down-mixing and resampling of the capture data. | 163 // FIFO to provide 10 ms capture chunks. |
162 scoped_ptr<MediaStreamAudioConverter> capture_converter_; | 164 scoped_ptr<MediaStreamAudioFifo> capture_fifo_; |
| 165 // Receives processing output. |
| 166 scoped_ptr<MediaStreamAudioBus> output_bus_; |
| 167 // Receives interleaved int16 data for output. |
| 168 scoped_ptr<int16[]> output_data_; |
163 | 169 |
164 // AudioFrame used to hold the output of |capture_converter_|. | 170 // FIFO to provide 10 ms render chunks when the AEC is enabled. |
165 webrtc::AudioFrame capture_frame_; | 171 scoped_ptr<MediaStreamAudioFifo> render_fifo_; |
166 | 172 |
167 // Converter used for the down-mixing and resampling of the render data when | 173 // These are mutated on the main render thread in OnCaptureFormatChanged(). |
168 // the AEC is enabled. | 174 // The caller guarantees this does not run concurrently with accesses on the |
169 scoped_ptr<MediaStreamAudioConverter> render_converter_; | 175 // capture audio thread. |
170 | 176 media::AudioParameters input_format_; |
171 // AudioFrame used to hold the output of |render_converter_|. | 177 media::AudioParameters output_format_; |
172 webrtc::AudioFrame render_frame_; | 178 // Only used on the render audio thread. |
173 | 179 media::AudioParameters render_format_; |
174 // Data bus to help converting interleaved data to an AudioBus. | |
175 scoped_ptr<media::AudioBus> render_data_bus_; | |
176 | 180 |
177 // Raw pointer to the WebRtcPlayoutDataSource, which is valid for the | 181 // Raw pointer to the WebRtcPlayoutDataSource, which is valid for the |
178 // lifetime of RenderThread. | 182 // lifetime of RenderThread. |
179 WebRtcPlayoutDataSource* playout_data_source_; | 183 WebRtcPlayoutDataSource* playout_data_source_; |
180 | 184 |
181 // Used to DCHECK that the destructor is called on the main render thread. | 185 // Used to DCHECK that some methods are called on the main render thread. |
182 base::ThreadChecker main_thread_checker_; | 186 base::ThreadChecker main_thread_checker_; |
183 | |
184 // Used to DCHECK that some methods are called on the capture audio thread. | 187 // Used to DCHECK that some methods are called on the capture audio thread. |
185 base::ThreadChecker capture_thread_checker_; | 188 base::ThreadChecker capture_thread_checker_; |
186 | 189 // Used to DCHECK that some methods are called on the render audio thread. |
187 // Used to DCHECK that PushRenderData() is called on the render audio thread. | |
188 base::ThreadChecker render_thread_checker_; | 190 base::ThreadChecker render_thread_checker_; |
189 | 191 |
190 // Flag to enable the stereo channels mirroring. | 192 // Flag to enable stereo channel mirroring. |
191 bool audio_mirroring_; | 193 bool audio_mirroring_; |
192 | 194 |
193 // Used by the typing detection. | |
194 scoped_ptr<webrtc::TypingDetection> typing_detector_; | 195 scoped_ptr<webrtc::TypingDetection> typing_detector_; |
195 | |
196 // This flag is used to show the result of typing detection. | 196 // This flag is used to show the result of typing detection. |
197 // It can be accessed by the capture audio thread and by the libjingle thread | 197 // It can be accessed by the capture audio thread and by the libjingle thread |
198 // which calls GetStats(). | 198 // which calls GetStats(). |
199 base::subtle::Atomic32 typing_detected_; | 199 base::subtle::Atomic32 typing_detected_; |
200 | 200 |
201 // Communication with browser for AEC dump. | 201 // Communication with browser for AEC dump. |
202 scoped_refptr<AecDumpMessageFilter> aec_dump_message_filter_; | 202 scoped_refptr<AecDumpMessageFilter> aec_dump_message_filter_; |
203 | 203 |
204 // Flag to avoid executing Stop() more than once. | 204 // Flag to avoid executing Stop() more than once. |
205 bool stopped_; | 205 bool stopped_; |
206 }; | 206 }; |
207 | 207 |
208 } // namespace content | 208 } // namespace content |
209 | 209 |
210 #endif // CONTENT_RENDERER_MEDIA_MEDIA_STREAM_AUDIO_PROCESSOR_H_ | 210 #endif // CONTENT_RENDERER_MEDIA_MEDIA_STREAM_AUDIO_PROCESSOR_H_ |
OLD | NEW |