OLD | NEW |
1 // Copyright 2013 The Chromium Authors. All rights reserved. | 1 // Copyright 2013 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef CONTENT_RENDERER_MEDIA_MEDIA_STREAM_AUDIO_PROCESSOR_H_ | 5 #ifndef CONTENT_RENDERER_MEDIA_MEDIA_STREAM_AUDIO_PROCESSOR_H_ |
6 #define CONTENT_RENDERER_MEDIA_MEDIA_STREAM_AUDIO_PROCESSOR_H_ | 6 #define CONTENT_RENDERER_MEDIA_MEDIA_STREAM_AUDIO_PROCESSOR_H_ |
7 | 7 |
8 #include "base/atomicops.h" | 8 #include "base/atomicops.h" |
9 #include "base/synchronization/lock.h" | 9 #include "base/synchronization/lock.h" |
10 #include "base/threading/thread_checker.h" | 10 #include "base/threading/thread_checker.h" |
11 #include "base/time/time.h" | 11 #include "base/time/time.h" |
12 #include "content/common/content_export.h" | 12 #include "content/common/content_export.h" |
13 #include "content/renderer/media/webrtc_audio_device_impl.h" | 13 #include "content/renderer/media/webrtc_audio_device_impl.h" |
14 #include "media/base/audio_converter.h" | 14 #include "media/base/audio_converter.h" |
| 15 #include "third_party/libjingle/source/talk/app/webrtc/mediastreaminterface.h" |
15 #include "third_party/webrtc/modules/audio_processing/include/audio_processing.h
" | 16 #include "third_party/webrtc/modules/audio_processing/include/audio_processing.h
" |
16 #include "third_party/webrtc/modules/interface/module_common_types.h" | 17 #include "third_party/webrtc/modules/interface/module_common_types.h" |
17 | 18 |
18 namespace blink { | 19 namespace blink { |
19 class WebMediaConstraints; | 20 class WebMediaConstraints; |
20 } | 21 } |
21 | 22 |
22 namespace media { | 23 namespace media { |
23 class AudioBus; | 24 class AudioBus; |
24 class AudioFifo; | 25 class AudioFifo; |
25 class AudioParameters; | 26 class AudioParameters; |
26 } // namespace media | 27 } // namespace media |
27 | 28 |
28 namespace webrtc { | 29 namespace webrtc { |
29 class AudioFrame; | 30 class AudioFrame; |
30 class TypingDetection; | 31 class TypingDetection; |
31 } | 32 } |
32 | 33 |
33 namespace content { | 34 namespace content { |
34 | 35 |
35 class RTCMediaConstraints; | 36 class RTCMediaConstraints; |
36 | 37 |
| 38 using webrtc::AudioProcessorInterface; |
| 39 |
37 // This class owns an object of webrtc::AudioProcessing which contains signal | 40 // This class owns an object of webrtc::AudioProcessing which contains signal |
38 // processing components like AGC, AEC and NS. It enables the components based | 41 // processing components like AGC, AEC and NS. It enables the components based |
39 // on the getUserMedia constraints, processes the data and outputs it in | 42 // on the getUserMedia constraints, processes the data and outputs it in |
40 // 10 ms chunks. | 43 // 10 ms chunks. |
41 class CONTENT_EXPORT MediaStreamAudioProcessor : | 44 class CONTENT_EXPORT MediaStreamAudioProcessor : |
42 public base::RefCountedThreadSafe<MediaStreamAudioProcessor>, | 45 NON_EXPORTED_BASE(public WebRtcPlayoutDataSource::Sink), |
43 NON_EXPORTED_BASE(public WebRtcPlayoutDataSource::Sink) { | 46 NON_EXPORTED_BASE(public AudioProcessorInterface) { |
44 public: | 47 public: |
45 // |playout_data_source| is used to register this class as a sink to the | 48 // |playout_data_source| is used to register this class as a sink to the |
46 // WebRtc playout data for processing AEC. If clients do not enable AEC, | 49 // WebRtc playout data for processing AEC. If clients do not enable AEC, |
47 // |playout_data_source| won't be used. | 50 // |playout_data_source| won't be used. |
48 MediaStreamAudioProcessor(const media::AudioParameters& source_params, | 51 MediaStreamAudioProcessor(const media::AudioParameters& source_params, |
49 const blink::WebMediaConstraints& constraints, | 52 const blink::WebMediaConstraints& constraints, |
50 int effects, | 53 int effects, |
51 WebRtcPlayoutDataSource* playout_data_source); | 54 WebRtcPlayoutDataSource* playout_data_source); |
52 | 55 |
53 // Pushes capture data in |audio_source| to the internal FIFO. | 56 // Pushes capture data in |audio_source| to the internal FIFO. |
(...skipping 36 matching lines...) |
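For orientation, here is a minimal caller-side sketch of the interface described above. It is an illustration under assumptions, not code from this CL: the capture-push declaration is elided by the review tool, so the method name and arguments below are assumed.

// Illustrative sketch only. Construct the processor with the capture source
// parameters, the getUserMedia constraints and the playout data source used
// for AEC (clients that do not enable AEC can pass NULL).
scoped_refptr<MediaStreamAudioProcessor> processor(
    new MediaStreamAudioProcessor(source_params,
                                  constraints,
                                  0 /* effects */,
                                  playout_data_source));

// On the capture audio thread: hand each captured buffer to the internal
// FIFO; the processor re-blocks it into the 10 ms chunks that
// webrtc::AudioProcessing expects. PushCaptureData() is an assumed name.
processor->PushCaptureData(audio_source);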
90 private: | 93 private: |
91 friend class MediaStreamAudioProcessorTest; | 94 friend class MediaStreamAudioProcessorTest; |
92 | 95 |
93 class MediaStreamAudioConverter; | 96 class MediaStreamAudioConverter; |
94 | 97 |
95 // WebRtcPlayoutDataSource::Sink implementation. | 98 // WebRtcPlayoutDataSource::Sink implementation. |
96 virtual void OnPlayoutData(media::AudioBus* audio_bus, | 99 virtual void OnPlayoutData(media::AudioBus* audio_bus, |
97 int sample_rate, | 100 int sample_rate, |
98 int audio_delay_milliseconds) OVERRIDE; | 101 int audio_delay_milliseconds) OVERRIDE; |
99 | 102 |
| 103 // webrtc::AudioProcessorInterface implementation. |
| 104 // This method is called on the libjingle thread. |
| 105 virtual void GetStats(AudioProcessorStats* stats) OVERRIDE; |
| 106 |
100 // Helper to initialize the WebRtc AudioProcessing. | 107 // Helper to initialize the WebRtc AudioProcessing. |
101 void InitializeAudioProcessingModule( | 108 void InitializeAudioProcessingModule( |
102 const blink::WebMediaConstraints& constraints, int effects); | 109 const blink::WebMediaConstraints& constraints, int effects); |
103 | 110 |
104 // Helper to initialize the capture converter. | 111 // Helper to initialize the capture converter. |
105 void InitializeCaptureConverter(const media::AudioParameters& source_params); | 112 void InitializeCaptureConverter(const media::AudioParameters& source_params); |
106 | 113 |
107 // Helper to initialize the render converter. | 114 // Helper to initialize the render converter. |
108 void InitializeRenderConverterIfNeeded(int sample_rate, | 115 void InitializeRenderConverterIfNeeded(int sample_rate, |
109 int number_of_channels, | 116 int number_of_channels, |
(...skipping 46 matching lines...) |
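The playout hook above exists to drive AEC: OnPlayoutData() delivers the far-end (rendered) audio so the echo canceller can subtract it from the near-end (microphone) signal. The sketch below shows how the two webrtc::AudioProcessing streams typically fit together; the glue function and its frame arguments are hypothetical, only the APM calls come from the WebRTC API.

#include "third_party/webrtc/modules/audio_processing/include/audio_processing.h"
#include "third_party/webrtc/modules/interface/module_common_types.h"

// Hypothetical glue, not from this CL: push one 10 ms far-end frame and one
// 10 ms near-end frame through webrtc::AudioProcessing.
void ProcessOneChunk(webrtc::AudioProcessing* apm,
                     webrtc::AudioFrame* render_frame,   // playout audio
                     webrtc::AudioFrame* capture_frame,  // microphone audio
                     int total_delay_ms) {
  // Far-end path (render side): let the AEC analyze what was played out.
  apm->AnalyzeReverseStream(render_frame);

  // Near-end path (capture side): report the render-to-capture delay, then
  // run the enabled components (AEC, AGC, NS, ...) on the captured frame.
  apm->set_stream_delay_ms(total_delay_ms);
  apm->ProcessStream(capture_frame);
}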
156 | 163 |
157 // Used to DCHECK that PushRenderData() is called on the render audio thread. | 164 // Used to DCHECK that PushRenderData() is called on the render audio thread. |
158 base::ThreadChecker render_thread_checker_; | 165 base::ThreadChecker render_thread_checker_; |
159 | 166 |
160 // Flag to enable the stereo channels mirroring. | 167 // Flag to enable the stereo channels mirroring. |
161 bool audio_mirroring_; | 168 bool audio_mirroring_; |
162 | 169 |
163 // Used by the typing detection. | 170 // Used by the typing detection. |
164 scoped_ptr<webrtc::TypingDetection> typing_detector_; | 171 scoped_ptr<webrtc::TypingDetection> typing_detector_; |
165 | 172 |
166 // Result from the typing detection. | 173 // This flag is used to show the result of typing detection. |
167 bool typing_detected_; | 174 // It can be accessed by the capture audio thread and by the libjingle thread |
| 175 // which calls GetStats(). |
| 176 base::subtle::Atomic32 typing_detected_; |
168 }; | 177 }; |
169 | 178 |
170 } // namespace content | 179 } // namespace content |
171 | 180 |
172 #endif // CONTENT_RENDERER_MEDIA_MEDIA_STREAM_AUDIO_PROCESSOR_H_ | 181 #endif // CONTENT_RENDERER_MEDIA_MEDIA_STREAM_AUDIO_PROCESSOR_H_ |
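The |typing_detected_| change above is also why base/atomicops.h is included: the capture audio thread writes the flag while the libjingle thread reads it from GetStats(), so a plain bool would be a data race. A sketch of the intended access pattern using Chromium's atomicops (the surrounding statements are illustrative, and the stats field name is taken from webrtc::AudioProcessorInterface::AudioProcessorStats as an assumption):

// Capture audio thread: publish the latest typing-detection result.
base::subtle::Release_Store(&typing_detected_, typing_detected ? 1 : 0);

// libjingle thread, inside GetStats(): read it back without taking a lock.
stats->typing_noise_detected =
    (base::subtle::Acquire_Load(&typing_detected_) != 0);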