OLD | NEW |
1 // Copyright 2013 The Chromium Authors. All rights reserved. | 1 // Copyright 2013 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef CONTENT_RENDERER_MEDIA_MEDIA_STREAM_AUDIO_PROCESSOR_H_ | 5 #ifndef CONTENT_RENDERER_MEDIA_MEDIA_STREAM_AUDIO_PROCESSOR_H_ |
6 #define CONTENT_RENDERER_MEDIA_MEDIA_STREAM_AUDIO_PROCESSOR_H_ | 6 #define CONTENT_RENDERER_MEDIA_MEDIA_STREAM_AUDIO_PROCESSOR_H_ |
7 | 7 |
8 #include "base/atomicops.h" | 8 #include "base/atomicops.h" |
9 #include "base/synchronization/lock.h" | 9 #include "base/synchronization/lock.h" |
10 #include "base/threading/thread_checker.h" | 10 #include "base/threading/thread_checker.h" |
11 #include "base/time/time.h" | 11 #include "base/time/time.h" |
12 #include "content/common/content_export.h" | 12 #include "content/common/content_export.h" |
| 13 #include "content/renderer/media/webrtc_audio_device_impl.h" |
13 #include "media/base/audio_converter.h" | 14 #include "media/base/audio_converter.h" |
14 #include "third_party/webrtc/modules/audio_processing/include/audio_processing.h" | 15 #include "third_party/webrtc/modules/audio_processing/include/audio_processing.h" |
15 #include "third_party/webrtc/modules/interface/module_common_types.h" | 16 #include "third_party/webrtc/modules/interface/module_common_types.h" |
16 | 17 |
17 namespace blink { | 18 namespace blink { |
18 class WebMediaConstraints; | 19 class WebMediaConstraints; |
19 } | 20 } |
20 | 21 |
21 namespace media { | 22 namespace media { |
22 class AudioBus; | 23 class AudioBus; |
23 class AudioFifo; | 24 class AudioFifo; |
24 class AudioParameters; | 25 class AudioParameters; |
25 } // namespace media | 26 } // namespace media |
26 | 27 |
27 namespace webrtc { | 28 namespace webrtc { |
28 class AudioFrame; | 29 class AudioFrame; |
29 } | 30 } |
30 | 31 |
31 namespace content { | 32 namespace content { |
32 | 33 |
33 class RTCMediaConstraints; | 34 class RTCMediaConstraints; |
34 | 35 |
35 // This class owns an object of webrtc::AudioProcessing which contains signal | 36 // This class owns an object of webrtc::AudioProcessing which contains signal |
36 // processing components like AGC, AEC and NS. It enables the components based | 37 // processing components like AGC, AEC and NS. It enables the components based |
37 // on the getUserMedia constraints, processes the data and outputs it in a unit | 38 // on the getUserMedia constraints, processes the data and outputs it in a unit |
38 // of 10 ms data chunk. | 39 // of 10 ms data chunk. |
39 class CONTENT_EXPORT MediaStreamAudioProcessor : | 40 class CONTENT_EXPORT MediaStreamAudioProcessor : |
40 public base::RefCountedThreadSafe<MediaStreamAudioProcessor> { | 41 public base::RefCountedThreadSafe<MediaStreamAudioProcessor>, |
| 42 public WebRtcAudioRendererSource { |
41 public: | 43 public: |
42 MediaStreamAudioProcessor(const media::AudioParameters& source_params, | 44 MediaStreamAudioProcessor(const media::AudioParameters& source_params, |
43 const blink::WebMediaConstraints& constraints, | 45 const blink::WebMediaConstraints& constraints, |
44 int effects); | 46 int effects, |
| 47 WebRtcAudioDeviceImpl* audio_device); |
45 | 48 |
46 // Pushes capture data in |audio_source| to the internal FIFO. | 49 // Pushes capture data in |audio_source| to the internal FIFO. |
47 // Called on the capture audio thread. | 50 // Called on the capture audio thread. |
48 void PushCaptureData(media::AudioBus* audio_source); | 51 void PushCaptureData(media::AudioBus* audio_source); |
49 | 52 |
50 // Push the render audio to webrtc::AudioProcessing for analysis. This is | |
51 // needed iff echo processing is enabled. | |
52 // |render_audio| is the pointer to the render audio data, its format | |
53 // is specified by |sample_rate|, |number_of_channels| and |number_of_frames|. | |
54 // Called on the render audio thread. | |
55 void PushRenderData(const int16* render_audio, | |
56 int sample_rate, | |
57 int number_of_channels, | |
58 int number_of_frames, | |
59 base::TimeDelta render_delay); | |
60 | |
61 // Processes a block of 10 ms data from the internal FIFO and outputs it via | 53 // Processes a block of 10 ms data from the internal FIFO and outputs it via |
62 // |out|. |out| is the address of the pointer that will be pointed to | 54 // |out|. |out| is the address of the pointer that will be pointed to |
63 // the post-processed data if the method is returning a true. The lifetime | 55 // the post-processed data if the method is returning a true. The lifetime |
64 // of the data represented by |out| is guaranteed to outlive the method call. | 56 // of the data represented by |out| is guaranteed to outlive the method call. |
65 // That also says *|out| won't change until this method is called again. | 57 // That also says *|out| won't change until this method is called again. |
66 // Returns true if the internal FIFO has at least 10 ms data for processing, | 58 // Returns true if the internal FIFO has at least 10 ms data for processing, |
67 // otherwise false. | 59 // otherwise false. |
68 // |capture_delay|, |volume| and |key_pressed| will be passed to | 60 // |capture_delay|, |volume| and |key_pressed| will be passed to |
69 // webrtc::AudioProcessing to help processing the data. | 61 // webrtc::AudioProcessing to help processing the data. |
70 // Called on the capture audio thread. | 62 // Called on the capture audio thread. |
(...skipping 12 matching lines...) Expand all Loading... |
83 // Accessor to check if the audio processing is enabled or not. | 75 // Accessor to check if the audio processing is enabled or not. |
84 bool has_audio_processing() const { return audio_processing_ != NULL; } | 76 bool has_audio_processing() const { return audio_processing_ != NULL; } |
85 | 77 |
86 protected: | 78 protected: |
87 friend class base::RefCountedThreadSafe<MediaStreamAudioProcessor>; | 79 friend class base::RefCountedThreadSafe<MediaStreamAudioProcessor>; |
88 virtual ~MediaStreamAudioProcessor(); | 80 virtual ~MediaStreamAudioProcessor(); |
89 | 81 |
90 private: | 82 private: |
91 class MediaStreamAudioConverter; | 83 class MediaStreamAudioConverter; |
92 | 84 |
| 85 // WebRtcAudioRendererSource implementation. |
| 86 virtual void RenderData(media::AudioBus* audio_bus, |
| 87 int sample_rate, |
| 88 int audio_delay_milliseconds) OVERRIDE; |
| 89 virtual void RemoveAudioRenderer(WebRtcAudioRenderer* renderer) OVERRIDE; |
| 90 |
93 // Helper to initialize the WebRtc AudioProcessing. | 91 // Helper to initialize the WebRtc AudioProcessing. |
94 void InitializeAudioProcessingModule( | 92 void InitializeAudioProcessingModule( |
95 const blink::WebMediaConstraints& constraints, int effects); | 93 const blink::WebMediaConstraints& constraints, int effects); |
96 | 94 |
97 // Helper to initialize the capture converter. | 95 // Helper to initialize the capture converter. |
98 void InitializeCaptureConverter(const media::AudioParameters& source_params); | 96 void InitializeCaptureConverter(const media::AudioParameters& source_params); |
99 | 97 |
100 // Helper to initialize the render converter. | 98 // Helper to initialize the render converter. |
101 void InitializeRenderConverterIfNeeded(int sample_rate, | 99 void InitializeRenderConverterIfNeeded(int sample_rate, |
102 int number_of_channels, | 100 int number_of_channels, |
(...skipping 25 matching lines...) Expand all Loading... |
128 // Converter used for the down-mixing and resampling of the render data when | 126 // Converter used for the down-mixing and resampling of the render data when |
129 // the AEC is enabled. | 127 // the AEC is enabled. |
130 scoped_ptr<MediaStreamAudioConverter> render_converter_; | 128 scoped_ptr<MediaStreamAudioConverter> render_converter_; |
131 | 129 |
132 // AudioFrame used to hold the output of |render_converter_|. | 130 // AudioFrame used to hold the output of |render_converter_|. |
133 webrtc::AudioFrame render_frame_; | 131 webrtc::AudioFrame render_frame_; |
134 | 132 |
135 // Data bus to help converting interleaved data to an AudioBus. | 133 // Data bus to help converting interleaved data to an AudioBus. |
136 scoped_ptr<media::AudioBus> render_data_bus_; | 134 scoped_ptr<media::AudioBus> render_data_bus_; |
137 | 135 |
138 // Used to DCHECK that some methods are called on the main render thread. | 136 // Raw pointer to the WebRtcAudioDeviceImpl, which is valid for the lifetime |
139 base::ThreadChecker main_thread_checker_; | 137 // of RenderThread. |
| 138 WebRtcAudioDeviceImpl* audio_device_; |
140 | 139 |
141 // Used to DCHECK that some methods are called on the capture audio thread. | 140 // Used to DCHECK that some methods are called on the capture audio thread. |
142 base::ThreadChecker capture_thread_checker_; | 141 base::ThreadChecker capture_thread_checker_; |
143 | 142 |
144 // Used to DCHECK that PushRenderData() is called on the render audio thread. | 143 // Used to DCHECK that PushRenderData() is called on the render audio thread. |
145 base::ThreadChecker render_thread_checker_; | 144 base::ThreadChecker render_thread_checker_; |
146 }; | 145 }; |
147 | 146 |
148 } // namespace content | 147 } // namespace content |
149 | 148 |
150 #endif // CONTENT_RENDERER_MEDIA_MEDIA_STREAM_AUDIO_PROCESSOR_H_ | 149 #endif // CONTENT_RENDERER_MEDIA_MEDIA_STREAM_AUDIO_PROCESSOR_H_ |
OLD | NEW |