OLD | NEW |
---|---|
1 // Copyright 2014 The Chromium Authors. All rights reserved. | 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/renderer/media/webrtc/webrtc_local_audio_track_adapter.h" | 5 #include "content/renderer/media/webrtc/webrtc_local_audio_track_adapter.h" |
6 | 6 |
7 #include "base/logging.h" | 7 #include "base/logging.h" |
8 #include "content/renderer/media/media_stream_audio_processor.h" | 8 #include "content/renderer/media/media_stream_audio_processor.h" |
9 #include "content/renderer/media/webrtc/peer_connection_dependency_factory.h" | |
9 #include "content/renderer/media/webrtc/webrtc_audio_sink_adapter.h" | 10 #include "content/renderer/media/webrtc/webrtc_audio_sink_adapter.h" |
10 #include "content/renderer/media/webrtc_local_audio_track.h" | 11 #include "content/renderer/media/webrtc_local_audio_track.h" |
12 #include "content/renderer/render_thread_impl.h" | |
11 #include "third_party/libjingle/source/talk/app/webrtc/mediastreaminterface.h" | 13 #include "third_party/libjingle/source/talk/app/webrtc/mediastreaminterface.h" |
12 | 14 |
13 namespace content { | 15 namespace content { |
14 | 16 |
15 static const char kAudioTrackKind[] = "audio"; | 17 static const char kAudioTrackKind[] = "audio"; |
16 | 18 |
17 scoped_refptr<WebRtcLocalAudioTrackAdapter> | 19 scoped_refptr<WebRtcLocalAudioTrackAdapter> |
18 WebRtcLocalAudioTrackAdapter::Create( | 20 WebRtcLocalAudioTrackAdapter::Create( |
19 const std::string& label, | 21 const std::string& label, |
20 webrtc::AudioSourceInterface* track_source) { | 22 webrtc::AudioSourceInterface* track_source) { |
23 scoped_refptr<base::MessageLoopProxy> signaling_thread; | |
24 RenderThreadImpl* current = RenderThreadImpl::current(); | |
25 if (current) { | |
26 PeerConnectionDependencyFactory* pc_factory = | |
27 current->GetPeerConnectionDependencyFactory(); | |
28 signaling_thread = pc_factory->GetWebRtcSignalingThread(); | |
  [Review comment] no longer working on chromium, 2014/10/30 11:55:14:
      WebRtcLocalAudioTrackAdapter is created by PeerCon…
  [Reply] tommi (sloooow) - chröme, 2014/10/30 20:37:36:
      Yes, I plan to do that and added a TODO to do that…
29 } | |
30 | |
31 LOG_IF(ERROR, !signaling_thread.get()) << "No signaling thread!"; | |
32 | |
21 rtc::RefCountedObject<WebRtcLocalAudioTrackAdapter>* adapter = | 33 rtc::RefCountedObject<WebRtcLocalAudioTrackAdapter>* adapter = |
22 new rtc::RefCountedObject<WebRtcLocalAudioTrackAdapter>( | 34 new rtc::RefCountedObject<WebRtcLocalAudioTrackAdapter>( |
23 label, track_source); | 35 label, track_source, signaling_thread); |
24 return adapter; | 36 return adapter; |
25 } | 37 } |
26 | 38 |
27 WebRtcLocalAudioTrackAdapter::WebRtcLocalAudioTrackAdapter( | 39 WebRtcLocalAudioTrackAdapter::WebRtcLocalAudioTrackAdapter( |
28 const std::string& label, | 40 const std::string& label, |
29 webrtc::AudioSourceInterface* track_source) | 41 webrtc::AudioSourceInterface* track_source, |
42 const scoped_refptr<base::SingleThreadTaskRunner>& signaling_thread) | |
30 : webrtc::MediaStreamTrack<webrtc::AudioTrackInterface>(label), | 43 : webrtc::MediaStreamTrack<webrtc::AudioTrackInterface>(label), |
31 owner_(NULL), | 44 owner_(NULL), |
32 track_source_(track_source), | 45 track_source_(track_source), |
46 signaling_thread_(signaling_thread), | |
33 signal_level_(0) { | 47 signal_level_(0) { |
34 signaling_thread_.DetachFromThread(); | 48 signaling_thread_checker_.DetachFromThread(); |
35 capture_thread_.DetachFromThread(); | 49 capture_thread_.DetachFromThread(); |
36 } | 50 } |
37 | 51 |
38 WebRtcLocalAudioTrackAdapter::~WebRtcLocalAudioTrackAdapter() { | 52 WebRtcLocalAudioTrackAdapter::~WebRtcLocalAudioTrackAdapter() { |
39 } | 53 } |
40 | 54 |
41 void WebRtcLocalAudioTrackAdapter::Initialize(WebRtcLocalAudioTrack* owner) { | 55 void WebRtcLocalAudioTrackAdapter::Initialize(WebRtcLocalAudioTrack* owner) { |
42 DCHECK(!owner_); | 56 DCHECK(!owner_); |
43 DCHECK(owner); | 57 DCHECK(owner); |
44 owner_ = owner; | 58 owner_ = owner; |
45 } | 59 } |
46 | 60 |
47 void WebRtcLocalAudioTrackAdapter::SetAudioProcessor( | 61 void WebRtcLocalAudioTrackAdapter::SetAudioProcessor( |
48 const scoped_refptr<MediaStreamAudioProcessor>& processor) { | 62 const scoped_refptr<MediaStreamAudioProcessor>& processor) { |
49 // SetAudioProcessor will be called when a new capture thread has been | 63 // SetAudioProcessor will be called when a new capture thread has been |
50 // initialized, so we need to detach from any current capture thread we're | 64 // initialized, so we need to detach from any current capture thread we're |
51 // checking and attach to the current one. | 65 // checking and attach to the current one. |
52 capture_thread_.DetachFromThread(); | 66 capture_thread_.DetachFromThread(); |
53 DCHECK(capture_thread_.CalledOnValidThread()); | 67 DCHECK(capture_thread_.CalledOnValidThread()); |
54 base::AutoLock auto_lock(lock_); | 68 base::AutoLock auto_lock(lock_); |
55 audio_processor_ = processor; | 69 audio_processor_ = processor; |
56 } | 70 } |
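The detach-then-check sequence in SetAudioProcessor() above relies on how base::ThreadChecker re-binds: after DetachFromThread(), the checker attaches to whichever thread calls CalledOnValidThread() next. A minimal standalone sketch of that behaviour (the class and method names below are illustrative only, not part of this CL):

    #include "base/logging.h"
    #include "base/threading/thread_checker.h"

    // Illustrative helper, not in this CL: shows ThreadChecker re-binding.
    class CaptureThreadBoundState {
     public:
      void OnNewCaptureThreadStarted() {
        // Forget the previously bound capture thread...
        thread_checker_.DetachFromThread();
        // ...and bind to the thread running this call from now on.
        DCHECK(thread_checker_.CalledOnValidThread());
      }

     private:
      base::ThreadChecker thread_checker_;
    };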
57 | 71 |
58 std::string WebRtcLocalAudioTrackAdapter::kind() const { | 72 std::string WebRtcLocalAudioTrackAdapter::kind() const { |
59 return kAudioTrackKind; | 73 return kAudioTrackKind; |
60 } | 74 } |
61 | 75 |
76 bool WebRtcLocalAudioTrackAdapter::set_enabled(bool enable) { | |
77 // If we're not called on the signaling thread, we need to post a task to | |
78 // change the state on the correct thread. | |
79 bool ret = true; | |
80 if (signaling_thread_.get() && !signaling_thread_->BelongsToCurrentThread()) { | |
81 signaling_thread_->PostTask(FROM_HERE, | |
82 base::Bind( | |
83 base::IgnoreResult(&WebRtcLocalAudioTrackAdapter::set_enabled), | |
84 this, enable)); | |
85 } else { | |
86 ret = webrtc::MediaStreamTrack<webrtc::AudioTrackInterface>:: | |
87 set_enabled(enable); | |
88 } | |
89 return ret; | |
  [Review comment] no longer working on chromium, 2014/10/30 11:55:14:
      how about:
      if (signaling_thread_.get() && !signali…
  [Reply] tommi (sloooow) - chröme, 2014/10/30 20:37:36:
      Done.
90 } | |
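The truncated exchange above seems to ask for an early-return shape instead of the if/else with a local |ret| flag. As a hedged sketch of one such variant (an assumption about the suggestion, built only from calls already present in this CL):

    bool WebRtcLocalAudioTrackAdapter::set_enabled(bool enable) {
      // Hop over to the signaling thread when called from any other thread.
      if (signaling_thread_.get() &&
          !signaling_thread_->BelongsToCurrentThread()) {
        signaling_thread_->PostTask(
            FROM_HERE,
            base::Bind(
                base::IgnoreResult(&WebRtcLocalAudioTrackAdapter::set_enabled),
                this, enable));
        return true;
      }
      return webrtc::MediaStreamTrack<webrtc::AudioTrackInterface>::set_enabled(
          enable);
    }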
91 | |
62 void WebRtcLocalAudioTrackAdapter::AddSink( | 92 void WebRtcLocalAudioTrackAdapter::AddSink( |
63 webrtc::AudioTrackSinkInterface* sink) { | 93 webrtc::AudioTrackSinkInterface* sink) { |
64 DCHECK(signaling_thread_.CalledOnValidThread()); | 94 DCHECK(signaling_thread_checker_.CalledOnValidThread()); |
65 DCHECK(sink); | 95 DCHECK(sink); |
66 #ifndef NDEBUG | 96 #ifndef NDEBUG |
67 // Verify that |sink| has not been added. | 97 // Verify that |sink| has not been added. |
68 for (ScopedVector<WebRtcAudioSinkAdapter>::const_iterator it = | 98 for (ScopedVector<WebRtcAudioSinkAdapter>::const_iterator it = |
69 sink_adapters_.begin(); | 99 sink_adapters_.begin(); |
70 it != sink_adapters_.end(); ++it) { | 100 it != sink_adapters_.end(); ++it) { |
71 DCHECK(!(*it)->IsEqual(sink)); | 101 DCHECK(!(*it)->IsEqual(sink)); |
72 } | 102 } |
73 #endif | 103 #endif |
74 | 104 |
75 scoped_ptr<WebRtcAudioSinkAdapter> adapter( | 105 scoped_ptr<WebRtcAudioSinkAdapter> adapter( |
76 new WebRtcAudioSinkAdapter(sink)); | 106 new WebRtcAudioSinkAdapter(sink)); |
77 owner_->AddSink(adapter.get()); | 107 owner_->AddSink(adapter.get()); |
78 sink_adapters_.push_back(adapter.release()); | 108 sink_adapters_.push_back(adapter.release()); |
79 } | 109 } |
80 | 110 |
81 void WebRtcLocalAudioTrackAdapter::RemoveSink( | 111 void WebRtcLocalAudioTrackAdapter::RemoveSink( |
82 webrtc::AudioTrackSinkInterface* sink) { | 112 webrtc::AudioTrackSinkInterface* sink) { |
83 DCHECK(signaling_thread_.CalledOnValidThread()); | 113 DCHECK(signaling_thread_checker_.CalledOnValidThread()); |
84 DCHECK(sink); | 114 DCHECK(sink); |
85 for (ScopedVector<WebRtcAudioSinkAdapter>::iterator it = | 115 for (ScopedVector<WebRtcAudioSinkAdapter>::iterator it = |
86 sink_adapters_.begin(); | 116 sink_adapters_.begin(); |
87 it != sink_adapters_.end(); ++it) { | 117 it != sink_adapters_.end(); ++it) { |
88 if ((*it)->IsEqual(sink)) { | 118 if ((*it)->IsEqual(sink)) { |
89 owner_->RemoveSink(*it); | 119 owner_->RemoveSink(*it); |
90 sink_adapters_.erase(it); | 120 sink_adapters_.erase(it); |
91 return; | 121 return; |
92 } | 122 } |
93 } | 123 } |
94 } | 124 } |
95 | 125 |
96 bool WebRtcLocalAudioTrackAdapter::GetSignalLevel(int* level) { | 126 bool WebRtcLocalAudioTrackAdapter::GetSignalLevel(int* level) { |
97 DCHECK(signaling_thread_.CalledOnValidThread()); | 127 DCHECK(signaling_thread_checker_.CalledOnValidThread()); |
98 | 128 |
99 // It is required to provide the signal level after audio processing. In | 129 // It is required to provide the signal level after audio processing. In |
100 // case the audio processing is not enabled for the track, we return | 130 // case the audio processing is not enabled for the track, we return |
101 // false here in order not to overwrite the value from WebRTC. | 131 // false here in order not to overwrite the value from WebRTC. |
102 // TODO(xians): Remove this after we turn on the APM in Chrome by default. | 132 // TODO(xians): Remove this after we turn on the APM in Chrome by default. |
103 // http://crbug/365672 . | 133 // http://crbug/365672 . |
104 if (!MediaStreamAudioProcessor::IsAudioTrackProcessingEnabled()) | 134 if (!MediaStreamAudioProcessor::IsAudioTrackProcessingEnabled()) |
105 return false; | 135 return false; |
106 | 136 |
107 base::AutoLock auto_lock(lock_); | 137 base::AutoLock auto_lock(lock_); |
108 *level = signal_level_; | 138 *level = signal_level_; |
109 return true; | 139 return true; |
110 } | 140 } |
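To make the contract above concrete: a false return means the caller should keep the level WebRTC computed on its own rather than take a value from this adapter. A hypothetical caller (variable names are illustrative) might do:

    int signal_level = level_reported_by_webrtc;  // WebRTC's own measurement
    int adapter_level = 0;
    // Only override when Chrome-side audio processing supplies a level.
    if (adapter->GetSignalLevel(&adapter_level))
      signal_level = adapter_level;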
111 | 141 |
112 rtc::scoped_refptr<webrtc::AudioProcessorInterface> | 142 rtc::scoped_refptr<webrtc::AudioProcessorInterface> |
113 WebRtcLocalAudioTrackAdapter::GetAudioProcessor() { | 143 WebRtcLocalAudioTrackAdapter::GetAudioProcessor() { |
114 DCHECK(signaling_thread_.CalledOnValidThread()); | 144 DCHECK(signaling_thread_checker_.CalledOnValidThread()); |
115 base::AutoLock auto_lock(lock_); | 145 base::AutoLock auto_lock(lock_); |
116 return audio_processor_.get(); | 146 return audio_processor_.get(); |
117 } | 147 } |
118 | 148 |
119 std::vector<int> WebRtcLocalAudioTrackAdapter::VoeChannels() const { | 149 std::vector<int> WebRtcLocalAudioTrackAdapter::VoeChannels() const { |
120 DCHECK(capture_thread_.CalledOnValidThread()); | 150 DCHECK(capture_thread_.CalledOnValidThread()); |
121 base::AutoLock auto_lock(lock_); | 151 base::AutoLock auto_lock(lock_); |
122 return voe_channels_; | 152 return voe_channels_; |
123 } | 153 } |
124 | 154 |
125 void WebRtcLocalAudioTrackAdapter::SetSignalLevel(int signal_level) { | 155 void WebRtcLocalAudioTrackAdapter::SetSignalLevel(int signal_level) { |
126 DCHECK(capture_thread_.CalledOnValidThread()); | 156 DCHECK(capture_thread_.CalledOnValidThread()); |
127 base::AutoLock auto_lock(lock_); | 157 base::AutoLock auto_lock(lock_); |
128 signal_level_ = signal_level; | 158 signal_level_ = signal_level; |
129 } | 159 } |
130 | 160 |
131 void WebRtcLocalAudioTrackAdapter::AddChannel(int channel_id) { | 161 void WebRtcLocalAudioTrackAdapter::AddChannel(int channel_id) { |
132 DCHECK(signaling_thread_.CalledOnValidThread()); | 162 DCHECK(signaling_thread_checker_.CalledOnValidThread()); |
133 DVLOG(1) << "WebRtcLocalAudioTrack::AddChannel(channel_id=" | 163 DVLOG(1) << "WebRtcLocalAudioTrack::AddChannel(channel_id=" |
134 << channel_id << ")"; | 164 << channel_id << ")"; |
135 base::AutoLock auto_lock(lock_); | 165 base::AutoLock auto_lock(lock_); |
136 if (std::find(voe_channels_.begin(), voe_channels_.end(), channel_id) != | 166 if (std::find(voe_channels_.begin(), voe_channels_.end(), channel_id) != |
137 voe_channels_.end()) { | 167 voe_channels_.end()) { |
138 // We need to handle the case when the same channel is connected to the | 168 // We need to handle the case when the same channel is connected to the |
139 // track more than once. | 169 // track more than once. |
140 return; | 170 return; |
141 } | 171 } |
142 | 172 |
143 voe_channels_.push_back(channel_id); | 173 voe_channels_.push_back(channel_id); |
144 } | 174 } |
145 | 175 |
146 void WebRtcLocalAudioTrackAdapter::RemoveChannel(int channel_id) { | 176 void WebRtcLocalAudioTrackAdapter::RemoveChannel(int channel_id) { |
147 DCHECK(signaling_thread_.CalledOnValidThread()); | 177 DCHECK(signaling_thread_checker_.CalledOnValidThread()); |
148 DVLOG(1) << "WebRtcLocalAudioTrack::RemoveChannel(channel_id=" | 178 DVLOG(1) << "WebRtcLocalAudioTrack::RemoveChannel(channel_id=" |
149 << channel_id << ")"; | 179 << channel_id << ")"; |
150 base::AutoLock auto_lock(lock_); | 180 base::AutoLock auto_lock(lock_); |
151 std::vector<int>::iterator iter = | 181 std::vector<int>::iterator iter = |
152 std::find(voe_channels_.begin(), voe_channels_.end(), channel_id); | 182 std::find(voe_channels_.begin(), voe_channels_.end(), channel_id); |
153 DCHECK(iter != voe_channels_.end()); | 183 DCHECK(iter != voe_channels_.end()); |
154 voe_channels_.erase(iter); | 184 voe_channels_.erase(iter); |
155 } | 185 } |
156 | 186 |
157 webrtc::AudioSourceInterface* WebRtcLocalAudioTrackAdapter::GetSource() const { | 187 webrtc::AudioSourceInterface* WebRtcLocalAudioTrackAdapter::GetSource() const { |
158 DCHECK(signaling_thread_.CalledOnValidThread()); | 188 DCHECK(signaling_thread_checker_.CalledOnValidThread()); |
159 return track_source_; | 189 return track_source_; |
160 } | 190 } |
161 | 191 |
162 cricket::AudioRenderer* WebRtcLocalAudioTrackAdapter::GetRenderer() { | 192 cricket::AudioRenderer* WebRtcLocalAudioTrackAdapter::GetRenderer() { |
163 // When the audio track processing is enabled, return a NULL so that capture | 193 // When the audio track processing is enabled, return a NULL so that capture |
164 // data goes through Libjingle LocalAudioTrackHandler::LocalAudioSinkAdapter | 194 // data goes through Libjingle LocalAudioTrackHandler::LocalAudioSinkAdapter |
165 // ==> WebRtcVoiceMediaChannel::WebRtcVoiceChannelRenderer ==> WebRTC. | 195 // ==> WebRtcVoiceMediaChannel::WebRtcVoiceChannelRenderer ==> WebRTC. |
166 // When the audio track processing is disabled, WebRtcLocalAudioTrackAdapter | 196 // When the audio track processing is disabled, WebRtcLocalAudioTrackAdapter |
167 // is used to pass the channel ids to WebRtcAudioDeviceImpl, the data flow | 197 // is used to pass the channel ids to WebRtcAudioDeviceImpl, the data flow |
168 // becomes WebRtcAudioDeviceImpl ==> WebRTC. | 198 // becomes WebRtcAudioDeviceImpl ==> WebRTC. |
169 // TODO(xians): Only return NULL after the APM in WebRTC is deprecated. | 199 // TODO(xians): Only return NULL after the APM in WebRTC is deprecated. |
170 // See See http://crbug/365672 for details. | 200 // See See http://crbug/365672 for details. |
171 return MediaStreamAudioProcessor::IsAudioTrackProcessingEnabled()? | 201 return MediaStreamAudioProcessor::IsAudioTrackProcessingEnabled()? |
172 NULL : this; | 202 NULL : this; |
173 } | 203 } |
174 | 204 |
175 } // namespace content | 205 } // namespace content |