Chromium Code Reviews

Side by Side Diff: content/renderer/media/media_stream_impl.cc

Issue 294043015: Move creation of MediaStream renderers from MediaStreamImpl to MediaStreamRendererFactory (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src
Patch Set: Addressed comments. Fixed build when enable_webrtc = 0. Created 6 years, 7 months ago
 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "content/renderer/media/media_stream_impl.h"

 #include <utility>

 #include "base/logging.h"
 #include "base/strings/string_util.h"
 #include "base/strings/stringprintf.h"
 #include "base/strings/utf_string_conversions.h"
 #include "content/renderer/media/media_stream.h"
-#include "content/renderer/media/media_stream_audio_renderer.h"
 #include "content/renderer/media/media_stream_audio_source.h"
 #include "content/renderer/media/media_stream_dispatcher.h"
 #include "content/renderer/media/media_stream_video_capturer_source.h"
 #include "content/renderer/media/media_stream_video_track.h"
 #include "content/renderer/media/peer_connection_tracker.h"
-#include "content/renderer/media/rtc_video_renderer.h"
 #include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h"
 #include "content/renderer/media/webrtc_audio_capturer.h"
-#include "content/renderer/media/webrtc_audio_renderer.h"
-#include "content/renderer/media/webrtc_local_audio_renderer.h"
 #include "content/renderer/media/webrtc_logging.h"
 #include "content/renderer/media/webrtc_uma_histograms.h"
 #include "content/renderer/render_thread_impl.h"
-#include "media/base/audio_hardware_config.h"
 #include "third_party/WebKit/public/platform/WebMediaConstraints.h"
 #include "third_party/WebKit/public/platform/WebMediaStreamTrack.h"
 #include "third_party/WebKit/public/web/WebDocument.h"
 #include "third_party/WebKit/public/web/WebLocalFrame.h"
-#include "third_party/WebKit/public/web/WebMediaStreamRegistry.h"

 namespace content {
 namespace {

 void CopyStreamConstraints(const blink::WebMediaConstraints& constraints,
                            StreamOptions::Constraints* mandatory,
                            StreamOptions::Constraints* optional) {
   blink::WebVector<blink::WebMediaConstraint> mandatory_constraints;
   constraints.getMandatoryConstraints(mandatory_constraints);
   for (size_t i = 0; i < mandatory_constraints.size(); i++) {
     mandatory->push_back(StreamOptions::Constraint(
         mandatory_constraints[i].m_name.utf8(),
         mandatory_constraints[i].m_value.utf8()));
   }

   blink::WebVector<blink::WebMediaConstraint> optional_constraints;
   constraints.getOptionalConstraints(optional_constraints);
   for (size_t i = 0; i < optional_constraints.size(); i++) {
     optional->push_back(StreamOptions::Constraint(
         optional_constraints[i].m_name.utf8(),
         optional_constraints[i].m_value.utf8()));
   }
 }

 static int g_next_request_id = 0;

-void GetDefaultOutputDeviceParams(
-    int* output_sample_rate, int* output_buffer_size) {
-  // Fetch the default audio output hardware config.
-  media::AudioHardwareConfig* hardware_config =
-      RenderThreadImpl::current()->GetAudioHardwareConfig();
-  *output_sample_rate = hardware_config->GetOutputSampleRate();
-  *output_buffer_size = hardware_config->GetOutputBufferSize();
-}
-
 }  // namespace
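
CopyStreamConstraints() above flattens the Blink constraint sets into the name/value lists that MediaStreamImpl hands to MediaStreamDispatcher. A minimal usage sketch, assuming the caller holds a blink::WebUserMediaRequest (the local variable names here are illustrative, not taken from this file):

    // Illustrative only: flatten the audio constraints of a getUserMedia
    // request into the two StreamOptions::Constraints lists.
    StreamOptions::Constraints mandatory_audio;
    StreamOptions::Constraints optional_audio;
    CopyStreamConstraints(user_media_request.audioConstraints(),
                          &mandatory_audio, &optional_audio);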

 MediaStreamImpl::MediaStreamImpl(
     RenderView* render_view,
     MediaStreamDispatcher* media_stream_dispatcher,
     PeerConnectionDependencyFactory* dependency_factory)
     : RenderViewObserver(render_view),
       dependency_factory_(dependency_factory),
       media_stream_dispatcher_(media_stream_dispatcher) {
 }
(...skipping 99 matching lines...)
   DCHECK(CalledOnValidThread());
   UserMediaRequestInfo* request = FindUserMediaRequestInfo(user_media_request);
   if (request) {
     // We can't abort the stream generation process.
     // Instead, erase the request. Once the stream is generated we will stop the
     // stream if the request does not exist.
     DeleteUserMediaRequestInfo(request);
   }
 }

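Because stream generation cannot be aborted once it is in flight, the comment above implies a matching check on the reply path. A hypothetical sketch of that check (the actual OnStreamGenerated body is collapsed in this diff, so everything beyond the names visible in this file is an assumption):

    // Hypothetical: if the request was erased before the browser replied,
    // stop the freshly generated devices instead of exposing them to the page.
    if (!FindUserMediaRequestInfo(request_id)) {
      for (size_t i = 0; i < audio_array.size(); ++i)
        media_stream_dispatcher_->StopStreamDevice(audio_array[i]);
      for (size_t i = 0; i < video_array.size(); ++i)
        media_stream_dispatcher_->StopStreamDevice(video_array[i]);
      return;
    }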
-blink::WebMediaStream MediaStreamImpl::GetMediaStream(
-    const GURL& url) {
-  return blink::WebMediaStreamRegistry::lookupMediaStreamDescriptor(url);
-}
-
-bool MediaStreamImpl::IsMediaStream(const GURL& url) {
-  blink::WebMediaStream web_stream(
-      blink::WebMediaStreamRegistry::lookupMediaStreamDescriptor(url));
-
-  return (!web_stream.isNull() &&
-          (MediaStream::GetMediaStream(web_stream) != NULL));
-}
-
-scoped_refptr<VideoFrameProvider>
-MediaStreamImpl::GetVideoFrameProvider(
-    const GURL& url,
-    const base::Closure& error_cb,
-    const VideoFrameProvider::RepaintCB& repaint_cb) {
-  DCHECK(CalledOnValidThread());
-  blink::WebMediaStream web_stream(GetMediaStream(url));
-
-  if (web_stream.isNull() || !web_stream.extraData())
-    return NULL;  // This is not a valid stream.
-
-  DVLOG(1) << "MediaStreamImpl::GetVideoFrameProvider stream:"
-           << base::UTF16ToUTF8(web_stream.id());
-
-  blink::WebVector<blink::WebMediaStreamTrack> video_tracks;
-  web_stream.videoTracks(video_tracks);
-  if (video_tracks.isEmpty() ||
-      !MediaStreamVideoTrack::GetTrack(video_tracks[0])) {
-    return NULL;
-  }
-
-  return new RTCVideoRenderer(video_tracks[0], error_cb, repaint_cb);
-}
-
-scoped_refptr<MediaStreamAudioRenderer>
-MediaStreamImpl::GetAudioRenderer(const GURL& url, int render_frame_id) {
-  DCHECK(CalledOnValidThread());
-  blink::WebMediaStream web_stream(GetMediaStream(url));
-
-  if (web_stream.isNull() || !web_stream.extraData())
-    return NULL;  // This is not a valid stream.
-
-  DVLOG(1) << "MediaStreamImpl::GetAudioRenderer stream:"
-           << base::UTF16ToUTF8(web_stream.id());
-
-  MediaStream* native_stream = MediaStream::GetMediaStream(web_stream);
-
-  // TODO(tommi): MediaStreams do not have a 'local or not' concept.
-  // Tracks _might_, but even so, we need to fix the data flow so that
-  // it works the same way for all track implementations, local, remote or what
-  // have you.
-  // In this function, we should simply create a renderer object that receives
-  // and mixes audio from all the tracks that belong to the media stream.
-  // We need to remove the |is_local| property from MediaStreamExtraData since
-  // this concept is peerconnection specific (is a previously recorded stream
-  // local or remote?).
-  if (native_stream->is_local()) {
-    // Create the local audio renderer if the stream contains audio tracks.
-    blink::WebVector<blink::WebMediaStreamTrack> audio_tracks;
-    web_stream.audioTracks(audio_tracks);
-    if (audio_tracks.isEmpty())
-      return NULL;
-
-    // TODO(xians): Add support for the case where the media stream contains
-    // multiple audio tracks.
-    return CreateLocalAudioRenderer(audio_tracks[0], render_frame_id);
-  }
-
-  webrtc::MediaStreamInterface* stream =
-      MediaStream::GetAdapter(web_stream);
-  if (stream->GetAudioTracks().empty())
-    return NULL;
-
-  // This is a remote WebRTC media stream.
-  WebRtcAudioDeviceImpl* audio_device =
-      dependency_factory_->GetWebRtcAudioDevice();
-
-  // Share the existing renderer if any, otherwise create a new one.
-  scoped_refptr<WebRtcAudioRenderer> renderer(audio_device->renderer());
-  if (!renderer.get()) {
-    renderer = CreateRemoteAudioRenderer(stream, render_frame_id);
-
-    if (renderer.get() && !audio_device->SetAudioRenderer(renderer.get()))
-      renderer = NULL;
-  }
-
-  return renderer.get() ?
-      renderer->CreateSharedAudioRendererProxy(stream) : NULL;
-}
-
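The GURL-based lookup and renderer-creation entry points removed here are what move into the new factory. A minimal sketch of the interface they presumably map onto (class name assumed from content/renderer/media/media_stream_renderer_factory.h; the real declaration lives in that file, not in this diff):

    // Sketch only, mirroring the signatures deleted above.
    class MediaStreamRendererFactory {
     public:
      scoped_refptr<VideoFrameProvider> GetVideoFrameProvider(
          const GURL& url,
          const base::Closure& error_cb,
          const VideoFrameProvider::RepaintCB& repaint_cb);
      scoped_refptr<MediaStreamAudioRenderer> GetAudioRenderer(
          const GURL& url,
          int render_frame_id);
    };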
 // Callback from MediaStreamDispatcher.
 // The requested stream have been generated by the MediaStreamDispatcher.
 void MediaStreamImpl::OnStreamGenerated(
     int request_id,
     const std::string& label,
     const StreamDeviceInfoArray& audio_array,
     const StreamDeviceInfoArray& video_array) {
   DCHECK(CalledOnValidThread());
   DVLOG(1) << "MediaStreamImpl::OnStreamGenerated stream:" << label;

(...skipping 420 matching lines...)
   DVLOG(1) << "MediaStreamImpl::StopLocalSource("
            << "{device_id = " << source_impl->device_info().device.id << "})";

   if (notify_dispatcher)
     media_stream_dispatcher_->StopStreamDevice(source_impl->device_info());

   source_impl->ResetSourceStoppedCallback();
   source_impl->StopSource();
 }

-scoped_refptr<WebRtcAudioRenderer> MediaStreamImpl::CreateRemoteAudioRenderer(
-    webrtc::MediaStreamInterface* stream,
-    int render_frame_id) {
-  if (stream->GetAudioTracks().empty())
-    return NULL;
-
-  DVLOG(1) << "MediaStreamImpl::CreateRemoteAudioRenderer label:"
-           << stream->label();
-
-  // TODO(tommi): Change the default value of session_id to be
-  // StreamDeviceInfo::kNoId. Also update AudioOutputDevice etc.
-  int session_id = 0, sample_rate = 0, buffer_size = 0;
-  if (!GetAuthorizedDeviceInfoForAudioRenderer(&session_id,
-                                               &sample_rate,
-                                               &buffer_size)) {
-    GetDefaultOutputDeviceParams(&sample_rate, &buffer_size);
-  }
-
-  return new WebRtcAudioRenderer(
-      stream, RenderViewObserver::routing_id(), render_frame_id, session_id,
-      sample_rate, buffer_size);
-}
-
-scoped_refptr<WebRtcLocalAudioRenderer>
-MediaStreamImpl::CreateLocalAudioRenderer(
-    const blink::WebMediaStreamTrack& audio_track,
-    int render_frame_id) {
-  DVLOG(1) << "MediaStreamImpl::CreateLocalAudioRenderer";
-
-  int session_id = 0, sample_rate = 0, buffer_size = 0;
-  if (!GetAuthorizedDeviceInfoForAudioRenderer(&session_id,
-                                               &sample_rate,
-                                               &buffer_size)) {
-    GetDefaultOutputDeviceParams(&sample_rate, &buffer_size);
-  }
-
-  // Create a new WebRtcLocalAudioRenderer instance and connect it to the
-  // existing WebRtcAudioCapturer so that the renderer can use it as source.
-  return new WebRtcLocalAudioRenderer(
-      audio_track,
-      RenderViewObserver::routing_id(),
-      render_frame_id,
-      session_id,
-      buffer_size);
-}
-
-bool MediaStreamImpl::GetAuthorizedDeviceInfoForAudioRenderer(
-    int* session_id,
-    int* output_sample_rate,
-    int* output_frames_per_buffer) {
-  DCHECK(CalledOnValidThread());
-  WebRtcAudioDeviceImpl* audio_device =
-      dependency_factory_->GetWebRtcAudioDevice();
-  if (!audio_device)
-    return false;
-
-  return audio_device->GetAuthorizedDeviceInfoForAudioRenderer(
-      session_id, output_sample_rate, output_frames_per_buffer);
-}
-
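CreateRemoteAudioRenderer() and CreateLocalAudioRenderer() repeat the same fallback: use the authorized output device for the session if one exists, otherwise fall back to the default output hardware parameters. An illustrative way to express that shared step as a helper (not part of this patch; the method name is invented):

    // Illustrative helper, not in the CL: collapse the repeated fallback.
    void MediaStreamImpl::GetOutputDeviceParams(int* session_id,
                                                int* sample_rate,
                                                int* buffer_size) {
      if (!GetAuthorizedDeviceInfoForAudioRenderer(session_id, sample_rate,
                                                   buffer_size)) {
        // No authorized output device for this session; use the defaults.
        GetDefaultOutputDeviceParams(sample_rate, buffer_size);
      }
    }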
 MediaStreamImpl::UserMediaRequestInfo::UserMediaRequestInfo(
     int request_id,
     blink::WebFrame* frame,
     const blink::WebUserMediaRequest& request,
     bool enable_automatic_output_device_selection)
     : request_id(request_id),
       generated(false),
       enable_automatic_output_device_selection(
           enable_automatic_output_device_selection),
       frame(frame),
(...skipping 86 matching lines...)
            sources_.begin();
        it != sources_.end(); ++it) {
     if (source.id() == it->id()) {
       sources_.erase(it);
       return;
     }
   }
 }

 }  // namespace content
