| OLD | NEW |
| 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "content/renderer/media/media_stream_impl.h" | 5 #include "content/renderer/media/media_stream_renderer_factory.h" |
| 6 | 6 |
| 7 #include <utility> | |
| 8 | |
| 9 #include "base/logging.h" | |
| 10 #include "base/strings/string_util.h" | |
| 11 #include "base/strings/stringprintf.h" | |
| 12 #include "base/strings/utf_string_conversions.h" | 7 #include "base/strings/utf_string_conversions.h" |
| 13 #include "content/renderer/media/media_stream.h" | 8 #include "content/renderer/media/media_stream.h" |
| 14 #include "content/renderer/media/media_stream_audio_renderer.h" | |
| 15 #include "content/renderer/media/media_stream_audio_source.h" | |
| 16 #include "content/renderer/media/media_stream_dispatcher.h" | |
| 17 #include "content/renderer/media/media_stream_video_capturer_source.h" | |
| 18 #include "content/renderer/media/media_stream_video_track.h" | 9 #include "content/renderer/media/media_stream_video_track.h" |
| 19 #include "content/renderer/media/peer_connection_tracker.h" | |
| 20 #include "content/renderer/media/rtc_video_renderer.h" | 10 #include "content/renderer/media/rtc_video_renderer.h" |
| 21 #include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h" | 11 #include "content/renderer/media/webrtc/peer_connection_dependency_factory.h" |
| 22 #include "content/renderer/media/webrtc_audio_capturer.h" | |
| 23 #include "content/renderer/media/webrtc_audio_renderer.h" | 12 #include "content/renderer/media/webrtc_audio_renderer.h" |
| 24 #include "content/renderer/media/webrtc_local_audio_renderer.h" | 13 #include "content/renderer/media/webrtc_local_audio_renderer.h" |
| 25 #include "content/renderer/media/webrtc_logging.h" | |
| 26 #include "content/renderer/media/webrtc_uma_histograms.h" | |
| 27 #include "content/renderer/render_thread_impl.h" | 14 #include "content/renderer/render_thread_impl.h" |
| 28 #include "media/base/audio_hardware_config.h" | 15 #include "media/base/audio_hardware_config.h" |
| 29 #include "third_party/WebKit/public/platform/WebMediaConstraints.h" | 16 #include "third_party/WebKit/public/platform/WebMediaStream.h" |
| 30 #include "third_party/WebKit/public/platform/WebMediaStreamTrack.h" | 17 #include "third_party/WebKit/public/platform/WebURL.h" |
| 31 #include "third_party/WebKit/public/web/WebDocument.h" | |
| 32 #include "third_party/WebKit/public/web/WebLocalFrame.h" | |
| 33 #include "third_party/WebKit/public/web/WebMediaStreamRegistry.h" | 18 #include "third_party/WebKit/public/web/WebMediaStreamRegistry.h" |
| 19 #include "third_party/libjingle/source/talk/app/webrtc/mediastreaminterface.h" |
| 34 | 20 |
| 35 namespace content { | 21 namespace content { |
| 22 |
| 36 namespace { | 23 namespace { |
| 37 | 24 |
| 38 void CopyStreamConstraints(const blink::WebMediaConstraints& constraints, | 25 PeerConnectionDependencyFactory* GetPeerConnectionDependencyFactory() { |
| 39 StreamOptions::Constraints* mandatory, | 26 return RenderThreadImpl::current()->GetPeerConnectionDependencyFactory(); |
| 40 StreamOptions::Constraints* optional) { | |
| 41 blink::WebVector<blink::WebMediaConstraint> mandatory_constraints; | |
| 42 constraints.getMandatoryConstraints(mandatory_constraints); | |
| 43 for (size_t i = 0; i < mandatory_constraints.size(); i++) { | |
| 44 mandatory->push_back(StreamOptions::Constraint( | |
| 45 mandatory_constraints[i].m_name.utf8(), | |
| 46 mandatory_constraints[i].m_value.utf8())); | |
| 47 } | |
| 48 | |
| 49 blink::WebVector<blink::WebMediaConstraint> optional_constraints; | |
| 50 constraints.getOptionalConstraints(optional_constraints); | |
| 51 for (size_t i = 0; i < optional_constraints.size(); i++) { | |
| 52 optional->push_back(StreamOptions::Constraint( | |
| 53 optional_constraints[i].m_name.utf8(), | |
| 54 optional_constraints[i].m_value.utf8())); | |
| 55 } | |
| 56 } | 27 } |
| 57 | 28 |
| 58 static int g_next_request_id = 0; | |
| 59 | |
| 60 void GetDefaultOutputDeviceParams( | 29 void GetDefaultOutputDeviceParams( |
| 61 int* output_sample_rate, int* output_buffer_size) { | 30 int* output_sample_rate, int* output_buffer_size) { |
| 62 // Fetch the default audio output hardware config. | 31 // Fetch the default audio output hardware config. |
| 63 media::AudioHardwareConfig* hardware_config = | 32 media::AudioHardwareConfig* hardware_config = |
| 64 RenderThreadImpl::current()->GetAudioHardwareConfig(); | 33 RenderThreadImpl::current()->GetAudioHardwareConfig(); |
| 65 *output_sample_rate = hardware_config->GetOutputSampleRate(); | 34 *output_sample_rate = hardware_config->GetOutputSampleRate(); |
| 66 *output_buffer_size = hardware_config->GetOutputBufferSize(); | 35 *output_buffer_size = hardware_config->GetOutputBufferSize(); |
| 67 } | 36 } |
| 68 | 37 |
| 38 |
| 39 // Returns true and sets |session_id| (plus the matching output parameters) |
| 40 // if a single capture device is currently open, otherwise returns false. |
| 41 // This is used to pass on a session id to a webrtc audio renderer (either |
| 42 // local or remote), so that audio will be rendered to a matching output |
| 43 // device, should one exist. |
| 44 // Note that if more than one capture device is open, the function cannot |
| 45 // pick an appropriate device and returns false. |
| 46 bool GetAuthorizedDeviceInfoForAudioRenderer( |
| 47 int* session_id, |
| 48 int* output_sample_rate, |
| 49 int* output_frames_per_buffer) { |
| 50 WebRtcAudioDeviceImpl* audio_device = |
| 51 GetPeerConnectionDependencyFactory()->GetWebRtcAudioDevice(); |
| 52 if (!audio_device) |
| 53 return false; |
| 54 |
| 55 return audio_device->GetAuthorizedDeviceInfoForAudioRenderer( |
| 56 session_id, output_sample_rate, output_frames_per_buffer); |
| 57 } |
| 58 |
| 59 scoped_refptr<WebRtcAudioRenderer> CreateRemoteAudioRenderer( |
| 60 webrtc::MediaStreamInterface* stream, |
| 61 int routing_id, |
| 62 int render_frame_id) { |
| 63 if (stream->GetAudioTracks().empty()) |
| 64 return NULL; |
| 65 |
| 66 DVLOG(1) << "MediaStreamRendererFactory::CreateRemoteAudioRenderer label:" |
| 67 << stream->label(); |
| 68 |
| 69 // TODO(tommi): Change the default value of session_id to be |
| 70 // StreamDeviceInfo::kNoId. Also update AudioOutputDevice etc. |
| 71 int session_id = 0, sample_rate = 0, buffer_size = 0; |
| 72 if (!GetAuthorizedDeviceInfoForAudioRenderer(&session_id, |
| 73 &sample_rate, |
| 74 &buffer_size)) { |
| 75 GetDefaultOutputDeviceParams(&sample_rate, &buffer_size); |
| 76 } |
| 77 |
| 78 return new WebRtcAudioRenderer( |
| 79 stream, routing_id, render_frame_id, session_id, |
| 80 sample_rate, buffer_size); |
| 81 } |
| 82 |
| 83 |
| 84 scoped_refptr<WebRtcLocalAudioRenderer> CreateLocalAudioRenderer( |
| 85 const blink::WebMediaStreamTrack& audio_track, |
| 86 int routing_id, |
| 87 int render_frame_id) { |
| 88 DVLOG(1) << "MediaStreamRendererFactory::CreateLocalAudioRenderer"; |
| 89 |
| 90 int session_id = 0, sample_rate = 0, buffer_size = 0; |
| 91 if (!GetAuthorizedDeviceInfoForAudioRenderer(&session_id, |
| 92 &sample_rate, |
| 93 &buffer_size)) { |
| 94 GetDefaultOutputDeviceParams(&sample_rate, &buffer_size); |
| 95 } |
| 96 |
| 97 // Create a new WebRtcLocalAudioRenderer instance and connect it to the |
| 98 // existing WebRtcAudioCapturer so that the renderer can use it as source. |
| 99 return new WebRtcLocalAudioRenderer( |
| 100 audio_track, |
| 101 routing_id, |
| 102 render_frame_id, |
| 103 session_id, |
| 104 buffer_size); |
| 105 } |
| 106 |
| 69 } // namespace | 107 } // namespace |
| 70 | 108 |
| 71 MediaStreamImpl::MediaStreamImpl( | |
| 72 RenderView* render_view, | |
| 73 MediaStreamDispatcher* media_stream_dispatcher, | |
| 74 PeerConnectionDependencyFactory* dependency_factory) | |
| 75 : RenderViewObserver(render_view), | |
| 76 dependency_factory_(dependency_factory), | |
| 77 media_stream_dispatcher_(media_stream_dispatcher) { | |
| 78 } | |
| 79 | |
| 80 MediaStreamImpl::~MediaStreamImpl() { | |
| 81 } | |
| 82 | |
| 83 void MediaStreamImpl::requestUserMedia( | |
| 84 const blink::WebUserMediaRequest& user_media_request) { | |
| 85 // Save histogram data so we can see how much GetUserMedia is used. | |
| 86 // The histogram counts the number of calls to the JS API | |
| 87 // webGetUserMedia. | |
| 88 UpdateWebRTCMethodCount(WEBKIT_GET_USER_MEDIA); | |
| 89 DCHECK(CalledOnValidThread()); | |
| 90 | |
| 91 if (RenderThreadImpl::current()) { | |
| 92 RenderThreadImpl::current()->peer_connection_tracker()->TrackGetUserMedia( | |
| 93 user_media_request); | |
| 94 } | |
| 95 | |
| 96 int request_id = g_next_request_id++; | |
| 97 StreamOptions options; | |
| 98 blink::WebLocalFrame* frame = NULL; | |
| 99 GURL security_origin; | |
| 100 bool enable_automatic_output_device_selection = false; | |
| 101 | |
| 102 // |user_media_request| can't be mocked. So in order to test at all we check | |
| 103 // if it isNull. | |
| 104 if (user_media_request.isNull()) { | |
| 105 // We are in a test. | |
| 106 options.audio_requested = true; | |
| 107 options.video_requested = true; | |
| 108 } else { | |
| 109 if (user_media_request.audio()) { | |
| 110 options.audio_requested = true; | |
| 111 CopyStreamConstraints(user_media_request.audioConstraints(), | |
| 112 &options.mandatory_audio, | |
| 113 &options.optional_audio); | |
| 114 | |
| 115 // Check if this input device should be used to select a matching output | |
| 116 // device for audio rendering. | |
| 117 std::string enable; | |
| 118 if (options.GetFirstAudioConstraintByName( | |
| 119 kMediaStreamRenderToAssociatedSink, &enable, NULL) && | |
| 120 LowerCaseEqualsASCII(enable, "true")) { | |
| 121 enable_automatic_output_device_selection = true; | |
| 122 } | |
| 123 } | |
| 124 if (user_media_request.video()) { | |
| 125 options.video_requested = true; | |
| 126 CopyStreamConstraints(user_media_request.videoConstraints(), | |
| 127 &options.mandatory_video, | |
| 128 &options.optional_video); | |
| 129 } | |
| 130 | |
| 131 security_origin = GURL(user_media_request.securityOrigin().toString()); | |
| 132 // Get the WebFrame that requested a MediaStream. | |
| 133 // The frame is needed to tell the MediaStreamDispatcher when a stream goes | |
| 134 // out of scope. | |
| 135 frame = user_media_request.ownerDocument().frame(); | |
| 136 DCHECK(frame); | |
| 137 } | |
| 138 | |
| 139 DVLOG(1) << "MediaStreamImpl::requestUserMedia(" << request_id << ", [ " | |
| 140 << "audio=" << (options.audio_requested) | |
| 141 << " select associated sink: " | |
| 142 << enable_automatic_output_device_selection | |
| 143 << ", video=" << (options.video_requested) << " ], " | |
| 144 << security_origin.spec() << ")"; | |
| 145 | |
| 146 std::string audio_device_id; | |
| 147 bool mandatory_audio; | |
| 148 options.GetFirstAudioConstraintByName(kMediaStreamSourceInfoId, | |
| 149 &audio_device_id, &mandatory_audio); | |
| 150 std::string video_device_id; | |
| 151 bool mandatory_video; | |
| 152 options.GetFirstVideoConstraintByName(kMediaStreamSourceInfoId, | |
| 153 &video_device_id, &mandatory_video); | |
| 154 | |
| 155 WebRtcLogMessage(base::StringPrintf( | |
| 156 "MSI::requestUserMedia. request_id=%d" | |
| 157 ", audio source id=%s mandatory= %s " | |
| 158 ", video source id=%s mandatory= %s", | |
| 159 request_id, | |
| 160 audio_device_id.c_str(), | |
| 161 mandatory_audio ? "true":"false", | |
| 162 video_device_id.c_str(), | |
| 163 mandatory_video ? "true":"false")); | |
| 164 | |
| 165 user_media_requests_.push_back( | |
| 166 new UserMediaRequestInfo(request_id, frame, user_media_request, | |
| 167 enable_automatic_output_device_selection)); | |
| 168 | |
| 169 media_stream_dispatcher_->GenerateStream( | |
| 170 request_id, | |
| 171 AsWeakPtr(), | |
| 172 options, | |
| 173 security_origin); | |
| 174 } | |
| 175 | |
| 176 void MediaStreamImpl::cancelUserMediaRequest( | |
| 177 const blink::WebUserMediaRequest& user_media_request) { | |
| 178 DCHECK(CalledOnValidThread()); | |
| 179 UserMediaRequestInfo* request = FindUserMediaRequestInfo(user_media_request); | |
| 180 if (request) { | |
| 181 // We can't abort the stream generation process. | |
| 182 // Instead, erase the request. Once the stream is generated we will stop the | |
| 183 // stream if the request does not exist. | |
| 184 DeleteUserMediaRequestInfo(request); | |
| 185 } | |
| 186 } | |
| 187 | |
| 188 blink::WebMediaStream MediaStreamImpl::GetMediaStream( | |
| 189 const GURL& url) { | |
| 190 return blink::WebMediaStreamRegistry::lookupMediaStreamDescriptor(url); | |
| 191 } | |
| 192 | |
| 193 bool MediaStreamImpl::IsMediaStream(const GURL& url) { | |
| 194 blink::WebMediaStream web_stream( | |
| 195 blink::WebMediaStreamRegistry::lookupMediaStreamDescriptor(url)); | |
| 196 | |
| 197 return (!web_stream.isNull() && | |
| 198 (MediaStream::GetMediaStream(web_stream) != NULL)); | |
| 199 } | |
| 200 | |
| 201 scoped_refptr<VideoFrameProvider> | 109 scoped_refptr<VideoFrameProvider> |
| 202 MediaStreamImpl::GetVideoFrameProvider( | 110 MediaStreamRendererFactory::GetVideoFrameProvider( |
| 203 const GURL& url, | 111 const GURL& url, |
| 204 const base::Closure& error_cb, | 112 const base::Closure& error_cb, |
| 205 const VideoFrameProvider::RepaintCB& repaint_cb) { | 113 const VideoFrameProvider::RepaintCB& repaint_cb) { |
| 206 DCHECK(CalledOnValidThread()); | 114 blink::WebMediaStream web_stream = |
| 207 blink::WebMediaStream web_stream(GetMediaStream(url)); | 115 blink::WebMediaStreamRegistry::lookupMediaStreamDescriptor(url); |
| 116 DCHECK(!web_stream.isNull()); |
| 208 | 117 |
| 209 if (web_stream.isNull() || !web_stream.extraData()) | 118 DVLOG(1) << "MediaStreamRendererFactory::GetVideoFrameProvider stream:" |
| 210 return NULL; // This is not a valid stream. | |
| 211 | |
| 212 DVLOG(1) << "MediaStreamImpl::GetVideoFrameProvider stream:" | |
| 213 << base::UTF16ToUTF8(web_stream.id()); | 119 << base::UTF16ToUTF8(web_stream.id()); |
| 214 | 120 |
| 215 blink::WebVector<blink::WebMediaStreamTrack> video_tracks; | 121 blink::WebVector<blink::WebMediaStreamTrack> video_tracks; |
| 216 web_stream.videoTracks(video_tracks); | 122 web_stream.videoTracks(video_tracks); |
| 217 if (video_tracks.isEmpty() || | 123 if (video_tracks.isEmpty() || |
| 218 !MediaStreamVideoTrack::GetTrack(video_tracks[0])) { | 124 !MediaStreamVideoTrack::GetTrack(video_tracks[0])) { |
| 219 return NULL; | 125 return NULL; |
| 220 } | 126 } |
| 221 | 127 |
| 222 return new RTCVideoRenderer(video_tracks[0], error_cb, repaint_cb); | 128 return new RTCVideoRenderer(video_tracks[0], error_cb, repaint_cb); |
| 223 } | 129 } |
| 224 | 130 |
| 225 scoped_refptr<MediaStreamAudioRenderer> | 131 scoped_refptr<MediaStreamAudioRenderer> |
| 226 MediaStreamImpl::GetAudioRenderer(const GURL& url, int render_frame_id) { | 132 MediaStreamRendererFactory::GetAudioRenderer( |
| 227 DCHECK(CalledOnValidThread()); | 133 const GURL& url, int render_view_id, int render_frame_id) { |
| 228 blink::WebMediaStream web_stream(GetMediaStream(url)); | 134 blink::WebMediaStream web_stream = |
| 135 blink::WebMediaStreamRegistry::lookupMediaStreamDescriptor(url); |
| 229 | 136 |
| 230 if (web_stream.isNull() || !web_stream.extraData()) | 137 if (web_stream.isNull() || !web_stream.extraData()) |
| 231 return NULL; // This is not a valid stream. | 138 return NULL; // This is not a valid stream. |
| 232 | 139 |
| 233 DVLOG(1) << "MediaStreamImpl::GetAudioRenderer stream:" | 140 DVLOG(1) << "MediaStreamRendererFactory::GetAudioRenderer stream:" |
| 234 << base::UTF16ToUTF8(web_stream.id()); | 141 << base::UTF16ToUTF8(web_stream.id()); |
| 235 | 142 |
| 236 MediaStream* native_stream = MediaStream::GetMediaStream(web_stream); | 143 MediaStream* native_stream = MediaStream::GetMediaStream(web_stream); |
| 237 | 144 |
| 238 // TODO(tommi): MediaStreams do not have a 'local or not' concept. | 145 // TODO(tommi): MediaStreams do not have a 'local or not' concept. |
| 239 // Tracks _might_, but even so, we need to fix the data flow so that | 146 // Tracks _might_, but even so, we need to fix the data flow so that |
| 240 // it works the same way for all track implementations, local, remote or what | 147 // it works the same way for all track implementations, local, remote or what |
| 241 // have you. | 148 // have you. |
| 242 // In this function, we should simply create a renderer object that receives | 149 // In this function, we should simply create a renderer object that receives |
| 243 // and mixes audio from all the tracks that belong to the media stream. | 150 // and mixes audio from all the tracks that belong to the media stream. |
| 244 // We need to remove the |is_local| property from MediaStreamExtraData since | 151 // We need to remove the |is_local| property from MediaStreamExtraData since |
| 245 // this concept is peerconnection specific (is a previously recorded stream | 152 // this concept is peerconnection specific (is a previously recorded stream |
| 246 // local or remote?). | 153 // local or remote?). |
| 247 if (native_stream->is_local()) { | 154 if (native_stream->is_local()) { |
| 248 // Create the local audio renderer if the stream contains audio tracks. | 155 // Create the local audio renderer if the stream contains audio tracks. |
| 249 blink::WebVector<blink::WebMediaStreamTrack> audio_tracks; | 156 blink::WebVector<blink::WebMediaStreamTrack> audio_tracks; |
| 250 web_stream.audioTracks(audio_tracks); | 157 web_stream.audioTracks(audio_tracks); |
| 251 if (audio_tracks.isEmpty()) | 158 if (audio_tracks.isEmpty()) |
| 252 return NULL; | 159 return NULL; |
| 253 | 160 |
| 254 // TODO(xians): Add support for the case where the media stream contains | 161 // TODO(xians): Add support for the case where the media stream contains |
| 255 // multiple audio tracks. | 162 // multiple audio tracks. |
| 256 return CreateLocalAudioRenderer(audio_tracks[0], render_frame_id); | 163 return CreateLocalAudioRenderer(audio_tracks[0], render_view_id, |
| 164 render_frame_id); |
| 257 } | 165 } |
| 258 | 166 |
| 259 webrtc::MediaStreamInterface* stream = | 167 webrtc::MediaStreamInterface* stream = |
| 260 MediaStream::GetAdapter(web_stream); | 168 MediaStream::GetAdapter(web_stream); |
| 261 if (stream->GetAudioTracks().empty()) | 169 if (stream->GetAudioTracks().empty()) |
| 262 return NULL; | 170 return NULL; |
| 263 | 171 |
| 264 // This is a remote WebRTC media stream. | 172 // This is a remote WebRTC media stream. |
| 265 WebRtcAudioDeviceImpl* audio_device = | 173 WebRtcAudioDeviceImpl* audio_device = |
| 266 dependency_factory_->GetWebRtcAudioDevice(); | 174 GetPeerConnectionDependencyFactory()->GetWebRtcAudioDevice(); |
| 267 | 175 |
| 268 // Share the existing renderer if any, otherwise create a new one. | 176 // Share the existing renderer if any, otherwise create a new one. |
| 269 scoped_refptr<WebRtcAudioRenderer> renderer(audio_device->renderer()); | 177 scoped_refptr<WebRtcAudioRenderer> renderer(audio_device->renderer()); |
| 270 if (!renderer.get()) { | 178 if (!renderer.get()) { |
| 271 renderer = CreateRemoteAudioRenderer(stream, render_frame_id); | 179 renderer = CreateRemoteAudioRenderer(stream, render_view_id, |
| 180 render_frame_id); |
| 272 | 181 |
| 273 if (renderer.get() && !audio_device->SetAudioRenderer(renderer.get())) | 182 if (renderer.get() && !audio_device->SetAudioRenderer(renderer.get())) |
| 274 renderer = NULL; | 183 renderer = NULL; |
| 275 } | 184 } |
| 276 | 185 |
| 277 return renderer.get() ? | 186 return renderer.get() ? |
| 278 renderer->CreateSharedAudioRendererProxy(stream) : NULL; | 187 renderer->CreateSharedAudioRendererProxy(stream) : NULL; |
| 279 } | 188 } |
| 280 | 189 |
| 281 // Callback from MediaStreamDispatcher. | |
| 282 // The requested stream have been generated by the MediaStreamDispatcher. | |
| 283 void MediaStreamImpl::OnStreamGenerated( | |
| 284 int request_id, | |
| 285 const std::string& label, | |
| 286 const StreamDeviceInfoArray& audio_array, | |
| 287 const StreamDeviceInfoArray& video_array) { | |
| 288 DCHECK(CalledOnValidThread()); | |
| 289 DVLOG(1) << "MediaStreamImpl::OnStreamGenerated stream:" << label; | |
| 290 | |
| 291 UserMediaRequestInfo* request_info = FindUserMediaRequestInfo(request_id); | |
| 292 if (!request_info) { | |
| 293 // This can happen if the request is canceled or the frame reloads while | |
| 294 // MediaStreamDispatcher is processing the request. | |
| 295 // Only stop the device if the device is not used in another MediaStream. | |
| 296 for (StreamDeviceInfoArray::const_iterator device_it = audio_array.begin(); | |
| 297 device_it != audio_array.end(); ++device_it) { | |
| 298 if (!FindLocalSource(*device_it)) | |
| 299 media_stream_dispatcher_->StopStreamDevice(*device_it); | |
| 300 } | |
| 301 | |
| 302 for (StreamDeviceInfoArray::const_iterator device_it = video_array.begin(); | |
| 303 device_it != video_array.end(); ++device_it) { | |
| 304 if (!FindLocalSource(*device_it)) | |
| 305 media_stream_dispatcher_->StopStreamDevice(*device_it); | |
| 306 } | |
| 307 | |
| 308 DVLOG(1) << "Request ID not found"; | |
| 309 return; | |
| 310 } | |
| 311 request_info->generated = true; | |
| 312 | |
| 313 // WebUserMediaRequest don't have an implementation in unit tests. | |
| 314 // Therefore we need to check for isNull here and initialize the | |
| 315 // constraints. | |
| 316 blink::WebUserMediaRequest* request = &(request_info->request); | |
| 317 blink::WebMediaConstraints audio_constraints; | |
| 318 blink::WebMediaConstraints video_constraints; | |
| 319 if (request->isNull()) { | |
| 320 audio_constraints.initialize(); | |
| 321 video_constraints.initialize(); | |
| 322 } else { | |
| 323 audio_constraints = request->audioConstraints(); | |
| 324 video_constraints = request->videoConstraints(); | |
| 325 } | |
| 326 | |
| 327 blink::WebVector<blink::WebMediaStreamTrack> audio_track_vector( | |
| 328 audio_array.size()); | |
| 329 CreateAudioTracks(audio_array, audio_constraints, &audio_track_vector, | |
| 330 request_info); | |
| 331 | |
| 332 blink::WebVector<blink::WebMediaStreamTrack> video_track_vector( | |
| 333 video_array.size()); | |
| 334 CreateVideoTracks(video_array, video_constraints, &video_track_vector, | |
| 335 request_info); | |
| 336 | |
| 337 blink::WebString webkit_id = base::UTF8ToUTF16(label); | |
| 338 blink::WebMediaStream* web_stream = &(request_info->web_stream); | |
| 339 | |
| 340 web_stream->initialize(webkit_id, audio_track_vector, | |
| 341 video_track_vector); | |
| 342 web_stream->setExtraData( | |
| 343 new MediaStream( | |
| 344 *web_stream)); | |
| 345 | |
| 346 // Wait for the tracks to be started successfully or to fail. | |
| 347 request_info->CallbackOnTracksStarted( | |
| 348 base::Bind(&MediaStreamImpl::OnCreateNativeTracksCompleted, AsWeakPtr())); | |
| 349 } | |
| 350 | |
| 351 // Callback from MediaStreamDispatcher. | |
| 352 // The requested stream failed to be generated. | |
| 353 void MediaStreamImpl::OnStreamGenerationFailed( | |
| 354 int request_id, | |
| 355 content::MediaStreamRequestResult result) { | |
| 356 DCHECK(CalledOnValidThread()); | |
| 357 DVLOG(1) << "MediaStreamImpl::OnStreamGenerationFailed(" | |
| 358 << request_id << ")"; | |
| 359 UserMediaRequestInfo* request_info = FindUserMediaRequestInfo(request_id); | |
| 360 if (!request_info) { | |
| 361 // This can happen if the request is canceled or the frame reloads while | |
| 362 // MediaStreamDispatcher is processing the request. | |
| 363 DVLOG(1) << "Request ID not found"; | |
| 364 return; | |
| 365 } | |
| 366 | |
| 367 GetUserMediaRequestFailed(&request_info->request, result); | |
| 368 DeleteUserMediaRequestInfo(request_info); | |
| 369 } | |
| 370 | |
| 371 // Callback from MediaStreamDispatcher. | |
| 372 // The browser process has stopped a device used by a MediaStream. | |
| 373 void MediaStreamImpl::OnDeviceStopped( | |
| 374 const std::string& label, | |
| 375 const StreamDeviceInfo& device_info) { | |
| 376 DCHECK(CalledOnValidThread()); | |
| 377 DVLOG(1) << "MediaStreamImpl::OnDeviceStopped(" | |
| 378 << "{device_id = " << device_info.device.id << "})"; | |
| 379 | |
| 380 const blink::WebMediaStreamSource* source_ptr = FindLocalSource(device_info); | |
| 381 if (!source_ptr) { | |
| 382 // This happens if the same device is used in several guM requests or | |
| 383 // if a user happen stop a track from JS at the same time | |
| 384 // as the underlying media device is unplugged from the system. | |
| 385 return; | |
| 386 } | |
| 387 // By creating |source| it is guaranteed that the blink::WebMediaStreamSource | |
| 388 // object is valid during the cleanup. | |
| 389 blink::WebMediaStreamSource source(*source_ptr); | |
| 390 StopLocalSource(source, false); | |
| 391 | |
| 392 for (LocalStreamSources::iterator device_it = local_sources_.begin(); | |
| 393 device_it != local_sources_.end(); ++device_it) { | |
| 394 if (device_it->source.id() == source.id()) { | |
| 395 local_sources_.erase(device_it); | |
| 396 break; | |
| 397 } | |
| 398 } | |
| 399 } | |
| 400 | |
| 401 void MediaStreamImpl::InitializeSourceObject( | |
| 402 const StreamDeviceInfo& device, | |
| 403 blink::WebMediaStreamSource::Type type, | |
| 404 const blink::WebMediaConstraints& constraints, | |
| 405 blink::WebFrame* frame, | |
| 406 blink::WebMediaStreamSource* webkit_source) { | |
| 407 const blink::WebMediaStreamSource* existing_source = | |
| 408 FindLocalSource(device); | |
| 409 if (existing_source) { | |
| 410 *webkit_source = *existing_source; | |
| 411 DVLOG(1) << "Source already exist. Reusing source with id " | |
| 412 << webkit_source->id().utf8(); | |
| 413 return; | |
| 414 } | |
| 415 | |
| 416 webkit_source->initialize( | |
| 417 base::UTF8ToUTF16(device.device.id), | |
| 418 type, | |
| 419 base::UTF8ToUTF16(device.device.name)); | |
| 420 | |
| 421 DVLOG(1) << "Initialize source object :" | |
| 422 << "id = " << webkit_source->id().utf8() | |
| 423 << ", name = " << webkit_source->name().utf8(); | |
| 424 | |
| 425 if (type == blink::WebMediaStreamSource::TypeVideo) { | |
| 426 webkit_source->setExtraData( | |
| 427 CreateVideoSource( | |
| 428 device, | |
| 429 base::Bind(&MediaStreamImpl::OnLocalSourceStopped, AsWeakPtr()))); | |
| 430 } else { | |
| 431 DCHECK_EQ(blink::WebMediaStreamSource::TypeAudio, type); | |
| 432 MediaStreamAudioSource* audio_source( | |
| 433 new MediaStreamAudioSource( | |
| 434 RenderViewObserver::routing_id(), | |
| 435 device, | |
| 436 base::Bind(&MediaStreamImpl::OnLocalSourceStopped, AsWeakPtr()), | |
| 437 dependency_factory_)); | |
| 438 webkit_source->setExtraData(audio_source); | |
| 439 } | |
| 440 local_sources_.push_back(LocalStreamSource(frame, *webkit_source)); | |
| 441 } | |
| 442 | |
| 443 MediaStreamVideoSource* MediaStreamImpl::CreateVideoSource( | |
| 444 const StreamDeviceInfo& device, | |
| 445 const MediaStreamSource::SourceStoppedCallback& stop_callback) { | |
| 446 return new content::MediaStreamVideoCapturerSource( | |
| 447 device, | |
| 448 stop_callback, | |
| 449 new VideoCapturerDelegate(device)); | |
| 450 } | |
| 451 | |
| 452 void MediaStreamImpl::CreateVideoTracks( | |
| 453 const StreamDeviceInfoArray& devices, | |
| 454 const blink::WebMediaConstraints& constraints, | |
| 455 blink::WebVector<blink::WebMediaStreamTrack>* webkit_tracks, | |
| 456 UserMediaRequestInfo* request) { | |
| 457 DCHECK_EQ(devices.size(), webkit_tracks->size()); | |
| 458 | |
| 459 for (size_t i = 0; i < devices.size(); ++i) { | |
| 460 blink::WebMediaStreamSource webkit_source; | |
| 461 InitializeSourceObject(devices[i], | |
| 462 blink::WebMediaStreamSource::TypeVideo, | |
| 463 constraints, | |
| 464 request->frame, | |
| 465 &webkit_source); | |
| 466 (*webkit_tracks)[i] = | |
| 467 request->CreateAndStartVideoTrack(webkit_source, constraints); | |
| 468 } | |
| 469 } | |
| 470 | |
| 471 void MediaStreamImpl::CreateAudioTracks( | |
| 472 const StreamDeviceInfoArray& devices, | |
| 473 const blink::WebMediaConstraints& constraints, | |
| 474 blink::WebVector<blink::WebMediaStreamTrack>* webkit_tracks, | |
| 475 UserMediaRequestInfo* request) { | |
| 476 DCHECK_EQ(devices.size(), webkit_tracks->size()); | |
| 477 | |
| 478 // Log the device names for this request. | |
| 479 for (StreamDeviceInfoArray::const_iterator it = devices.begin(); | |
| 480 it != devices.end(); ++it) { | |
| 481 WebRtcLogMessage(base::StringPrintf( | |
| 482 "Generated media stream for request id %d contains audio device name" | |
| 483 " \"%s\"", | |
| 484 request->request_id, | |
| 485 it->device.name.c_str())); | |
| 486 } | |
| 487 | |
| 488 StreamDeviceInfoArray overridden_audio_array = devices; | |
| 489 if (!request->enable_automatic_output_device_selection) { | |
| 490 // If the GetUserMedia request did not explicitly set the constraint | |
| 491 // kMediaStreamRenderToAssociatedSink, the output device parameters must | |
| 492 // be removed. | |
| 493 for (StreamDeviceInfoArray::iterator it = overridden_audio_array.begin(); | |
| 494 it != overridden_audio_array.end(); ++it) { | |
| 495 it->device.matched_output_device_id = ""; | |
| 496 it->device.matched_output = MediaStreamDevice::AudioDeviceParameters(); | |
| 497 } | |
| 498 } | |
| 499 | |
| 500 for (size_t i = 0; i < overridden_audio_array.size(); ++i) { | |
| 501 blink::WebMediaStreamSource webkit_source; | |
| 502 InitializeSourceObject(overridden_audio_array[i], | |
| 503 blink::WebMediaStreamSource::TypeAudio, | |
| 504 constraints, | |
| 505 request->frame, | |
| 506 &webkit_source); | |
| 507 (*webkit_tracks)[i].initialize(webkit_source); | |
| 508 request->StartAudioTrack((*webkit_tracks)[i], constraints); | |
| 509 } | |
| 510 } | |
| 511 | |
| 512 void MediaStreamImpl::OnCreateNativeTracksCompleted( | |
| 513 UserMediaRequestInfo* request, | |
| 514 content::MediaStreamRequestResult result) { | |
| 515 DVLOG(1) << "MediaStreamImpl::OnCreateNativeTracksComplete(" | |
| 516 << "{request_id = " << request->request_id << "} " | |
| 517 << "{result = " << result << "})"; | |
| 518 if (result == content::MEDIA_DEVICE_OK) | |
| 519 GetUserMediaRequestSucceeded(request->web_stream, &request->request); | |
| 520 else | |
| 521 GetUserMediaRequestFailed(&request->request, result); | |
| 522 | |
| 523 DeleteUserMediaRequestInfo(request); | |
| 524 } | |
| 525 | |
| 526 void MediaStreamImpl::OnDevicesEnumerated( | |
| 527 int request_id, | |
| 528 const StreamDeviceInfoArray& device_array) { | |
| 529 DVLOG(1) << "MediaStreamImpl::OnDevicesEnumerated(" | |
| 530 << request_id << ")"; | |
| 531 NOTIMPLEMENTED(); | |
| 532 } | |
| 533 | |
| 534 void MediaStreamImpl::OnDeviceOpened( | |
| 535 int request_id, | |
| 536 const std::string& label, | |
| 537 const StreamDeviceInfo& video_device) { | |
| 538 DVLOG(1) << "MediaStreamImpl::OnDeviceOpened(" | |
| 539 << request_id << ", " << label << ")"; | |
| 540 NOTIMPLEMENTED(); | |
| 541 } | |
| 542 | |
| 543 void MediaStreamImpl::OnDeviceOpenFailed(int request_id) { | |
| 544 DVLOG(1) << "MediaStreamImpl::VideoDeviceOpenFailed(" | |
| 545 << request_id << ")"; | |
| 546 NOTIMPLEMENTED(); | |
| 547 } | |
| 548 | |
| 549 void MediaStreamImpl::GetUserMediaRequestSucceeded( | |
| 550 const blink::WebMediaStream& stream, | |
| 551 blink::WebUserMediaRequest* request_info) { | |
| 552 DVLOG(1) << "MediaStreamImpl::GetUserMediaRequestSucceeded"; | |
| 553 request_info->requestSucceeded(stream); | |
| 554 } | |
| 555 | |
| 556 void MediaStreamImpl::GetUserMediaRequestFailed( | |
| 557 blink::WebUserMediaRequest* request_info, | |
| 558 content::MediaStreamRequestResult result) { | |
| 559 switch (result) { | |
| 560 case MEDIA_DEVICE_OK: | |
| 561 NOTREACHED(); | |
| 562 break; | |
| 563 case MEDIA_DEVICE_PERMISSION_DENIED: | |
| 564 request_info->requestDenied(); | |
| 565 break; | |
| 566 case MEDIA_DEVICE_PERMISSION_DISMISSED: | |
| 567 request_info->requestFailedUASpecific("PermissionDismissedError"); | |
| 568 break; | |
| 569 case MEDIA_DEVICE_INVALID_STATE: | |
| 570 request_info->requestFailedUASpecific("InvalidStateError"); | |
| 571 break; | |
| 572 case MEDIA_DEVICE_NO_HARDWARE: | |
| 573 request_info->requestFailedUASpecific("DevicesNotFoundError"); | |
| 574 break; | |
| 575 case MEDIA_DEVICE_INVALID_SECURITY_ORIGIN: | |
| 576 request_info->requestFailedUASpecific("InvalidSecurityOriginError"); | |
| 577 break; | |
| 578 case MEDIA_DEVICE_TAB_CAPTURE_FAILURE: | |
| 579 request_info->requestFailedUASpecific("TabCaptureError"); | |
| 580 break; | |
| 581 case MEDIA_DEVICE_SCREEN_CAPTURE_FAILURE: | |
| 582 request_info->requestFailedUASpecific("ScreenCaptureError"); | |
| 583 break; | |
| 584 case MEDIA_DEVICE_CAPTURE_FAILURE: | |
| 585 request_info->requestFailedUASpecific("DeviceCaptureError"); | |
| 586 break; | |
| 587 case MEDIA_DEVICE_TRACK_START_FAILURE: | |
| 588 request_info->requestFailedUASpecific("TrackStartError"); | |
| 589 break; | |
| 590 default: | |
| 591 request_info->requestFailed(); | |
| 592 break; | |
| 593 } | |
| 594 } | |
| 595 | |
| 596 const blink::WebMediaStreamSource* MediaStreamImpl::FindLocalSource( | |
| 597 const StreamDeviceInfo& device) const { | |
| 598 for (LocalStreamSources::const_iterator it = local_sources_.begin(); | |
| 599 it != local_sources_.end(); ++it) { | |
| 600 MediaStreamSource* source = | |
| 601 static_cast<MediaStreamSource*>(it->source.extraData()); | |
| 602 const StreamDeviceInfo& active_device = source->device_info(); | |
| 603 if (active_device.device.id == device.device.id && | |
| 604 active_device.device.type == device.device.type && | |
| 605 active_device.session_id == device.session_id) { | |
| 606 return &it->source; | |
| 607 } | |
| 608 } | |
| 609 return NULL; | |
| 610 } | |
| 611 | |
| 612 MediaStreamImpl::UserMediaRequestInfo* | |
| 613 MediaStreamImpl::FindUserMediaRequestInfo(int request_id) { | |
| 614 UserMediaRequests::iterator it = user_media_requests_.begin(); | |
| 615 for (; it != user_media_requests_.end(); ++it) { | |
| 616 if ((*it)->request_id == request_id) | |
| 617 return (*it); | |
| 618 } | |
| 619 return NULL; | |
| 620 } | |
| 621 | |
| 622 MediaStreamImpl::UserMediaRequestInfo* | |
| 623 MediaStreamImpl::FindUserMediaRequestInfo( | |
| 624 const blink::WebUserMediaRequest& request) { | |
| 625 UserMediaRequests::iterator it = user_media_requests_.begin(); | |
| 626 for (; it != user_media_requests_.end(); ++it) { | |
| 627 if ((*it)->request == request) | |
| 628 return (*it); | |
| 629 } | |
| 630 return NULL; | |
| 631 } | |
| 632 | |
| 633 void MediaStreamImpl::DeleteUserMediaRequestInfo( | |
| 634 UserMediaRequestInfo* request) { | |
| 635 UserMediaRequests::iterator it = user_media_requests_.begin(); | |
| 636 for (; it != user_media_requests_.end(); ++it) { | |
| 637 if ((*it) == request) { | |
| 638 user_media_requests_.erase(it); | |
| 639 return; | |
| 640 } | |
| 641 } | |
| 642 NOTREACHED(); | |
| 643 } | |
| 644 | |
| 645 void MediaStreamImpl::FrameDetached(blink::WebFrame* frame) { | |
| 646 // Do same thing as FrameWillClose. | |
| 647 FrameWillClose(frame); | |
| 648 } | |
| 649 | |
// Cancels pending user media requests and stops local sources that were
// created by |frame|, which is about to close.
void MediaStreamImpl::FrameWillClose(blink::WebFrame* frame) {
  // Loop through all UserMediaRequests and find the requests that belong to the
  // frame that is being closed.
  UserMediaRequests::iterator request_it = user_media_requests_.begin();
  while (request_it != user_media_requests_.end()) {
    if ((*request_it)->frame == frame) {
      DVLOG(1) << "MediaStreamImpl::FrameWillClose: "
               << "Cancel user media request " << (*request_it)->request_id;
      // If the request is not generated, it means that a request
      // has been sent to the MediaStreamDispatcher to generate a stream
      // but MediaStreamDispatcher has not yet responded and we need to cancel
      // the request.
      if (!(*request_it)->generated) {
        media_stream_dispatcher_->CancelGenerateStream(
            (*request_it)->request_id, AsWeakPtr());
      }
      // erase() returns the next valid iterator, so iteration stays safe
      // while elements are removed.
      request_it = user_media_requests_.erase(request_it);
    } else {
      ++request_it;
    }
  }

  // Loop through all current local sources and stop the sources that were
  // created by the frame that will be closed.
  LocalStreamSources::iterator sources_it = local_sources_.begin();
  while (sources_it != local_sources_.end()) {
    if (sources_it->frame == frame) {
      // |notify_dispatcher| is true so the browser side releases the device.
      StopLocalSource(sources_it->source, true);
      sources_it = local_sources_.erase(sources_it);
    } else {
      ++sources_it;
    }
  }
}
| 684 | |
| 685 void MediaStreamImpl::OnLocalSourceStopped( | |
| 686 const blink::WebMediaStreamSource& source) { | |
| 687 DCHECK(CalledOnValidThread()); | |
| 688 DVLOG(1) << "MediaStreamImpl::OnLocalSourceStopped"; | |
| 689 | |
| 690 bool device_found = false; | |
| 691 for (LocalStreamSources::iterator device_it = local_sources_.begin(); | |
| 692 device_it != local_sources_.end(); ++device_it) { | |
| 693 if (device_it->source.id() == source.id()) { | |
| 694 device_found = true; | |
| 695 local_sources_.erase(device_it); | |
| 696 break; | |
| 697 } | |
| 698 } | |
| 699 CHECK(device_found); | |
| 700 | |
| 701 MediaStreamSource* source_impl = | |
| 702 static_cast<MediaStreamSource*> (source.extraData()); | |
| 703 media_stream_dispatcher_->StopStreamDevice(source_impl->device_info()); | |
| 704 } | |
| 705 | |
| 706 void MediaStreamImpl::StopLocalSource( | |
| 707 const blink::WebMediaStreamSource& source, | |
| 708 bool notify_dispatcher) { | |
| 709 MediaStreamSource* source_impl = | |
| 710 static_cast<MediaStreamSource*> (source.extraData()); | |
| 711 DVLOG(1) << "MediaStreamImpl::StopLocalSource(" | |
| 712 << "{device_id = " << source_impl->device_info().device.id << "})"; | |
| 713 | |
| 714 if (notify_dispatcher) | |
| 715 media_stream_dispatcher_->StopStreamDevice(source_impl->device_info()); | |
| 716 | |
| 717 source_impl->ResetSourceStoppedCallback(); | |
| 718 source_impl->StopSource(); | |
| 719 } | |
| 720 | |
// Creates a WebRtcAudioRenderer for the remote |stream|. Returns NULL when
// the stream carries no audio tracks.
scoped_refptr<WebRtcAudioRenderer> MediaStreamImpl::CreateRemoteAudioRenderer(
    webrtc::MediaStreamInterface* stream,
    int render_frame_id) {
  if (stream->GetAudioTracks().empty())
    return NULL;

  DVLOG(1) << "MediaStreamImpl::CreateRemoteAudioRenderer label:"
           << stream->label();

  // TODO(tommi): Change the default value of session_id to be
  // StreamDeviceInfo::kNoId. Also update AudioOutputDevice etc.
  int session_id = 0, sample_rate = 0, buffer_size = 0;
  // Prefer parameters from the authorized output device; fall back to the
  // default output device when no authorization info is available.
  if (!GetAuthorizedDeviceInfoForAudioRenderer(&session_id,
                                               &sample_rate,
                                               &buffer_size)) {
    GetDefaultOutputDeviceParams(&sample_rate, &buffer_size);
  }

  return new WebRtcAudioRenderer(
      stream, RenderViewObserver::routing_id(), render_frame_id, session_id,
      sample_rate, buffer_size);
}
| 743 | |
// Creates a WebRtcLocalAudioRenderer that renders the local |audio_track|.
scoped_refptr<WebRtcLocalAudioRenderer>
MediaStreamImpl::CreateLocalAudioRenderer(
    const blink::WebMediaStreamTrack& audio_track,
    int render_frame_id) {
  DVLOG(1) << "MediaStreamImpl::CreateLocalAudioRenderer";

  // Prefer parameters from the authorized output device; fall back to the
  // default output device when no authorization info is available.
  int session_id = 0, sample_rate = 0, buffer_size = 0;
  if (!GetAuthorizedDeviceInfoForAudioRenderer(&session_id,
                                               &sample_rate,
                                               &buffer_size)) {
    GetDefaultOutputDeviceParams(&sample_rate, &buffer_size);
  }
  // NOTE(review): |sample_rate| is queried but never passed to the renderer
  // below — presumably the renderer derives it itself; confirm.

  // Create a new WebRtcLocalAudioRenderer instance and connect it to the
  // existing WebRtcAudioCapturer so that the renderer can use it as source.
  return new WebRtcLocalAudioRenderer(
      audio_track,
      RenderViewObserver::routing_id(),
      render_frame_id,
      session_id,
      buffer_size);
}
| 766 | |
| 767 bool MediaStreamImpl::GetAuthorizedDeviceInfoForAudioRenderer( | |
| 768 int* session_id, | |
| 769 int* output_sample_rate, | |
| 770 int* output_frames_per_buffer) { | |
| 771 DCHECK(CalledOnValidThread()); | |
| 772 WebRtcAudioDeviceImpl* audio_device = | |
| 773 dependency_factory_->GetWebRtcAudioDevice(); | |
| 774 if (!audio_device) | |
| 775 return false; | |
| 776 | |
| 777 return audio_device->GetAuthorizedDeviceInfoForAudioRenderer( | |
| 778 session_id, output_sample_rate, output_frames_per_buffer); | |
| 779 } | |
| 780 | |
// Bookkeeping entry for one getUserMedia() call: ties the Blink request to
// the originating frame and tracks the sources created on its behalf.
MediaStreamImpl::UserMediaRequestInfo::UserMediaRequestInfo(
    int request_id,
    blink::WebFrame* frame,
    const blink::WebUserMediaRequest& request,
    bool enable_automatic_output_device_selection)
    : request_id(request_id),
      generated(false),  // Set once the stream has been generated.
      enable_automatic_output_device_selection(
          enable_automatic_output_device_selection),
      frame(frame),
      request(request),
      request_failed_(false) {  // Set if any track fails to start.
}
| 794 | |
// Destructor only logs; members clean up via their own destructors.
MediaStreamImpl::UserMediaRequestInfo::~UserMediaRequestInfo() {
  DVLOG(1) << "~UserMediaRequestInfo";
}
| 798 | |
// Connects |track| to its native audio source and starts it. The source is
// held in |sources_waiting_for_callback_| until OnTrackStarted() reports the
// start result.
void MediaStreamImpl::UserMediaRequestInfo::StartAudioTrack(
    const blink::WebMediaStreamTrack& track,
    const blink::WebMediaConstraints& constraints) {
  DCHECK(track.source().type() == blink::WebMediaStreamSource::TypeAudio);
  MediaStreamAudioSource* native_source =
      static_cast <MediaStreamAudioSource*>(track.source().extraData());
  DCHECK(native_source);

  sources_.push_back(track.source());
  sources_waiting_for_callback_.push_back(native_source);
  // Bound through a weak pointer so the callback is dropped if this request
  // is destroyed before the track finishes starting.
  native_source->AddTrack(
      track, constraints, base::Bind(
          &MediaStreamImpl::UserMediaRequestInfo::OnTrackStarted,
          AsWeakPtr()));
}
| 814 | |
// Creates a Blink video track backed by |source|'s native video source and
// starts it. The start result is delivered through OnTrackStarted().
blink::WebMediaStreamTrack
MediaStreamImpl::UserMediaRequestInfo::CreateAndStartVideoTrack(
    const blink::WebMediaStreamSource& source,
    const blink::WebMediaConstraints& constraints) {
  DCHECK(source.type() == blink::WebMediaStreamSource::TypeVideo);
  MediaStreamVideoSource* native_source =
      MediaStreamVideoSource::GetVideoSource(source);
  DCHECK(native_source);
  sources_.push_back(source);
  sources_waiting_for_callback_.push_back(native_source);
  // Bound through a weak pointer so the callback is dropped if this request
  // is destroyed before the track finishes starting.
  return MediaStreamVideoTrack::CreateVideoTrack(
      native_source, constraints, base::Bind(
          &MediaStreamImpl::UserMediaRequestInfo::OnTrackStarted,
          AsWeakPtr()),
      true);  // NOTE(review): presumably the "enabled" flag — confirm.
}
| 831 | |
// Registers |callback| to run once all requested tracks have started (or
// failed). Fires immediately if nothing is pending.
void MediaStreamImpl::UserMediaRequestInfo::CallbackOnTracksStarted(
    const ResourcesReady& callback) {
  DCHECK(ready_callback_.is_null());  // Only one callback may be registered.
  ready_callback_ = callback;
  CheckAllTracksStarted();
}
| 838 | |
| 839 void MediaStreamImpl::UserMediaRequestInfo::OnTrackStarted( | |
| 840 MediaStreamSource* source, bool success) { | |
| 841 DVLOG(1) << "OnTrackStarted result " << success; | |
| 842 std::vector<MediaStreamSource*>::iterator it = | |
| 843 std::find(sources_waiting_for_callback_.begin(), | |
| 844 sources_waiting_for_callback_.end(), | |
| 845 source); | |
| 846 DCHECK(it != sources_waiting_for_callback_.end()); | |
| 847 sources_waiting_for_callback_.erase(it); | |
| 848 // All tracks must be started successfully. Otherwise the request is a | |
| 849 // failure. | |
| 850 if (!success) | |
| 851 request_failed_ = true; | |
| 852 CheckAllTracksStarted(); | |
| 853 } | |
| 854 | |
| 855 void MediaStreamImpl::UserMediaRequestInfo::CheckAllTracksStarted() { | |
| 856 if (!ready_callback_.is_null() && sources_waiting_for_callback_.empty()) { | |
| 857 ready_callback_.Run( | |
| 858 this, | |
| 859 request_failed_ ? MEDIA_DEVICE_TRACK_START_FAILURE : MEDIA_DEVICE_OK); | |
| 860 } | |
| 861 } | |
| 862 | |
| 863 bool MediaStreamImpl::UserMediaRequestInfo::IsSourceUsed( | |
| 864 const blink::WebMediaStreamSource& source) const { | |
| 865 for (std::vector<blink::WebMediaStreamSource>::const_iterator source_it = | |
| 866 sources_.begin(); | |
| 867 source_it != sources_.end(); ++source_it) { | |
| 868 if (source_it->id() == source.id()) | |
| 869 return true; | |
| 870 } | |
| 871 return false; | |
| 872 } | |
| 873 | |
| 874 void MediaStreamImpl::UserMediaRequestInfo::RemoveSource( | |
| 875 const blink::WebMediaStreamSource& source) { | |
| 876 for (std::vector<blink::WebMediaStreamSource>::iterator it = | |
| 877 sources_.begin(); | |
| 878 it != sources_.end(); ++it) { | |
| 879 if (source.id() == it->id()) { | |
| 880 sources_.erase(it); | |
| 881 return; | |
| 882 } | |
| 883 } | |
| 884 } | |
| 885 | |
| 886 } // namespace content | 190 } // namespace content |
| OLD | NEW |