| OLD | NEW |
| (Empty) |
| 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | |
| 2 // Use of this source code is governed by a BSD-style license that can be | |
| 3 // found in the LICENSE file. | |
| 4 | |
| 5 #include "content/renderer/media/media_stream_impl.h" | |
| 6 | |
| 7 #include <utility> | |
| 8 | |
| 9 #include "base/hash.h" | |
| 10 #include "base/logging.h" | |
| 11 #include "base/strings/string_number_conversions.h" | |
| 12 #include "base/strings/string_util.h" | |
| 13 #include "base/strings/stringprintf.h" | |
| 14 #include "base/strings/utf_string_conversions.h" | |
| 15 #include "content/public/renderer/render_frame.h" | |
| 16 #include "content/renderer/media/media_stream.h" | |
| 17 #include "content/renderer/media/media_stream_audio_source.h" | |
| 18 #include "content/renderer/media/media_stream_dispatcher.h" | |
| 19 #include "content/renderer/media/media_stream_video_capturer_source.h" | |
| 20 #include "content/renderer/media/media_stream_video_track.h" | |
| 21 #include "content/renderer/media/peer_connection_tracker.h" | |
| 22 #include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h" | |
| 23 #include "content/renderer/media/webrtc_audio_capturer.h" | |
| 24 #include "content/renderer/media/webrtc_logging.h" | |
| 25 #include "content/renderer/media/webrtc_uma_histograms.h" | |
| 26 #include "content/renderer/render_thread_impl.h" | |
| 27 #include "third_party/WebKit/public/platform/WebMediaConstraints.h" | |
| 28 #include "third_party/WebKit/public/platform/WebMediaDeviceInfo.h" | |
| 29 #include "third_party/WebKit/public/platform/WebMediaStreamTrack.h" | |
| 30 #include "third_party/WebKit/public/web/WebDocument.h" | |
| 31 #include "third_party/WebKit/public/web/WebLocalFrame.h" | |
| 32 | |
| 33 namespace content { | |
| 34 namespace { | |
| 35 | |
| 36 void CopyStreamConstraints(const blink::WebMediaConstraints& constraints, | |
| 37 StreamOptions::Constraints* mandatory, | |
| 38 StreamOptions::Constraints* optional) { | |
| 39 blink::WebVector<blink::WebMediaConstraint> mandatory_constraints; | |
| 40 constraints.getMandatoryConstraints(mandatory_constraints); | |
| 41 for (size_t i = 0; i < mandatory_constraints.size(); i++) { | |
| 42 mandatory->push_back(StreamOptions::Constraint( | |
| 43 mandatory_constraints[i].m_name.utf8(), | |
| 44 mandatory_constraints[i].m_value.utf8())); | |
| 45 } | |
| 46 | |
| 47 blink::WebVector<blink::WebMediaConstraint> optional_constraints; | |
| 48 constraints.getOptionalConstraints(optional_constraints); | |
| 49 for (size_t i = 0; i < optional_constraints.size(); i++) { | |
| 50 optional->push_back(StreamOptions::Constraint( | |
| 51 optional_constraints[i].m_name.utf8(), | |
| 52 optional_constraints[i].m_value.utf8())); | |
| 53 } | |
| 54 } | |
| 55 | |
| 56 static int g_next_request_id = 0; | |
| 57 | |
| 58 } // namespace | |
| 59 | |
// Bookkeeping for one blink enumerateDevices() request. A single blink
// request fans out into three dispatcher enumerations (audio input, video
// input, audio output); the per-kind results are accumulated here until all
// three have returned (see OnDevicesEnumerated).
struct MediaStreamImpl::MediaDevicesRequestInfo {
  MediaDevicesRequestInfo(const blink::WebMediaDevicesRequest& request,
                          int audio_input_request_id,
                          int video_input_request_id,
                          int audio_output_request_id)
      : request(request),
        audio_input_request_id(audio_input_request_id),
        video_input_request_id(video_input_request_id),
        audio_output_request_id(audio_output_request_id),
        has_audio_input_returned(false),
        has_video_input_returned(false),
        has_audio_output_returned(false) {}

  // The originating blink request; answered once all three enumerations
  // below have completed.
  blink::WebMediaDevicesRequest request;
  // Ids of the three per-kind EnumerateDevices calls issued for this request.
  int audio_input_request_id;
  int video_input_request_id;
  int audio_output_request_id;
  // Set to true as each per-kind enumeration result arrives.
  bool has_audio_input_returned;
  bool has_video_input_returned;
  bool has_audio_output_returned;
  // The enumerated devices, stored per kind until all results are in.
  StreamDeviceInfoArray audio_input_devices;
  StreamDeviceInfoArray video_input_devices;
  StreamDeviceInfoArray audio_output_devices;
};
| 84 | |
// Constructs the per-frame getUserMedia/enumerateDevices handler.
// |dependency_factory| is borrowed (must be non-null); |media_stream_dispatcher|
// is taken over and owned by this object.
// NOTE(review): |render_frame| lifetime is assumed to be managed by the
// RenderFrameObserver base class — confirm against its contract.
MediaStreamImpl::MediaStreamImpl(
    RenderFrame* render_frame,
    PeerConnectionDependencyFactory* dependency_factory,
    scoped_ptr<MediaStreamDispatcher> media_stream_dispatcher)
    : RenderFrameObserver(render_frame),
      dependency_factory_(dependency_factory),
      media_stream_dispatcher_(media_stream_dispatcher.Pass()),
      weak_factory_(this) {
  DCHECK(dependency_factory_);
  DCHECK(media_stream_dispatcher_.get());
}
| 96 | |
// Destructor: behaves as if the frame were closing.
MediaStreamImpl::~MediaStreamImpl() {
  // Force-close all outstanding user media requests and local sources here,
  // before the outstanding WeakPtrs are invalidated, to ensure a clean
  // shutdown.
  FrameWillClose();
}
| 103 | |
| 104 void MediaStreamImpl::requestUserMedia( | |
| 105 const blink::WebUserMediaRequest& user_media_request) { | |
| 106 // Save histogram data so we can see how much GetUserMedia is used. | |
| 107 // The histogram counts the number of calls to the JS API | |
| 108 // webGetUserMedia. | |
| 109 UpdateWebRTCMethodCount(WEBKIT_GET_USER_MEDIA); | |
| 110 DCHECK(CalledOnValidThread()); | |
| 111 | |
| 112 if (RenderThreadImpl::current()) { | |
| 113 RenderThreadImpl::current()->peer_connection_tracker()->TrackGetUserMedia( | |
| 114 user_media_request); | |
| 115 } | |
| 116 | |
| 117 int request_id = g_next_request_id++; | |
| 118 StreamOptions options; | |
| 119 GURL security_origin; | |
| 120 bool enable_automatic_output_device_selection = false; | |
| 121 | |
| 122 // |user_media_request| can't be mocked. So in order to test at all we check | |
| 123 // if it isNull. | |
| 124 if (user_media_request.isNull()) { | |
| 125 // We are in a test. | |
| 126 options.audio_requested = true; | |
| 127 options.video_requested = true; | |
| 128 } else { | |
| 129 if (user_media_request.audio()) { | |
| 130 options.audio_requested = true; | |
| 131 CopyStreamConstraints(user_media_request.audioConstraints(), | |
| 132 &options.mandatory_audio, | |
| 133 &options.optional_audio); | |
| 134 | |
| 135 // Check if this input device should be used to select a matching output | |
| 136 // device for audio rendering. | |
| 137 std::string enable; | |
| 138 if (options.GetFirstAudioConstraintByName( | |
| 139 kMediaStreamRenderToAssociatedSink, &enable, NULL) && | |
| 140 LowerCaseEqualsASCII(enable, "true")) { | |
| 141 enable_automatic_output_device_selection = true; | |
| 142 } | |
| 143 } | |
| 144 if (user_media_request.video()) { | |
| 145 options.video_requested = true; | |
| 146 CopyStreamConstraints(user_media_request.videoConstraints(), | |
| 147 &options.mandatory_video, | |
| 148 &options.optional_video); | |
| 149 } | |
| 150 | |
| 151 security_origin = GURL(user_media_request.securityOrigin().toString()); | |
| 152 DCHECK(render_frame()->GetWebFrame() == | |
| 153 static_cast<blink::WebFrame*>( | |
| 154 user_media_request.ownerDocument().frame())); | |
| 155 } | |
| 156 | |
| 157 DVLOG(1) << "MediaStreamImpl::requestUserMedia(" << request_id << ", [ " | |
| 158 << "audio=" << (options.audio_requested) | |
| 159 << " select associated sink: " | |
| 160 << enable_automatic_output_device_selection | |
| 161 << ", video=" << (options.video_requested) << " ], " | |
| 162 << security_origin.spec() << ")"; | |
| 163 | |
| 164 std::string audio_device_id; | |
| 165 bool mandatory_audio; | |
| 166 options.GetFirstAudioConstraintByName(kMediaStreamSourceInfoId, | |
| 167 &audio_device_id, &mandatory_audio); | |
| 168 std::string video_device_id; | |
| 169 bool mandatory_video; | |
| 170 options.GetFirstVideoConstraintByName(kMediaStreamSourceInfoId, | |
| 171 &video_device_id, &mandatory_video); | |
| 172 | |
| 173 WebRtcLogMessage(base::StringPrintf( | |
| 174 "MSI::requestUserMedia. request_id=%d" | |
| 175 ", audio source id=%s mandatory= %s " | |
| 176 ", video source id=%s mandatory= %s", | |
| 177 request_id, | |
| 178 audio_device_id.c_str(), | |
| 179 mandatory_audio ? "true":"false", | |
| 180 video_device_id.c_str(), | |
| 181 mandatory_video ? "true":"false")); | |
| 182 | |
| 183 user_media_requests_.push_back( | |
| 184 new UserMediaRequestInfo(request_id, user_media_request, | |
| 185 enable_automatic_output_device_selection)); | |
| 186 | |
| 187 media_stream_dispatcher_->GenerateStream( | |
| 188 request_id, | |
| 189 weak_factory_.GetWeakPtr(), | |
| 190 options, | |
| 191 security_origin); | |
| 192 } | |
| 193 | |
| 194 void MediaStreamImpl::cancelUserMediaRequest( | |
| 195 const blink::WebUserMediaRequest& user_media_request) { | |
| 196 DCHECK(CalledOnValidThread()); | |
| 197 UserMediaRequestInfo* request = FindUserMediaRequestInfo(user_media_request); | |
| 198 if (request) { | |
| 199 // We can't abort the stream generation process. | |
| 200 // Instead, erase the request. Once the stream is generated we will stop the | |
| 201 // stream if the request does not exist. | |
| 202 LogUserMediaRequestWithNoResult(MEDIA_STREAM_REQUEST_EXPLICITLY_CANCELLED); | |
| 203 DeleteUserMediaRequestInfo(request); | |
| 204 } | |
| 205 } | |
| 206 | |
| 207 void MediaStreamImpl::requestMediaDevices( | |
| 208 const blink::WebMediaDevicesRequest& media_devices_request) { | |
| 209 UpdateWebRTCMethodCount(WEBKIT_GET_MEDIA_DEVICES); | |
| 210 DCHECK(CalledOnValidThread()); | |
| 211 | |
| 212 int audio_input_request_id = g_next_request_id++; | |
| 213 int video_input_request_id = g_next_request_id++; | |
| 214 int audio_output_request_id = g_next_request_id++; | |
| 215 | |
| 216 // |media_devices_request| can't be mocked, so in tests it will be empty (the | |
| 217 // underlying pointer is null). In order to use this function in a test we | |
| 218 // need to check if it isNull. | |
| 219 GURL security_origin; | |
| 220 if (!media_devices_request.isNull()) | |
| 221 security_origin = GURL(media_devices_request.securityOrigin().toString()); | |
| 222 | |
| 223 DVLOG(1) << "MediaStreamImpl::requestMediaDevices(" << audio_input_request_id | |
| 224 << ", " << video_input_request_id << ", " << audio_output_request_id | |
| 225 << ", " << security_origin.spec() << ")"; | |
| 226 | |
| 227 media_devices_requests_.push_back(new MediaDevicesRequestInfo( | |
| 228 media_devices_request, | |
| 229 audio_input_request_id, | |
| 230 video_input_request_id, | |
| 231 audio_output_request_id)); | |
| 232 | |
| 233 media_stream_dispatcher_->EnumerateDevices( | |
| 234 audio_input_request_id, | |
| 235 weak_factory_.GetWeakPtr(), | |
| 236 MEDIA_DEVICE_AUDIO_CAPTURE, | |
| 237 security_origin); | |
| 238 | |
| 239 media_stream_dispatcher_->EnumerateDevices( | |
| 240 video_input_request_id, | |
| 241 weak_factory_.GetWeakPtr(), | |
| 242 MEDIA_DEVICE_VIDEO_CAPTURE, | |
| 243 security_origin); | |
| 244 | |
| 245 media_stream_dispatcher_->EnumerateDevices( | |
| 246 audio_output_request_id, | |
| 247 weak_factory_.GetWeakPtr(), | |
| 248 MEDIA_DEVICE_AUDIO_OUTPUT, | |
| 249 security_origin); | |
| 250 } | |
| 251 | |
| 252 void MediaStreamImpl::cancelMediaDevicesRequest( | |
| 253 const blink::WebMediaDevicesRequest& media_devices_request) { | |
| 254 DCHECK(CalledOnValidThread()); | |
| 255 MediaDevicesRequestInfo* request = | |
| 256 FindMediaDevicesRequestInfo(media_devices_request); | |
| 257 if (!request) | |
| 258 return; | |
| 259 CancelAndDeleteMediaDevicesRequest(request); | |
| 260 } | |
| 261 | |
// Callback from MediaStreamDispatcher.
// The requested stream has been generated by the MediaStreamDispatcher.
// Builds the blink-side WebMediaStream (sources + tracks) for the generated
// devices and registers a completion callback that resolves the JS promise
// once every track has started (or failed).
void MediaStreamImpl::OnStreamGenerated(
    int request_id,
    const std::string& label,
    const StreamDeviceInfoArray& audio_array,
    const StreamDeviceInfoArray& video_array) {
  DCHECK(CalledOnValidThread());
  DVLOG(1) << "MediaStreamImpl::OnStreamGenerated stream:" << label;

  UserMediaRequestInfo* request_info = FindUserMediaRequestInfo(request_id);
  if (!request_info) {
    // This can happen if the request is canceled or the frame reloads while
    // MediaStreamDispatcher is processing the request. The devices were
    // opened anyway, so stop any that no other stream uses.
    DVLOG(1) << "Request ID not found";
    OnStreamGeneratedForCancelledRequest(audio_array, video_array);
    return;
  }
  request_info->generated = true;

  // WebUserMediaRequest doesn't have an implementation in unit tests.
  // Therefore we need to check for isNull here and initialize the
  // constraints.
  blink::WebUserMediaRequest* request = &(request_info->request);
  blink::WebMediaConstraints audio_constraints;
  blink::WebMediaConstraints video_constraints;
  if (request->isNull()) {
    audio_constraints.initialize();
    video_constraints.initialize();
  } else {
    audio_constraints = request->audioConstraints();
    video_constraints = request->videoConstraints();
  }

  // One blink track per opened device, for each kind.
  blink::WebVector<blink::WebMediaStreamTrack> audio_track_vector(
      audio_array.size());
  CreateAudioTracks(audio_array, audio_constraints, &audio_track_vector,
                    request_info);

  blink::WebVector<blink::WebMediaStreamTrack> video_track_vector(
      video_array.size());
  CreateVideoTracks(video_array, video_constraints, &video_track_vector,
                    request_info);

  // The browser-assigned stream label becomes the blink stream id.
  blink::WebString webkit_id = base::UTF8ToUTF16(label);
  blink::WebMediaStream* web_stream = &(request_info->web_stream);

  web_stream->initialize(webkit_id, audio_track_vector,
                         video_track_vector);
  // Attach the content-side MediaStream object as extra data; blink owns it
  // via the extraData slot.
  web_stream->setExtraData(
      new MediaStream(
          *web_stream));

  // Wait for the tracks to be started successfully or to fail.
  request_info->CallbackOnTracksStarted(
      base::Bind(&MediaStreamImpl::OnCreateNativeTracksCompleted,
                 weak_factory_.GetWeakPtr()));
}
| 320 | |
| 321 void MediaStreamImpl::OnStreamGeneratedForCancelledRequest( | |
| 322 const StreamDeviceInfoArray& audio_array, | |
| 323 const StreamDeviceInfoArray& video_array) { | |
| 324 // Only stop the device if the device is not used in another MediaStream. | |
| 325 for (StreamDeviceInfoArray::const_iterator device_it = audio_array.begin(); | |
| 326 device_it != audio_array.end(); ++device_it) { | |
| 327 if (!FindLocalSource(*device_it)) | |
| 328 media_stream_dispatcher_->StopStreamDevice(*device_it); | |
| 329 } | |
| 330 | |
| 331 for (StreamDeviceInfoArray::const_iterator device_it = video_array.begin(); | |
| 332 device_it != video_array.end(); ++device_it) { | |
| 333 if (!FindLocalSource(*device_it)) | |
| 334 media_stream_dispatcher_->StopStreamDevice(*device_it); | |
| 335 } | |
| 336 } | |
| 337 | |
| 338 // Callback from MediaStreamDispatcher. | |
| 339 // The requested stream failed to be generated. | |
| 340 void MediaStreamImpl::OnStreamGenerationFailed( | |
| 341 int request_id, | |
| 342 MediaStreamRequestResult result) { | |
| 343 DCHECK(CalledOnValidThread()); | |
| 344 DVLOG(1) << "MediaStreamImpl::OnStreamGenerationFailed(" | |
| 345 << request_id << ")"; | |
| 346 UserMediaRequestInfo* request_info = FindUserMediaRequestInfo(request_id); | |
| 347 if (!request_info) { | |
| 348 // This can happen if the request is canceled or the frame reloads while | |
| 349 // MediaStreamDispatcher is processing the request. | |
| 350 DVLOG(1) << "Request ID not found"; | |
| 351 return; | |
| 352 } | |
| 353 | |
| 354 GetUserMediaRequestFailed(&request_info->request, result); | |
| 355 DeleteUserMediaRequestInfo(request_info); | |
| 356 } | |
| 357 | |
| 358 // Callback from MediaStreamDispatcher. | |
| 359 // The browser process has stopped a device used by a MediaStream. | |
| 360 void MediaStreamImpl::OnDeviceStopped( | |
| 361 const std::string& label, | |
| 362 const StreamDeviceInfo& device_info) { | |
| 363 DCHECK(CalledOnValidThread()); | |
| 364 DVLOG(1) << "MediaStreamImpl::OnDeviceStopped(" | |
| 365 << "{device_id = " << device_info.device.id << "})"; | |
| 366 | |
| 367 const blink::WebMediaStreamSource* source_ptr = FindLocalSource(device_info); | |
| 368 if (!source_ptr) { | |
| 369 // This happens if the same device is used in several guM requests or | |
| 370 // if a user happen stop a track from JS at the same time | |
| 371 // as the underlying media device is unplugged from the system. | |
| 372 return; | |
| 373 } | |
| 374 // By creating |source| it is guaranteed that the blink::WebMediaStreamSource | |
| 375 // object is valid during the cleanup. | |
| 376 blink::WebMediaStreamSource source(*source_ptr); | |
| 377 StopLocalSource(source, false); | |
| 378 | |
| 379 for (LocalStreamSources::iterator device_it = local_sources_.begin(); | |
| 380 device_it != local_sources_.end(); ++device_it) { | |
| 381 if (device_it->id() == source.id()) { | |
| 382 local_sources_.erase(device_it); | |
| 383 break; | |
| 384 } | |
| 385 } | |
| 386 } | |
| 387 | |
// Initializes |webkit_source| for |device|, reusing an existing local source
// when the same device (id + type + session) is already in use; otherwise
// creates a fresh content-side source (video or audio) and records it in
// |local_sources_|.
// NOTE(review): |constraints| is accepted but not read here — presumably
// applied later at track creation; confirm against callers.
void MediaStreamImpl::InitializeSourceObject(
    const StreamDeviceInfo& device,
    blink::WebMediaStreamSource::Type type,
    const blink::WebMediaConstraints& constraints,
    blink::WebMediaStreamSource* webkit_source) {
  const blink::WebMediaStreamSource* existing_source =
      FindLocalSource(device);
  if (existing_source) {
    // Sharing one source between streams avoids opening the device twice.
    *webkit_source = *existing_source;
    DVLOG(1) << "Source already exist. Reusing source with id "
             << webkit_source->id().utf8();
    return;
  }

  webkit_source->initialize(
      base::UTF8ToUTF16(device.device.id),
      type,
      base::UTF8ToUTF16(device.device.name));

  DVLOG(1) << "Initialize source object :"
           << "id = " << webkit_source->id().utf8()
           << ", name = " << webkit_source->name().utf8();

  if (type == blink::WebMediaStreamSource::TypeVideo) {
    // The extraData slot owns the content-side source; the stop callback
    // lets this object clean up when the source stops itself.
    webkit_source->setExtraData(
        CreateVideoSource(
            device,
            base::Bind(&MediaStreamImpl::OnLocalSourceStopped,
                       weak_factory_.GetWeakPtr())));
  } else {
    DCHECK_EQ(blink::WebMediaStreamSource::TypeAudio, type);
    MediaStreamAudioSource* audio_source(
        new MediaStreamAudioSource(
            RenderFrameObserver::routing_id(),
            device,
            base::Bind(&MediaStreamImpl::OnLocalSourceStopped,
                       weak_factory_.GetWeakPtr()),
            dependency_factory_));
    webkit_source->setExtraData(audio_source);
  }
  // Track the source so later requests for the same device can reuse it.
  local_sources_.push_back(*webkit_source);
}
| 430 | |
| 431 MediaStreamVideoSource* MediaStreamImpl::CreateVideoSource( | |
| 432 const StreamDeviceInfo& device, | |
| 433 const MediaStreamSource::SourceStoppedCallback& stop_callback) { | |
| 434 return new content::MediaStreamVideoCapturerSource( | |
| 435 device, | |
| 436 stop_callback, | |
| 437 new VideoCapturerDelegate(device)); | |
| 438 } | |
| 439 | |
| 440 void MediaStreamImpl::CreateVideoTracks( | |
| 441 const StreamDeviceInfoArray& devices, | |
| 442 const blink::WebMediaConstraints& constraints, | |
| 443 blink::WebVector<blink::WebMediaStreamTrack>* webkit_tracks, | |
| 444 UserMediaRequestInfo* request) { | |
| 445 DCHECK_EQ(devices.size(), webkit_tracks->size()); | |
| 446 | |
| 447 for (size_t i = 0; i < devices.size(); ++i) { | |
| 448 blink::WebMediaStreamSource webkit_source; | |
| 449 InitializeSourceObject(devices[i], | |
| 450 blink::WebMediaStreamSource::TypeVideo, | |
| 451 constraints, | |
| 452 &webkit_source); | |
| 453 (*webkit_tracks)[i] = | |
| 454 request->CreateAndStartVideoTrack(webkit_source, constraints); | |
| 455 } | |
| 456 } | |
| 457 | |
| 458 void MediaStreamImpl::CreateAudioTracks( | |
| 459 const StreamDeviceInfoArray& devices, | |
| 460 const blink::WebMediaConstraints& constraints, | |
| 461 blink::WebVector<blink::WebMediaStreamTrack>* webkit_tracks, | |
| 462 UserMediaRequestInfo* request) { | |
| 463 DCHECK_EQ(devices.size(), webkit_tracks->size()); | |
| 464 | |
| 465 // Log the device names for this request. | |
| 466 for (StreamDeviceInfoArray::const_iterator it = devices.begin(); | |
| 467 it != devices.end(); ++it) { | |
| 468 WebRtcLogMessage(base::StringPrintf( | |
| 469 "Generated media stream for request id %d contains audio device name" | |
| 470 " \"%s\"", | |
| 471 request->request_id, | |
| 472 it->device.name.c_str())); | |
| 473 } | |
| 474 | |
| 475 StreamDeviceInfoArray overridden_audio_array = devices; | |
| 476 if (!request->enable_automatic_output_device_selection) { | |
| 477 // If the GetUserMedia request did not explicitly set the constraint | |
| 478 // kMediaStreamRenderToAssociatedSink, the output device parameters must | |
| 479 // be removed. | |
| 480 for (StreamDeviceInfoArray::iterator it = overridden_audio_array.begin(); | |
| 481 it != overridden_audio_array.end(); ++it) { | |
| 482 it->device.matched_output_device_id = ""; | |
| 483 it->device.matched_output = MediaStreamDevice::AudioDeviceParameters(); | |
| 484 } | |
| 485 } | |
| 486 | |
| 487 for (size_t i = 0; i < overridden_audio_array.size(); ++i) { | |
| 488 blink::WebMediaStreamSource webkit_source; | |
| 489 InitializeSourceObject(overridden_audio_array[i], | |
| 490 blink::WebMediaStreamSource::TypeAudio, | |
| 491 constraints, | |
| 492 &webkit_source); | |
| 493 (*webkit_tracks)[i].initialize(webkit_source); | |
| 494 request->StartAudioTrack((*webkit_tracks)[i], constraints); | |
| 495 } | |
| 496 } | |
| 497 | |
| 498 void MediaStreamImpl::OnCreateNativeTracksCompleted( | |
| 499 UserMediaRequestInfo* request, | |
| 500 MediaStreamRequestResult result, | |
| 501 const blink::WebString& result_name) { | |
| 502 DVLOG(1) << "MediaStreamImpl::OnCreateNativeTracksComplete(" | |
| 503 << "{request_id = " << request->request_id << "} " | |
| 504 << "{result = " << result << "})"; | |
| 505 if (result == content::MEDIA_DEVICE_OK) | |
| 506 GetUserMediaRequestSucceeded(request->web_stream, &request->request); | |
| 507 else | |
| 508 GetUserMediaRequestTrackStartedFailed(&request->request, | |
| 509 result, | |
| 510 result_name); | |
| 511 | |
| 512 DeleteUserMediaRequestInfo(request); | |
| 513 } | |
| 514 | |
// Callback from MediaStreamDispatcher with the result of one of the three
// enumerations issued by requestMediaDevices(). Stores the per-kind result;
// once all three kinds have returned, converts everything to blink
// WebMediaDeviceInfo objects and answers the blink request.
void MediaStreamImpl::OnDevicesEnumerated(
    int request_id,
    const StreamDeviceInfoArray& device_array) {
  DVLOG(1) << "MediaStreamImpl::OnDevicesEnumerated(" << request_id << ")";

  MediaDevicesRequestInfo* request = FindMediaDevicesRequestInfo(request_id);
  DCHECK(request);

  // Route the result to the slot matching this request id.
  if (request_id == request->audio_input_request_id) {
    request->has_audio_input_returned = true;
    DCHECK(request->audio_input_devices.empty());
    request->audio_input_devices = device_array;
  } else if (request_id == request->video_input_request_id) {
    request->has_video_input_returned = true;
    DCHECK(request->video_input_devices.empty());
    request->video_input_devices = device_array;
  } else {
    DCHECK_EQ(request->audio_output_request_id, request_id);
    request->has_audio_output_returned = true;
    DCHECK(request->audio_output_devices.empty());
    request->audio_output_devices = device_array;
  }

  if (!request->has_audio_input_returned ||
      !request->has_video_input_returned ||
      !request->has_audio_output_returned) {
    // Wait for the rest of the devices to complete.
    return;
  }

  // All devices are ready for copying. We use a hashed audio output device id
  // as the group id for input and output audio devices. If an input device
  // doesn't have an associated output device, we use the input device's own id.
  // We don't support group id for video devices, that's left empty.
  blink::WebVector<blink::WebMediaDeviceInfo>
      devices(request->audio_input_devices.size() +
              request->video_input_devices.size() +
              request->audio_output_devices.size());
  for (size_t i = 0; i < request->audio_input_devices.size(); ++i) {
    const MediaStreamDevice& device = request->audio_input_devices[i].device;
    DCHECK_EQ(device.type, MEDIA_DEVICE_AUDIO_CAPTURE);
    std::string group_id = base::UintToString(base::Hash(
        !device.matched_output_device_id.empty() ?
            device.matched_output_device_id :
            device.id));
    devices[i].initialize(
        blink::WebString::fromUTF8(device.id),
        blink::WebMediaDeviceInfo::MediaDeviceKindAudioInput,
        blink::WebString::fromUTF8(device.name),
        blink::WebString::fromUTF8(group_id));
  }
  // Video devices follow the audio inputs in the flat |devices| vector.
  size_t offset = request->audio_input_devices.size();
  for (size_t i = 0; i < request->video_input_devices.size(); ++i) {
    const MediaStreamDevice& device = request->video_input_devices[i].device;
    DCHECK_EQ(device.type, MEDIA_DEVICE_VIDEO_CAPTURE);
    devices[offset + i].initialize(
        blink::WebString::fromUTF8(device.id),
        blink::WebMediaDeviceInfo::MediaDeviceKindVideoInput,
        blink::WebString::fromUTF8(device.name),
        blink::WebString());
  }
  // Audio outputs come last; their group id is the hash of their own id so
  // it matches inputs whose matched_output_device_id points at them.
  offset += request->video_input_devices.size();
  for (size_t i = 0; i < request->audio_output_devices.size(); ++i) {
    const MediaStreamDevice& device = request->audio_output_devices[i].device;
    DCHECK_EQ(device.type, MEDIA_DEVICE_AUDIO_OUTPUT);
    devices[offset + i].initialize(
        blink::WebString::fromUTF8(device.id),
        blink::WebMediaDeviceInfo::MediaDeviceKindAudioOutput,
        blink::WebString::fromUTF8(device.name),
        blink::WebString::fromUTF8(base::UintToString(base::Hash(device.id))));
  }

  EnumerateDevicesSucceded(&request->request, devices);
  // Also tears down the browser-side enumerations and frees |request|.
  CancelAndDeleteMediaDevicesRequest(request);
}
| 590 | |
// Callback from MediaStreamDispatcher when a single device open completes.
// Not used by this class — this object never calls OpenDevice, so only the
// trace log fires. |video_device| is intentionally unused.
void MediaStreamImpl::OnDeviceOpened(
    int request_id,
    const std::string& label,
    const StreamDeviceInfo& video_device) {
  DVLOG(1) << "MediaStreamImpl::OnDeviceOpened("
           << request_id << ", " << label << ")";
  NOTIMPLEMENTED();
}
| 599 | |
| 600 void MediaStreamImpl::OnDeviceOpenFailed(int request_id) { | |
| 601 DVLOG(1) << "MediaStreamImpl::VideoDeviceOpenFailed(" | |
| 602 << request_id << ")"; | |
| 603 NOTIMPLEMENTED(); | |
| 604 } | |
| 605 | |
// Resolves the blink getUserMedia request with the generated |stream| and
// records the success in UMA.
void MediaStreamImpl::GetUserMediaRequestSucceeded(
    const blink::WebMediaStream& stream,
    blink::WebUserMediaRequest* request_info) {
  DVLOG(1) << "MediaStreamImpl::GetUserMediaRequestSucceeded";
  LogUserMediaRequestResult(MEDIA_DEVICE_OK);
  request_info->requestSucceeded(stream);
}
| 613 | |
// Rejects the blink getUserMedia request, mapping each stream-generation
// failure code to the matching blink rejection (permission denial, a
// UA-specific named error, or the generic failure fallback). Also records
// the outcome in UMA.
void MediaStreamImpl::GetUserMediaRequestFailed(
    blink::WebUserMediaRequest* request_info,
    MediaStreamRequestResult result) {
  LogUserMediaRequestResult(result);
  switch (result) {
    case MEDIA_DEVICE_OK:
      // Success must never reach the failure path.
      NOTREACHED();
      break;
    case MEDIA_DEVICE_PERMISSION_DENIED:
      // Denial has a dedicated blink callback (maps to PermissionDeniedError).
      request_info->requestDenied();
      break;
    case MEDIA_DEVICE_PERMISSION_DISMISSED:
      request_info->requestFailedUASpecific("PermissionDismissedError");
      break;
    case MEDIA_DEVICE_INVALID_STATE:
      request_info->requestFailedUASpecific("InvalidStateError");
      break;
    case MEDIA_DEVICE_NO_HARDWARE:
      request_info->requestFailedUASpecific("DevicesNotFoundError");
      break;
    case MEDIA_DEVICE_INVALID_SECURITY_ORIGIN:
      request_info->requestFailedUASpecific("InvalidSecurityOriginError");
      break;
    case MEDIA_DEVICE_TAB_CAPTURE_FAILURE:
      request_info->requestFailedUASpecific("TabCaptureError");
      break;
    case MEDIA_DEVICE_SCREEN_CAPTURE_FAILURE:
      request_info->requestFailedUASpecific("ScreenCaptureError");
      break;
    case MEDIA_DEVICE_CAPTURE_FAILURE:
      request_info->requestFailedUASpecific("DeviceCaptureError");
      break;
    default:
      // Unknown code: flag in debug builds, fail generically in release.
      NOTREACHED();
      request_info->requestFailed();
      break;
  }
}
| 652 | |
// Rejects the blink getUserMedia request for failures that occur while
// starting tracks (after stream generation succeeded). |result_name| carries
// the offending constraint name for CONSTRAINT_NOT_SATISFIED.
void MediaStreamImpl::GetUserMediaRequestTrackStartedFailed(
    blink::WebUserMediaRequest* request_info,
    MediaStreamRequestResult result,
    const blink::WebString& result_name) {
  switch (result) {
    case MEDIA_DEVICE_CONSTRAINT_NOT_SATISFIED:
      request_info->requestFailedConstraint(result_name);
      break;
    case MEDIA_DEVICE_TRACK_START_FAILURE:
      request_info->requestFailedUASpecific("TrackStartError");
      break;
    default:
      // Only the two codes above are expected on the track-start path.
      NOTREACHED();
      request_info->requestFailed();
      break;
  }
}
| 670 | |
// Resolves the blink enumerateDevices request with the collected |devices|.
// (Note: "Succeded" [sic] is the identifier's existing spelling; fixing it
// would require changing the header and all callers.)
void MediaStreamImpl::EnumerateDevicesSucceded(
    blink::WebMediaDevicesRequest* request,
    blink::WebVector<blink::WebMediaDeviceInfo>& devices) {
  request->requestSucceeded(devices);
}
| 676 | |
| 677 const blink::WebMediaStreamSource* MediaStreamImpl::FindLocalSource( | |
| 678 const StreamDeviceInfo& device) const { | |
| 679 for (LocalStreamSources::const_iterator it = local_sources_.begin(); | |
| 680 it != local_sources_.end(); ++it) { | |
| 681 MediaStreamSource* const source = | |
| 682 static_cast<MediaStreamSource*>(it->extraData()); | |
| 683 const StreamDeviceInfo& active_device = source->device_info(); | |
| 684 if (active_device.device.id == device.device.id && | |
| 685 active_device.device.type == device.device.type && | |
| 686 active_device.session_id == device.session_id) { | |
| 687 return &(*it); | |
| 688 } | |
| 689 } | |
| 690 return NULL; | |
| 691 } | |
| 692 | |
| 693 MediaStreamImpl::UserMediaRequestInfo* | |
| 694 MediaStreamImpl::FindUserMediaRequestInfo(int request_id) { | |
| 695 UserMediaRequests::iterator it = user_media_requests_.begin(); | |
| 696 for (; it != user_media_requests_.end(); ++it) { | |
| 697 if ((*it)->request_id == request_id) | |
| 698 return (*it); | |
| 699 } | |
| 700 return NULL; | |
| 701 } | |
| 702 | |
| 703 MediaStreamImpl::UserMediaRequestInfo* | |
| 704 MediaStreamImpl::FindUserMediaRequestInfo( | |
| 705 const blink::WebUserMediaRequest& request) { | |
| 706 UserMediaRequests::iterator it = user_media_requests_.begin(); | |
| 707 for (; it != user_media_requests_.end(); ++it) { | |
| 708 if ((*it)->request == request) | |
| 709 return (*it); | |
| 710 } | |
| 711 return NULL; | |
| 712 } | |
| 713 | |
| 714 void MediaStreamImpl::DeleteUserMediaRequestInfo( | |
| 715 UserMediaRequestInfo* request) { | |
| 716 UserMediaRequests::iterator it = user_media_requests_.begin(); | |
| 717 for (; it != user_media_requests_.end(); ++it) { | |
| 718 if ((*it) == request) { | |
| 719 user_media_requests_.erase(it); | |
| 720 return; | |
| 721 } | |
| 722 } | |
| 723 NOTREACHED(); | |
| 724 } | |
| 725 | |
// Drops every outstanding getUserMedia request, cancelling browser-side
// generation for those that have not produced a stream yet. Called on frame
// close/teardown (see FrameWillClose via the destructor).
void MediaStreamImpl::DeleteAllUserMediaRequests() {
  UserMediaRequests::iterator request_it = user_media_requests_.begin();
  while (request_it != user_media_requests_.end()) {
    DVLOG(1) << "MediaStreamImpl@" << this << "::DeleteAllUserMediaRequests: "
             << "Cancel user media request " << (*request_it)->request_id;
    // If the request is not generated, it means that a request
    // has been sent to the MediaStreamDispatcher to generate a stream
    // but MediaStreamDispatcher has not yet responded and we need to cancel
    // the request.
    if (!(*request_it)->generated) {
      DCHECK(!(*request_it)->HasPendingSources());
      media_stream_dispatcher_->CancelGenerateStream(
          (*request_it)->request_id, weak_factory_.GetWeakPtr());
      LogUserMediaRequestWithNoResult(MEDIA_STREAM_REQUEST_NOT_GENERATED);
    } else {
      // Stream generated but some tracks were still starting; record that the
      // request ended without a final result.
      DCHECK((*request_it)->HasPendingSources());
      LogUserMediaRequestWithNoResult(
          MEDIA_STREAM_REQUEST_PENDING_MEDIA_TRACKS);
    }
    // erase() returns the next iterator (and frees the owned entry).
    request_it = user_media_requests_.erase(request_it);
  }
}
| 748 | |
| 749 MediaStreamImpl::MediaDevicesRequestInfo* | |
| 750 MediaStreamImpl::FindMediaDevicesRequestInfo( | |
| 751 int request_id) { | |
| 752 MediaDevicesRequests::iterator it = media_devices_requests_.begin(); | |
| 753 for (; it != media_devices_requests_.end(); ++it) { | |
| 754 if ((*it)->audio_input_request_id == request_id || | |
| 755 (*it)->video_input_request_id == request_id || | |
| 756 (*it)->audio_output_request_id == request_id) { | |
| 757 return (*it); | |
| 758 } | |
| 759 } | |
| 760 return NULL; | |
| 761 } | |
| 762 | |
| 763 MediaStreamImpl::MediaDevicesRequestInfo* | |
| 764 MediaStreamImpl::FindMediaDevicesRequestInfo( | |
| 765 const blink::WebMediaDevicesRequest& request) { | |
| 766 MediaDevicesRequests::iterator it = media_devices_requests_.begin(); | |
| 767 for (; it != media_devices_requests_.end(); ++it) { | |
| 768 if ((*it)->request == request) | |
| 769 return (*it); | |
| 770 } | |
| 771 return NULL; | |
| 772 } | |
| 773 | |
// Stops the dispatcher-side enumerations belonging to |request| and removes
// it from |media_devices_requests_|. |request| must be present in the list;
// reaching the end without a match is a logic error.
void MediaStreamImpl::CancelAndDeleteMediaDevicesRequest(
    MediaDevicesRequestInfo* request) {
  MediaDevicesRequests::iterator it = media_devices_requests_.begin();
  for (; it != media_devices_requests_.end(); ++it) {
    if ((*it) == request) {
      // Cancel device enumeration.
      // One request fans out into three dispatcher enumerations (audio
      // input, video input, audio output); stop each of them.
      media_stream_dispatcher_->StopEnumerateDevices(
          request->audio_input_request_id, weak_factory_.GetWeakPtr());
      media_stream_dispatcher_->StopEnumerateDevices(
          request->video_input_request_id, weak_factory_.GetWeakPtr());
      media_stream_dispatcher_->StopEnumerateDevices(
          request->audio_output_request_id, weak_factory_.GetWeakPtr());

      media_devices_requests_.erase(it);
      return;
    }
  }
  NOTREACHED();
}
| 793 | |
| 794 void MediaStreamImpl::FrameWillClose() { | |
| 795 // Cancel all outstanding UserMediaRequests. | |
| 796 DeleteAllUserMediaRequests(); | |
| 797 | |
| 798 // Loop through all current local sources and stop the sources. | |
| 799 LocalStreamSources::iterator sources_it = local_sources_.begin(); | |
| 800 while (sources_it != local_sources_.end()) { | |
| 801 StopLocalSource(*sources_it, true); | |
| 802 sources_it = local_sources_.erase(sources_it); | |
| 803 } | |
| 804 } | |
| 805 | |
| 806 void MediaStreamImpl::OnLocalSourceStopped( | |
| 807 const blink::WebMediaStreamSource& source) { | |
| 808 DCHECK(CalledOnValidThread()); | |
| 809 DVLOG(1) << "MediaStreamImpl::OnLocalSourceStopped"; | |
| 810 | |
| 811 bool device_found = false; | |
| 812 for (LocalStreamSources::iterator device_it = local_sources_.begin(); | |
| 813 device_it != local_sources_.end(); ++device_it) { | |
| 814 if (device_it->id() == source.id()) { | |
| 815 device_found = true; | |
| 816 local_sources_.erase(device_it); | |
| 817 break; | |
| 818 } | |
| 819 } | |
| 820 CHECK(device_found); | |
| 821 | |
| 822 MediaStreamSource* source_impl = | |
| 823 static_cast<MediaStreamSource*>(source.extraData()); | |
| 824 media_stream_dispatcher_->StopStreamDevice(source_impl->device_info()); | |
| 825 } | |
| 826 | |
| 827 void MediaStreamImpl::StopLocalSource( | |
| 828 const blink::WebMediaStreamSource& source, | |
| 829 bool notify_dispatcher) { | |
| 830 MediaStreamSource* source_impl = | |
| 831 static_cast<MediaStreamSource*>(source.extraData()); | |
| 832 DVLOG(1) << "MediaStreamImpl::StopLocalSource(" | |
| 833 << "{device_id = " << source_impl->device_info().device.id << "})"; | |
| 834 | |
| 835 if (notify_dispatcher) | |
| 836 media_stream_dispatcher_->StopStreamDevice(source_impl->device_info()); | |
| 837 | |
| 838 source_impl->ResetSourceStoppedCallback(); | |
| 839 source_impl->StopSource(); | |
| 840 } | |
| 841 | |
// Tracks one getUserMedia request for its lifetime: the dispatcher id, the
// originating Blink request, and the aggregated start result of its tracks.
MediaStreamImpl::UserMediaRequestInfo::UserMediaRequestInfo(
    int request_id,
    const blink::WebUserMediaRequest& request,
    bool enable_automatic_output_device_selection)
    : request_id(request_id),
      generated(false),  // Set once the dispatcher has generated a stream.
      enable_automatic_output_device_selection(
          enable_automatic_output_device_selection),
      request(request),
      request_result_(MEDIA_DEVICE_OK),  // Overwritten if a track fails.
      request_result_name_("") {
}
| 854 | |
// Destructor only logs; members clean themselves up.
MediaStreamImpl::UserMediaRequestInfo::~UserMediaRequestInfo() {
  DVLOG(1) << "~UserMediaRequestInfo";
}
| 858 | |
| 859 void MediaStreamImpl::UserMediaRequestInfo::StartAudioTrack( | |
| 860 const blink::WebMediaStreamTrack& track, | |
| 861 const blink::WebMediaConstraints& constraints) { | |
| 862 DCHECK(track.source().type() == blink::WebMediaStreamSource::TypeAudio); | |
| 863 MediaStreamAudioSource* native_source = | |
| 864 static_cast <MediaStreamAudioSource*>(track.source().extraData()); | |
| 865 DCHECK(native_source); | |
| 866 | |
| 867 sources_.push_back(track.source()); | |
| 868 sources_waiting_for_callback_.push_back(native_source); | |
| 869 native_source->AddTrack( | |
| 870 track, constraints, base::Bind( | |
| 871 &MediaStreamImpl::UserMediaRequestInfo::OnTrackStarted, | |
| 872 AsWeakPtr())); | |
| 873 } | |
| 874 | |
| 875 blink::WebMediaStreamTrack | |
| 876 MediaStreamImpl::UserMediaRequestInfo::CreateAndStartVideoTrack( | |
| 877 const blink::WebMediaStreamSource& source, | |
| 878 const blink::WebMediaConstraints& constraints) { | |
| 879 DCHECK(source.type() == blink::WebMediaStreamSource::TypeVideo); | |
| 880 MediaStreamVideoSource* native_source = | |
| 881 MediaStreamVideoSource::GetVideoSource(source); | |
| 882 DCHECK(native_source); | |
| 883 sources_.push_back(source); | |
| 884 sources_waiting_for_callback_.push_back(native_source); | |
| 885 return MediaStreamVideoTrack::CreateVideoTrack( | |
| 886 native_source, constraints, base::Bind( | |
| 887 &MediaStreamImpl::UserMediaRequestInfo::OnTrackStarted, | |
| 888 AsWeakPtr()), | |
| 889 true); | |
| 890 } | |
| 891 | |
// Registers |callback| to run once all of this request's tracks have
// started (or a failure has been recorded); runs it immediately if nothing
// is still pending. Only one callback may be registered per request.
void MediaStreamImpl::UserMediaRequestInfo::CallbackOnTracksStarted(
    const ResourcesReady& callback) {
  DCHECK(ready_callback_.is_null());
  ready_callback_ = callback;
  CheckAllTracksStarted();
}
| 898 | |
| 899 void MediaStreamImpl::UserMediaRequestInfo::OnTrackStarted( | |
| 900 MediaStreamSource* source, | |
| 901 MediaStreamRequestResult result, | |
| 902 const blink::WebString& result_name) { | |
| 903 DVLOG(1) << "OnTrackStarted result " << result; | |
| 904 std::vector<MediaStreamSource*>::iterator it = | |
| 905 std::find(sources_waiting_for_callback_.begin(), | |
| 906 sources_waiting_for_callback_.end(), | |
| 907 source); | |
| 908 DCHECK(it != sources_waiting_for_callback_.end()); | |
| 909 sources_waiting_for_callback_.erase(it); | |
| 910 // All tracks must be started successfully. Otherwise the request is a | |
| 911 // failure. | |
| 912 if (result != MEDIA_DEVICE_OK) { | |
| 913 request_result_ = result; | |
| 914 request_result_name_ = result_name; | |
| 915 } | |
| 916 | |
| 917 CheckAllTracksStarted(); | |
| 918 } | |
| 919 | |
| 920 void MediaStreamImpl::UserMediaRequestInfo::CheckAllTracksStarted() { | |
| 921 if (!ready_callback_.is_null() && sources_waiting_for_callback_.empty()) { | |
| 922 ready_callback_.Run(this, request_result_, request_result_name_); | |
| 923 } | |
| 924 } | |
| 925 | |
| 926 bool MediaStreamImpl::UserMediaRequestInfo::IsSourceUsed( | |
| 927 const blink::WebMediaStreamSource& source) const { | |
| 928 for (std::vector<blink::WebMediaStreamSource>::const_iterator source_it = | |
| 929 sources_.begin(); | |
| 930 source_it != sources_.end(); ++source_it) { | |
| 931 if (source_it->id() == source.id()) | |
| 932 return true; | |
| 933 } | |
| 934 return false; | |
| 935 } | |
| 936 | |
| 937 void MediaStreamImpl::UserMediaRequestInfo::RemoveSource( | |
| 938 const blink::WebMediaStreamSource& source) { | |
| 939 for (std::vector<blink::WebMediaStreamSource>::iterator it = | |
| 940 sources_.begin(); | |
| 941 it != sources_.end(); ++it) { | |
| 942 if (source.id() == it->id()) { | |
| 943 sources_.erase(it); | |
| 944 return; | |
| 945 } | |
| 946 } | |
| 947 } | |
| 948 | |
// Returns true while at least one native source has not yet reported its
// track as started via OnTrackStarted.
bool MediaStreamImpl::UserMediaRequestInfo::HasPendingSources() const {
  return !sources_waiting_for_callback_.empty();
}
| 952 | |
| 953 } // namespace content | |
| OLD | NEW |