OLD | NEW |
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/renderer/media/user_media_client_impl.h" | 5 #include "content/renderer/media/user_media_client_impl.h" |
6 | 6 |
7 #include <stddef.h> | 7 #include <stddef.h> |
8 | 8 |
9 #include <algorithm> | 9 #include <algorithm> |
10 #include <utility> | 10 #include <utility> |
(...skipping 327 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
338 const url::Origin& security_origin) { | 338 const url::Origin& security_origin) { |
339 DCHECK(CalledOnValidThread()); | 339 DCHECK(CalledOnValidThread()); |
340 | 340 |
341 WebRtcLogMessage( | 341 WebRtcLogMessage( |
342 base::StringPrintf("MSI::requestUserMedia. request_id=%d" | 342 base::StringPrintf("MSI::requestUserMedia. request_id=%d" |
343 ", audio source id=%s" | 343 ", audio source id=%s" |
344 ", video source id=%s", | 344 ", video source id=%s", |
345 request_id, controls->audio.device_id.c_str(), | 345 request_id, controls->audio.device_id.c_str(), |
346 controls->video.device_id.c_str())); | 346 controls->video.device_id.c_str())); |
347 | 347 |
348 user_media_requests_.push_back( | 348 user_media_requests_.push_back(std::unique_ptr<UserMediaRequestInfo>( |
349 new UserMediaRequestInfo(request_id, user_media_request, | 349 new UserMediaRequestInfo(request_id, user_media_request, |
350 enable_automatic_output_device_selection)); | 350 enable_automatic_output_device_selection))); |
351 | 351 |
352 media_stream_dispatcher_->GenerateStream( | 352 media_stream_dispatcher_->GenerateStream( |
353 request_id, weak_factory_.GetWeakPtr(), *controls, security_origin); | 353 request_id, weak_factory_.GetWeakPtr(), *controls, security_origin); |
354 } | 354 } |
355 | 355 |
356 void UserMediaClientImpl::cancelUserMediaRequest( | 356 void UserMediaClientImpl::cancelUserMediaRequest( |
357 const blink::WebUserMediaRequest& user_media_request) { | 357 const blink::WebUserMediaRequest& user_media_request) { |
358 DCHECK(CalledOnValidThread()); | 358 DCHECK(CalledOnValidThread()); |
359 UserMediaRequestInfo* request = FindUserMediaRequestInfo(user_media_request); | 359 UserMediaRequestInfo* request = FindUserMediaRequestInfo(user_media_request); |
360 if (request) { | 360 if (request) { |
(...skipping 64 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
425 UserMediaRequestInfo* request_info = FindUserMediaRequestInfo(request_id); | 425 UserMediaRequestInfo* request_info = FindUserMediaRequestInfo(request_id); |
426 if (!request_info) { | 426 if (!request_info) { |
427 // This can happen if the request is canceled or the frame reloads while | 427 // This can happen if the request is canceled or the frame reloads while |
428 // MediaStreamDispatcher is processing the request. | 428 // MediaStreamDispatcher is processing the request. |
429 DVLOG(1) << "Request ID not found"; | 429 DVLOG(1) << "Request ID not found"; |
430 OnStreamGeneratedForCancelledRequest(audio_array, video_array); | 430 OnStreamGeneratedForCancelledRequest(audio_array, video_array); |
431 return; | 431 return; |
432 } | 432 } |
433 request_info->generated = true; | 433 request_info->generated = true; |
434 | 434 |
| 435 for (const auto* array : {&audio_array, &video_array}) { |
| 436 for (const auto& info : *array) { |
| 437 WebRtcLogMessage(base::StringPrintf("Request %d for device \"%s\"", |
| 438 request_id, |
| 439 info.device.name.c_str())); |
| 440 } |
| 441 } |
| 442 |
435 DCHECK(!request_info->request.isNull()); | 443 DCHECK(!request_info->request.isNull()); |
436 blink::WebVector<blink::WebMediaStreamTrack> audio_track_vector( | 444 blink::WebVector<blink::WebMediaStreamTrack> audio_track_vector( |
437 audio_array.size()); | 445 audio_array.size()); |
438 CreateAudioTracks(audio_array, request_info->request.audioConstraints(), | 446 CreateAudioTracks(audio_array, request_info->request.audioConstraints(), |
439 &audio_track_vector, request_info); | 447 &audio_track_vector, request_info); |
440 | 448 |
441 blink::WebVector<blink::WebMediaStreamTrack> video_track_vector( | 449 blink::WebVector<blink::WebMediaStreamTrack> video_track_vector( |
442 video_array.size()); | 450 video_array.size()); |
443 CreateVideoTracks(video_array, request_info->request.videoConstraints(), | 451 CreateVideoTracks(video_array, request_info->request.videoConstraints(), |
444 &video_track_vector, request_info); | 452 &video_track_vector, request_info); |
445 | 453 |
446 blink::WebString webkit_id = blink::WebString::fromUTF8(label); | 454 blink::WebString webkit_id = blink::WebString::fromUTF8(label); |
447 blink::WebMediaStream* web_stream = &(request_info->web_stream); | 455 blink::WebMediaStream* web_stream = &(request_info->web_stream); |
448 | 456 |
449 web_stream->initialize(webkit_id, audio_track_vector, | 457 web_stream->initialize(webkit_id, audio_track_vector, video_track_vector); |
450 video_track_vector); | |
451 web_stream->setExtraData(new MediaStream()); | 458 web_stream->setExtraData(new MediaStream()); |
452 | 459 |
453 // Wait for the tracks to be started successfully or to fail. | 460 // Wait for the tracks to be started successfully or to fail. |
454 request_info->CallbackOnTracksStarted( | 461 request_info->CallbackOnTracksStarted( |
455 base::Bind(&UserMediaClientImpl::OnCreateNativeTracksCompleted, | 462 base::Bind(&UserMediaClientImpl::OnCreateNativeTracksCompleted, |
456 weak_factory_.GetWeakPtr())); | 463 weak_factory_.GetWeakPtr())); |
457 } | 464 } |
458 | 465 |
459 void UserMediaClientImpl::OnStreamGeneratedForCancelledRequest( | 466 void UserMediaClientImpl::OnStreamGeneratedForCancelledRequest( |
460 const StreamDeviceInfoArray& audio_array, | 467 const StreamDeviceInfoArray& audio_array, |
461 const StreamDeviceInfoArray& video_array) { | 468 const StreamDeviceInfoArray& video_array) { |
462 // Only stop the device if the device is not used in another MediaStream. | 469 // Only stop the device if the device is not used in another MediaStream. |
463 for (StreamDeviceInfoArray::const_iterator device_it = audio_array.begin(); | 470 for (StreamDeviceInfoArray::const_iterator device_it = audio_array.begin(); |
464 device_it != audio_array.end(); ++device_it) { | 471 device_it != audio_array.end(); ++device_it) { |
465 if (!FindLocalSource(*device_it)) | 472 if (!FindLocalSource(*device_it)) |
466 media_stream_dispatcher_->StopStreamDevice(*device_it); | 473 media_stream_dispatcher_->StopStreamDevice(*device_it); |
467 } | 474 } |
468 | 475 |
469 for (StreamDeviceInfoArray::const_iterator device_it = video_array.begin(); | 476 for (StreamDeviceInfoArray::const_iterator device_it = video_array.begin(); |
470 device_it != video_array.end(); ++device_it) { | 477 device_it != video_array.end(); ++device_it) { |
471 if (!FindLocalSource(*device_it)) | 478 if (!FindLocalSource(*device_it)) |
472 media_stream_dispatcher_->StopStreamDevice(*device_it); | 479 media_stream_dispatcher_->StopStreamDevice(*device_it); |
473 } | 480 } |
474 } | 481 } |
475 | 482 |
| 483 // static |
| 484 void UserMediaClientImpl::OnAudioSourceStartedOnAudioThread( |
| 485 scoped_refptr<base::SingleThreadTaskRunner> task_runner, |
| 486 base::WeakPtr<UserMediaClientImpl> weak_ptr, |
| 487 MediaStreamSource* source, |
| 488 MediaStreamRequestResult result, |
| 489 const blink::WebString& result_name) { |
| 490 task_runner->PostTask(FROM_HERE, |
| 491 base::Bind(&UserMediaClientImpl::OnAudioSourceStarted, |
| 492 weak_ptr, source, result, result_name)); |
| 493 } |
| 494 |
| 495 void UserMediaClientImpl::OnAudioSourceStarted( |
| 496 MediaStreamSource* source, |
| 497 MediaStreamRequestResult result, |
| 498 const blink::WebString& result_name) { |
| 499 DCHECK(CalledOnValidThread()); |
| 500 |
| 501 for (auto it = pending_local_sources_.begin(); |
| 502 it != pending_local_sources_.end(); ++it) { |
| 503 MediaStreamSource* const source_extra_data = |
| 504 static_cast<MediaStreamSource*>((*it).getExtraData()); |
| 505 if (source_extra_data != source) |
| 506 continue; |
| 507 if (result == MEDIA_DEVICE_OK) |
| 508 local_sources_.push_back((*it)); |
| 509 pending_local_sources_.erase(it); |
| 510 |
| 511 NotifyAllRequestsOfAudioSourceStarted(source, result, result_name); |
| 512 return; |
| 513 } |
| 514 NOTREACHED(); |
| 515 } |
| 516 |
| 517 void UserMediaClientImpl::NotifyAllRequestsOfAudioSourceStarted( |
| 518 MediaStreamSource* source, |
| 519 MediaStreamRequestResult result, |
| 520 const blink::WebString& result_name) { |
| 521 // Since a request object that receives the OnAudioSourceStarted event |
| 522 // might get deleted and removed from the |user_media_requests_| array while |
| 523 // we iterate through it, we need to jump through this hoop here, copy |
| 524 // pointers to the objects we're notifying and avoid using |
| 525 // user_media_requests_ as we iterate+notify. |
| 526 std::vector<UserMediaRequestInfo*> requests; |
| 527 requests.reserve(user_media_requests_.size()); |
| 528 for (const auto& request : user_media_requests_) |
| 529 requests.push_back(request.get()); |
| 530 for (auto* request : requests) |
| 531 request->OnAudioSourceStarted(source, result, result_name); |
| 532 } |
| 533 |
476 void UserMediaClientImpl::FinalizeEnumerateDevices( | 534 void UserMediaClientImpl::FinalizeEnumerateDevices( |
477 blink::WebMediaDevicesRequest request, | 535 blink::WebMediaDevicesRequest request, |
478 const EnumerationResult& result) { | 536 const EnumerationResult& result) { |
479 DCHECK_EQ(static_cast<size_t>(NUM_MEDIA_DEVICE_TYPES), result.size()); | 537 DCHECK_EQ(static_cast<size_t>(NUM_MEDIA_DEVICE_TYPES), result.size()); |
480 | 538 |
481 blink::WebVector<blink::WebMediaDeviceInfo> devices( | 539 blink::WebVector<blink::WebMediaDeviceInfo> devices( |
482 result[MEDIA_DEVICE_TYPE_AUDIO_INPUT].size() + | 540 result[MEDIA_DEVICE_TYPE_AUDIO_INPUT].size() + |
483 result[MEDIA_DEVICE_TYPE_VIDEO_INPUT].size() + | 541 result[MEDIA_DEVICE_TYPE_VIDEO_INPUT].size() + |
484 result[MEDIA_DEVICE_TYPE_AUDIO_OUTPUT].size()); | 542 result[MEDIA_DEVICE_TYPE_AUDIO_OUTPUT].size()); |
485 size_t index = 0; | 543 size_t index = 0; |
(...skipping 47 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
533 // as the underlying media device is unplugged from the system. | 591 // as the underlying media device is unplugged from the system. |
534 return; | 592 return; |
535 } | 593 } |
536 // By creating |source| it is guaranteed that the blink::WebMediaStreamSource | 594 // By creating |source| it is guaranteed that the blink::WebMediaStreamSource |
537 // object is valid during the cleanup. | 595 // object is valid during the cleanup. |
538 blink::WebMediaStreamSource source(*source_ptr); | 596 blink::WebMediaStreamSource source(*source_ptr); |
539 StopLocalSource(source, false); | 597 StopLocalSource(source, false); |
540 RemoveLocalSource(source); | 598 RemoveLocalSource(source); |
541 } | 599 } |
542 | 600 |
543 void UserMediaClientImpl::InitializeSourceObject( | 601 blink::WebMediaStreamSource UserMediaClientImpl::InitializeVideoSourceObject( |
544 const StreamDeviceInfo& device, | 602 const StreamDeviceInfo& device, |
545 blink::WebMediaStreamSource::Type type, | 603 const blink::WebMediaConstraints& constraints) { |
| 604 DCHECK(CalledOnValidThread()); |
| 605 |
| 606 blink::WebMediaStreamSource source = FindOrInitializeSourceObject(device); |
| 607 if (!source.getExtraData()) { |
| 608 source.setExtraData(CreateVideoSource( |
| 609 device, base::Bind(&UserMediaClientImpl::OnLocalSourceStopped, |
| 610 weak_factory_.GetWeakPtr()))); |
| 611 local_sources_.push_back(source); |
| 612 } |
| 613 return source; |
| 614 } |
| 615 |
| 616 blink::WebMediaStreamSource UserMediaClientImpl::InitializeAudioSourceObject( |
| 617 const StreamDeviceInfo& device, |
546 const blink::WebMediaConstraints& constraints, | 618 const blink::WebMediaConstraints& constraints, |
547 blink::WebMediaStreamSource* webkit_source) { | 619 bool* is_pending) { |
548 const blink::WebMediaStreamSource* existing_source = | 620 DCHECK(CalledOnValidThread()); |
549 FindLocalSource(device); | 621 |
550 if (existing_source) { | 622 *is_pending = true; |
551 *webkit_source = *existing_source; | 623 |
552 DVLOG(1) << "Source already exist. Reusing source with id " | 624 // See if the source is already being initialized. |
553 << webkit_source->id().utf8(); | 625 auto* pending = FindPendingLocalSource(device); |
554 return; | 626 if (pending) |
| 627 return *pending; |
| 628 |
| 629 blink::WebMediaStreamSource source = FindOrInitializeSourceObject(device); |
| 630 if (source.getExtraData()) { |
| 631 // The only return point for non-pending sources. |
| 632 *is_pending = false; |
| 633 return source; |
555 } | 634 } |
556 | 635 |
557 webkit_source->initialize(blink::WebString::fromUTF8(device.device.id), type, | 636 // While sources are being initialized, keep them in a separate array. |
558 blink::WebString::fromUTF8(device.device.name), | 637 // Once they've finished initializing, they'll be moved to local_sources_. |
559 false /* remote */); | 638 // See OnAudioSourceStarted for more details. |
| 639 pending_local_sources_.push_back(source); |
560 | 640 |
561 DVLOG(1) << "Initialize source object :" | 641 MediaStreamSource::ConstraintsCallback source_ready = base::Bind( |
562 << "id = " << webkit_source->id().utf8() | 642 &UserMediaClientImpl::OnAudioSourceStartedOnAudioThread, |
563 << ", name = " << webkit_source->name().utf8(); | 643 base::ThreadTaskRunnerHandle::Get(), weak_factory_.GetWeakPtr()); |
564 | 644 |
565 if (type == blink::WebMediaStreamSource::TypeVideo) { | 645 MediaStreamAudioSource* const audio_source = |
566 webkit_source->setExtraData( | 646 CreateAudioSource(device, constraints, source_ready); |
567 CreateVideoSource( | 647 audio_source->SetStopCallback(base::Bind( |
568 device, | 648 &UserMediaClientImpl::OnLocalSourceStopped, weak_factory_.GetWeakPtr())); |
569 base::Bind(&UserMediaClientImpl::OnLocalSourceStopped, | 649 source.setExtraData(audio_source); // Takes ownership. |
570 weak_factory_.GetWeakPtr()))); | 650 return source; |
571 } else { | |
572 DCHECK_EQ(blink::WebMediaStreamSource::TypeAudio, type); | |
573 MediaStreamAudioSource* const audio_source = | |
574 CreateAudioSource(device, constraints); | |
575 audio_source->SetStopCallback( | |
576 base::Bind(&UserMediaClientImpl::OnLocalSourceStopped, | |
577 weak_factory_.GetWeakPtr())); | |
578 webkit_source->setExtraData(audio_source); // Takes ownership. | |
579 } | |
580 local_sources_.push_back(*webkit_source); | |
581 } | 651 } |
582 | 652 |
583 MediaStreamAudioSource* UserMediaClientImpl::CreateAudioSource( | 653 MediaStreamAudioSource* UserMediaClientImpl::CreateAudioSource( |
584 const StreamDeviceInfo& device, | 654 const StreamDeviceInfo& device, |
585 const blink::WebMediaConstraints& constraints) { | 655 const blink::WebMediaConstraints& constraints, |
| 656 const MediaStreamSource::ConstraintsCallback& source_ready) { |
| 657 DCHECK(CalledOnValidThread()); |
586 // If the audio device is a loopback device (for screen capture), or if the | 658 // If the audio device is a loopback device (for screen capture), or if the |
587 // constraints/effects parameters indicate no audio processing is needed, | 659 // constraints/effects parameters indicate no audio processing is needed, |
588 // create an efficient, direct-path MediaStreamAudioSource instance. | 660 // create an efficient, direct-path MediaStreamAudioSource instance. |
589 if (IsScreenCaptureMediaType(device.device.type) || | 661 if (IsScreenCaptureMediaType(device.device.type) || |
590 !MediaStreamAudioProcessor::WouldModifyAudio( | 662 !MediaStreamAudioProcessor::WouldModifyAudio( |
591 constraints, device.device.input.effects)) { | 663 constraints, device.device.input.effects)) { |
592 return new LocalMediaStreamAudioSource(RenderFrameObserver::routing_id(), | 664 return new LocalMediaStreamAudioSource(RenderFrameObserver::routing_id(), |
593 device); | 665 device, source_ready); |
594 } | 666 } |
595 | 667 |
596 // The audio device is not associated with screen capture and also requires | 668 // The audio device is not associated with screen capture and also requires |
597 // processing. | 669 // processing. |
598 ProcessedLocalAudioSource* source = new ProcessedLocalAudioSource( | 670 ProcessedLocalAudioSource* source = new ProcessedLocalAudioSource( |
599 RenderFrameObserver::routing_id(), device, dependency_factory_); | 671 RenderFrameObserver::routing_id(), device, constraints, source_ready, |
600 source->SetSourceConstraints(constraints); | 672 dependency_factory_); |
601 return source; | 673 return source; |
602 } | 674 } |
603 | 675 |
604 MediaStreamVideoSource* UserMediaClientImpl::CreateVideoSource( | 676 MediaStreamVideoSource* UserMediaClientImpl::CreateVideoSource( |
605 const StreamDeviceInfo& device, | 677 const StreamDeviceInfo& device, |
606 const MediaStreamSource::SourceStoppedCallback& stop_callback) { | 678 const MediaStreamSource::SourceStoppedCallback& stop_callback) { |
| 679 DCHECK(CalledOnValidThread()); |
607 content::MediaStreamVideoCapturerSource* ret = | 680 content::MediaStreamVideoCapturerSource* ret = |
608 new content::MediaStreamVideoCapturerSource(stop_callback, device, | 681 new content::MediaStreamVideoCapturerSource(stop_callback, device, |
609 render_frame()); | 682 render_frame()); |
610 return ret; | 683 return ret; |
611 } | 684 } |
612 | 685 |
613 void UserMediaClientImpl::CreateVideoTracks( | 686 void UserMediaClientImpl::CreateVideoTracks( |
614 const StreamDeviceInfoArray& devices, | 687 const StreamDeviceInfoArray& devices, |
615 const blink::WebMediaConstraints& constraints, | 688 const blink::WebMediaConstraints& constraints, |
616 blink::WebVector<blink::WebMediaStreamTrack>* webkit_tracks, | 689 blink::WebVector<blink::WebMediaStreamTrack>* webkit_tracks, |
617 UserMediaRequestInfo* request) { | 690 UserMediaRequestInfo* request) { |
| 691 DCHECK(CalledOnValidThread()); |
618 DCHECK_EQ(devices.size(), webkit_tracks->size()); | 692 DCHECK_EQ(devices.size(), webkit_tracks->size()); |
619 | 693 |
620 for (size_t i = 0; i < devices.size(); ++i) { | 694 for (size_t i = 0; i < devices.size(); ++i) { |
621 blink::WebMediaStreamSource webkit_source; | 695 blink::WebMediaStreamSource source = |
622 InitializeSourceObject(devices[i], | 696 InitializeVideoSourceObject(devices[i], constraints); |
623 blink::WebMediaStreamSource::TypeVideo, | |
624 constraints, | |
625 &webkit_source); | |
626 (*webkit_tracks)[i] = | 697 (*webkit_tracks)[i] = |
627 request->CreateAndStartVideoTrack(webkit_source, constraints); | 698 request->CreateAndStartVideoTrack(source, constraints); |
628 } | 699 } |
629 } | 700 } |
630 | 701 |
631 void UserMediaClientImpl::CreateAudioTracks( | 702 void UserMediaClientImpl::CreateAudioTracks( |
632 const StreamDeviceInfoArray& devices, | 703 const StreamDeviceInfoArray& devices, |
633 const blink::WebMediaConstraints& constraints, | 704 const blink::WebMediaConstraints& constraints, |
634 blink::WebVector<blink::WebMediaStreamTrack>* webkit_tracks, | 705 blink::WebVector<blink::WebMediaStreamTrack>* webkit_tracks, |
635 UserMediaRequestInfo* request) { | 706 UserMediaRequestInfo* request) { |
| 707 DCHECK(CalledOnValidThread()); |
636 DCHECK_EQ(devices.size(), webkit_tracks->size()); | 708 DCHECK_EQ(devices.size(), webkit_tracks->size()); |
637 | 709 |
638 // Log the device names for this request. | |
639 for (StreamDeviceInfoArray::const_iterator it = devices.begin(); | |
640 it != devices.end(); ++it) { | |
641 WebRtcLogMessage(base::StringPrintf( | |
642 "Generated media stream for request id %d contains audio device name" | |
643 " \"%s\"", | |
644 request->request_id, | |
645 it->device.name.c_str())); | |
646 } | |
647 | |
648 StreamDeviceInfoArray overridden_audio_array = devices; | 710 StreamDeviceInfoArray overridden_audio_array = devices; |
649 if (!request->enable_automatic_output_device_selection) { | 711 if (!request->enable_automatic_output_device_selection) { |
650 // If the GetUserMedia request did not explicitly set the constraint | 712 // If the GetUserMedia request did not explicitly set the constraint |
651 // kMediaStreamRenderToAssociatedSink, the output device parameters must | 713 // kMediaStreamRenderToAssociatedSink, the output device parameters must |
652 // be removed. | 714 // be removed. |
653 for (StreamDeviceInfoArray::iterator it = overridden_audio_array.begin(); | 715 for (auto& device_info : overridden_audio_array) { |
654 it != overridden_audio_array.end(); ++it) { | 716 device_info.device.matched_output_device_id = ""; |
655 it->device.matched_output_device_id = ""; | 717 device_info.device.matched_output = |
656 it->device.matched_output = MediaStreamDevice::AudioDeviceParameters(); | 718 MediaStreamDevice::AudioDeviceParameters(); |
657 } | 719 } |
658 } | 720 } |
659 | 721 |
660 for (size_t i = 0; i < overridden_audio_array.size(); ++i) { | 722 for (size_t i = 0; i < overridden_audio_array.size(); ++i) { |
661 blink::WebMediaStreamSource webkit_source; | 723 bool is_pending = false; |
662 InitializeSourceObject(overridden_audio_array[i], | 724 blink::WebMediaStreamSource source = InitializeAudioSourceObject( |
663 blink::WebMediaStreamSource::TypeAudio, | 725 overridden_audio_array[i], constraints, &is_pending); |
664 constraints, | 726 (*webkit_tracks)[i].initialize(source); |
665 &webkit_source); | 727 request->StartAudioTrack((*webkit_tracks)[i], is_pending); |
666 (*webkit_tracks)[i].initialize(webkit_source); | |
667 request->StartAudioTrack((*webkit_tracks)[i]); | |
668 } | 728 } |
669 } | 729 } |
670 | 730 |
671 void UserMediaClientImpl::OnCreateNativeTracksCompleted( | 731 void UserMediaClientImpl::OnCreateNativeTracksCompleted( |
672 UserMediaRequestInfo* request, | 732 UserMediaRequestInfo* request, |
673 MediaStreamRequestResult result, | 733 MediaStreamRequestResult result, |
674 const blink::WebString& result_name) { | 734 const blink::WebString& result_name) { |
| 735 DCHECK(CalledOnValidThread()); |
675 DVLOG(1) << "UserMediaClientImpl::OnCreateNativeTracksComplete(" | 736 DVLOG(1) << "UserMediaClientImpl::OnCreateNativeTracksComplete(" |
676 << "{request_id = " << request->request_id << "} " | 737 << "{request_id = " << request->request_id << "} " |
677 << "{result = " << result << "})"; | 738 << "{result = " << result << "})"; |
678 | 739 |
679 if (result == content::MEDIA_DEVICE_OK) { | 740 if (result == content::MEDIA_DEVICE_OK) { |
680 GetUserMediaRequestSucceeded(request->web_stream, request->request); | 741 GetUserMediaRequestSucceeded(request->web_stream, request->request); |
681 } else { | 742 } else { |
682 GetUserMediaRequestFailed(request->request, result, result_name); | 743 GetUserMediaRequestFailed(request->request, result, result_name); |
683 | 744 |
684 blink::WebVector<blink::WebMediaStreamTrack> tracks; | 745 blink::WebVector<blink::WebMediaStreamTrack> tracks; |
(...skipping 126 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
811 request_info.requestFailed(); | 872 request_info.requestFailed(); |
812 } | 873 } |
813 | 874 |
814 void UserMediaClientImpl::EnumerateDevicesSucceded( | 875 void UserMediaClientImpl::EnumerateDevicesSucceded( |
815 blink::WebMediaDevicesRequest* request, | 876 blink::WebMediaDevicesRequest* request, |
816 blink::WebVector<blink::WebMediaDeviceInfo>& devices) { | 877 blink::WebVector<blink::WebMediaDeviceInfo>& devices) { |
817 request->requestSucceeded(devices); | 878 request->requestSucceeded(devices); |
818 } | 879 } |
819 | 880 |
820 const blink::WebMediaStreamSource* UserMediaClientImpl::FindLocalSource( | 881 const blink::WebMediaStreamSource* UserMediaClientImpl::FindLocalSource( |
| 882 const LocalStreamSources& sources, |
821 const StreamDeviceInfo& device) const { | 883 const StreamDeviceInfo& device) const { |
822 for (LocalStreamSources::const_iterator it = local_sources_.begin(); | 884 for (const auto& local_source : sources) { |
823 it != local_sources_.end(); ++it) { | |
824 MediaStreamSource* const source = | 885 MediaStreamSource* const source = |
825 static_cast<MediaStreamSource*>(it->getExtraData()); | 886 static_cast<MediaStreamSource*>(local_source.getExtraData()); |
826 const StreamDeviceInfo& active_device = source->device_info(); | 887 const StreamDeviceInfo& active_device = source->device_info(); |
827 if (IsSameDevice(active_device, device)) { | 888 if (IsSameDevice(active_device, device)) |
828 return &(*it); | 889 return &local_source; |
829 } | |
830 } | 890 } |
831 return NULL; | 891 return nullptr; |
| 892 } |
| 893 |
| 894 blink::WebMediaStreamSource UserMediaClientImpl::FindOrInitializeSourceObject( |
| 895 const StreamDeviceInfo& device) { |
| 896 const blink::WebMediaStreamSource* existing_source = FindLocalSource(device); |
| 897 if (existing_source) { |
| 898 DVLOG(1) << "Source already exists. Reusing source with id " |
| 899 << existing_source->id().utf8(); |
| 900 return *existing_source; |
| 901 } |
| 902 |
| 903 blink::WebMediaStreamSource::Type type = |
| 904 IsAudioInputMediaType(device.device.type) |
| 905 ? blink::WebMediaStreamSource::TypeAudio |
| 906 : blink::WebMediaStreamSource::TypeVideo; |
| 907 |
| 908 blink::WebMediaStreamSource source; |
| 909 source.initialize(blink::WebString::fromUTF8(device.device.id), type, |
| 910 blink::WebString::fromUTF8(device.device.name), |
| 911 false /* remote */); |
| 912 |
| 913 DVLOG(1) << "Initialize source object :" |
| 914 << "id = " << source.id().utf8() |
| 915 << ", name = " << source.name().utf8(); |
| 916 return source; |
832 } | 917 } |
833 | 918 |
834 bool UserMediaClientImpl::RemoveLocalSource( | 919 bool UserMediaClientImpl::RemoveLocalSource( |
835 const blink::WebMediaStreamSource& source) { | 920 const blink::WebMediaStreamSource& source) { |
836 bool device_found = false; | 921 DCHECK(CalledOnValidThread()); |
| 922 |
837 for (LocalStreamSources::iterator device_it = local_sources_.begin(); | 923 for (LocalStreamSources::iterator device_it = local_sources_.begin(); |
838 device_it != local_sources_.end(); ++device_it) { | 924 device_it != local_sources_.end(); ++device_it) { |
839 if (IsSameSource(*device_it, source)) { | 925 if (IsSameSource(*device_it, source)) { |
840 device_found = true; | |
841 local_sources_.erase(device_it); | 926 local_sources_.erase(device_it); |
842 break; | 927 return true; |
843 } | 928 } |
844 } | 929 } |
845 return device_found; | 930 |
| 931 // Check if the source was pending. |
| 932 for (LocalStreamSources::iterator device_it = pending_local_sources_.begin(); |
| 933 device_it != pending_local_sources_.end(); ++device_it) { |
| 934 if (IsSameSource(*device_it, source)) { |
| 935 MediaStreamSource* const source_extra_data = |
| 936 static_cast<MediaStreamSource*>(source.getExtraData()); |
| 937 NotifyAllRequestsOfAudioSourceStarted( |
| 938 source_extra_data, MEDIA_DEVICE_TRACK_START_FAILURE, |
| 939 "Failed to access audio capture device"); |
| 940 pending_local_sources_.erase(device_it); |
| 941 return true; |
| 942 } |
| 943 } |
| 944 |
| 945 return false; |
846 } | 946 } |
847 | 947 |
848 UserMediaClientImpl::UserMediaRequestInfo* | 948 UserMediaClientImpl::UserMediaRequestInfo* |
849 UserMediaClientImpl::FindUserMediaRequestInfo(int request_id) { | 949 UserMediaClientImpl::FindUserMediaRequestInfo(int request_id) { |
850 UserMediaRequests::iterator it = user_media_requests_.begin(); | 950 DCHECK(CalledOnValidThread()); |
851 for (; it != user_media_requests_.end(); ++it) { | 951 for (auto& r : user_media_requests_) { |
852 if ((*it)->request_id == request_id) | 952 if (r->request_id == request_id) |
853 return (*it); | 953 return r.get(); |
854 } | 954 } |
855 return NULL; | 955 return nullptr; |
856 } | 956 } |
857 | 957 |
858 UserMediaClientImpl::UserMediaRequestInfo* | 958 UserMediaClientImpl::UserMediaRequestInfo* |
859 UserMediaClientImpl::FindUserMediaRequestInfo( | 959 UserMediaClientImpl::FindUserMediaRequestInfo( |
860 const blink::WebUserMediaRequest& request) { | 960 const blink::WebUserMediaRequest& request) { |
861 UserMediaRequests::iterator it = user_media_requests_.begin(); | 961 DCHECK(CalledOnValidThread()); |
862 for (; it != user_media_requests_.end(); ++it) { | 962 for (auto& r : user_media_requests_) { |
863 if ((*it)->request == request) | 963 if (r->request == request) |
864 return (*it); | 964 return r.get(); |
865 } | 965 } |
866 return NULL; | 966 return nullptr; |
867 } | 967 } |
868 | 968 |
869 void UserMediaClientImpl::DeleteUserMediaRequestInfo( | 969 void UserMediaClientImpl::DeleteUserMediaRequestInfo( |
870 UserMediaRequestInfo* request) { | 970 UserMediaRequestInfo* request) { |
871 UserMediaRequests::iterator it = user_media_requests_.begin(); | 971 DCHECK(CalledOnValidThread()); |
872 for (; it != user_media_requests_.end(); ++it) { | 972 auto new_end = |
873 if ((*it) == request) { | 973 std::remove_if(user_media_requests_.begin(), user_media_requests_.end(), |
874 user_media_requests_.erase(it); | 974 [&](std::unique_ptr<UserMediaRequestInfo>& r) { |
875 return; | 975 return r.get() == request; |
876 } | 976 }); |
877 } | 977 DCHECK(new_end != user_media_requests_.end()); |
878 NOTREACHED(); | 978 user_media_requests_.erase(new_end, user_media_requests_.end()); |
879 } | 979 } |
880 | 980 |
881 void UserMediaClientImpl::DeleteAllUserMediaRequests() { | 981 void UserMediaClientImpl::DeleteAllUserMediaRequests() { |
882 UserMediaRequests::iterator request_it = user_media_requests_.begin(); | 982 UserMediaRequests::iterator request_it = user_media_requests_.begin(); |
883 while (request_it != user_media_requests_.end()) { | 983 while (request_it != user_media_requests_.end()) { |
884 DVLOG(1) << "UserMediaClientImpl@" << this | 984 DVLOG(1) << "UserMediaClientImpl@" << this |
885 << "::DeleteAllUserMediaRequests: " | 985 << "::DeleteAllUserMediaRequests: " |
886 << "Cancel user media request " << (*request_it)->request_id; | 986 << "Cancel user media request " << (*request_it)->request_id; |
887 // If the request is not generated, it means that a request | 987 // If the request is not generated, it means that a request |
888 // has been sent to the MediaStreamDispatcher to generate a stream | 988 // has been sent to the MediaStreamDispatcher to generate a stream |
(...skipping 79 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
968 request(request), | 1068 request(request), |
969 request_result_(MEDIA_DEVICE_OK), | 1069 request_result_(MEDIA_DEVICE_OK), |
970 request_result_name_("") { | 1070 request_result_name_("") { |
971 } | 1071 } |
972 | 1072 |
973 UserMediaClientImpl::UserMediaRequestInfo::~UserMediaRequestInfo() { | 1073 UserMediaClientImpl::UserMediaRequestInfo::~UserMediaRequestInfo() { |
974 DVLOG(1) << "~UserMediaRequestInfo"; | 1074 DVLOG(1) << "~UserMediaRequestInfo"; |
975 } | 1075 } |
976 | 1076 |
977 void UserMediaClientImpl::UserMediaRequestInfo::StartAudioTrack( | 1077 void UserMediaClientImpl::UserMediaRequestInfo::StartAudioTrack( |
978 const blink::WebMediaStreamTrack& track) { | 1078 const blink::WebMediaStreamTrack& track, |
| 1079 bool is_pending) { |
979 DCHECK(track.source().getType() == blink::WebMediaStreamSource::TypeAudio); | 1080 DCHECK(track.source().getType() == blink::WebMediaStreamSource::TypeAudio); |
980 MediaStreamAudioSource* native_source = | 1081 MediaStreamAudioSource* native_source = |
981 MediaStreamAudioSource::From(track.source()); | 1082 MediaStreamAudioSource::From(track.source()); |
982 DCHECK(native_source); | 1083 // Add the source as pending since OnTrackStarted will expect it to be there. |
| 1084 sources_waiting_for_callback_.push_back(native_source); |
983 | 1085 |
984 sources_.push_back(track.source()); | 1086 sources_.push_back(track.source()); |
985 sources_waiting_for_callback_.push_back(native_source); | 1087 bool connected = native_source->ConnectToTrack(track); |
986 if (native_source->ConnectToTrack(track)) | 1088 if (!is_pending) { |
| 1089 OnTrackStarted( |
| 1090 native_source, |
| 1091 connected ? MEDIA_DEVICE_OK : MEDIA_DEVICE_TRACK_START_FAILURE, ""); |
| 1092 #if defined(OS_ANDROID) |
| 1093 } else if (connected) { |
| 1094 CHECK(native_source->is_local_source()); |
| 1095 // On Android, we won't get the callback indicating the device readyness. |
| 1096 // TODO(tommi): Update the android implementation to support the |
| 1097 // OnAudioSourceStarted notification. http://crbug.com/679302 |
987 OnTrackStarted(native_source, MEDIA_DEVICE_OK, ""); | 1098 OnTrackStarted(native_source, MEDIA_DEVICE_OK, ""); |
988 else | 1099 #endif |
989 OnTrackStarted(native_source, MEDIA_DEVICE_TRACK_START_FAILURE, ""); | 1100 } |
990 } | 1101 } |
991 | 1102 |
992 blink::WebMediaStreamTrack | 1103 blink::WebMediaStreamTrack |
993 UserMediaClientImpl::UserMediaRequestInfo::CreateAndStartVideoTrack( | 1104 UserMediaClientImpl::UserMediaRequestInfo::CreateAndStartVideoTrack( |
994 const blink::WebMediaStreamSource& source, | 1105 const blink::WebMediaStreamSource& source, |
995 const blink::WebMediaConstraints& constraints) { | 1106 const blink::WebMediaConstraints& constraints) { |
996 DCHECK(source.getType() == blink::WebMediaStreamSource::TypeVideo); | 1107 DCHECK(source.getType() == blink::WebMediaStreamSource::TypeVideo); |
997 MediaStreamVideoSource* native_source = | 1108 MediaStreamVideoSource* native_source = |
998 MediaStreamVideoSource::GetVideoSource(source); | 1109 MediaStreamVideoSource::GetVideoSource(source); |
999 DCHECK(native_source); | 1110 DCHECK(native_source); |
(...skipping 30 matching lines...) Expand all Loading... |
1030 request_result_ = result; | 1141 request_result_ = result; |
1031 request_result_name_ = result_name; | 1142 request_result_name_ = result_name; |
1032 } | 1143 } |
1033 | 1144 |
1034 CheckAllTracksStarted(); | 1145 CheckAllTracksStarted(); |
1035 } | 1146 } |
1036 | 1147 |
1037 void UserMediaClientImpl::UserMediaRequestInfo::CheckAllTracksStarted() { | 1148 void UserMediaClientImpl::UserMediaRequestInfo::CheckAllTracksStarted() { |
1038 if (!ready_callback_.is_null() && sources_waiting_for_callback_.empty()) { | 1149 if (!ready_callback_.is_null() && sources_waiting_for_callback_.empty()) { |
1039 ready_callback_.Run(this, request_result_, request_result_name_); | 1150 ready_callback_.Run(this, request_result_, request_result_name_); |
| 1151 // NOTE: |this| might now be deleted. |
1040 } | 1152 } |
1041 } | 1153 } |
1042 | 1154 |
1043 bool UserMediaClientImpl::UserMediaRequestInfo::IsSourceUsed( | |
1044 const blink::WebMediaStreamSource& source) const { | |
1045 for (std::vector<blink::WebMediaStreamSource>::const_iterator source_it = | |
1046 sources_.begin(); | |
1047 source_it != sources_.end(); ++source_it) { | |
1048 if (source_it->id() == source.id()) | |
1049 return true; | |
1050 } | |
1051 return false; | |
1052 } | |
1053 | |
1054 void UserMediaClientImpl::UserMediaRequestInfo::RemoveSource( | |
1055 const blink::WebMediaStreamSource& source) { | |
1056 for (std::vector<blink::WebMediaStreamSource>::iterator it = | |
1057 sources_.begin(); | |
1058 it != sources_.end(); ++it) { | |
1059 if (source.id() == it->id()) { | |
1060 sources_.erase(it); | |
1061 return; | |
1062 } | |
1063 } | |
1064 } | |
1065 | |
// Returns true while at least one source added by StartAudioTrack /
// CreateAndStartVideoTrack has not yet reported a result via OnTrackStarted.
bool UserMediaClientImpl::UserMediaRequestInfo::HasPendingSources() const {
  return !sources_waiting_for_callback_.empty();
}
1069 | 1158 |
| 1159 void UserMediaClientImpl::UserMediaRequestInfo::OnAudioSourceStarted( |
| 1160 MediaStreamSource* source, |
| 1161 MediaStreamRequestResult result, |
| 1162 const blink::WebString& result_name) { |
| 1163 // Check if we're waiting to be notified of this source. If not, then we'll |
| 1164 // ignore the notification. |
| 1165 auto found = std::find(sources_waiting_for_callback_.begin(), |
| 1166 sources_waiting_for_callback_.end(), source); |
| 1167 if (found != sources_waiting_for_callback_.end()) |
| 1168 OnTrackStarted(source, result, result_name); |
| 1169 } |
| 1170 |
// Self-deletion hook. NOTE(review): presumably the
// RenderFrameObserver::OnDestruct() override run when the observed frame is
// destroyed -- confirm against the class declaration. Nothing may touch
// |this| after the call.
void UserMediaClientImpl::OnDestruct() {
  delete this;
}
1073 | 1174 |
1074 } // namespace content | 1175 } // namespace content |
OLD | NEW |