OLD | NEW |
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/renderer/media/user_media_client_impl.h" | 5 #include "content/renderer/media/user_media_client_impl.h" |
6 | 6 |
7 #include <stddef.h> | 7 #include <stddef.h> |
8 | 8 |
9 #include <algorithm> | 9 #include <algorithm> |
10 #include <utility> | 10 #include <utility> |
(...skipping 414 matching lines...) |
425 UserMediaRequestInfo* request_info = FindUserMediaRequestInfo(request_id); | 425 UserMediaRequestInfo* request_info = FindUserMediaRequestInfo(request_id); |
426 if (!request_info) { | 426 if (!request_info) { |
427 // This can happen if the request is canceled or the frame reloads while | 427 // This can happen if the request is canceled or the frame reloads while |
428 // MediaStreamDispatcher is processing the request. | 428 // MediaStreamDispatcher is processing the request. |
429 DVLOG(1) << "Request ID not found"; | 429 DVLOG(1) << "Request ID not found"; |
430 OnStreamGeneratedForCancelledRequest(audio_array, video_array); | 430 OnStreamGeneratedForCancelledRequest(audio_array, video_array); |
431 return; | 431 return; |
432 } | 432 } |
433 request_info->generated = true; | 433 request_info->generated = true; |
434 | 434 |
435 for (const auto* array : {&audio_array, &video_array}) { | |
436 for (const auto& info : *array) { | |
437 WebRtcLogMessage(base::StringPrintf("Request %d for device \"%s\"", | |
438 request_id, | |
439 info.device.name.c_str())); | |
440 } | |
441 } | |
442 | |
443 DCHECK(!request_info->request.isNull()); | 435 DCHECK(!request_info->request.isNull()); |
444 blink::WebVector<blink::WebMediaStreamTrack> audio_track_vector( | 436 blink::WebVector<blink::WebMediaStreamTrack> audio_track_vector( |
445 audio_array.size()); | 437 audio_array.size()); |
446 CreateAudioTracks(audio_array, request_info->request.audioConstraints(), | 438 CreateAudioTracks(audio_array, request_info->request.audioConstraints(), |
447 &audio_track_vector, request_info); | 439 &audio_track_vector, request_info); |
448 | 440 |
449 blink::WebVector<blink::WebMediaStreamTrack> video_track_vector( | 441 blink::WebVector<blink::WebMediaStreamTrack> video_track_vector( |
450 video_array.size()); | 442 video_array.size()); |
451 CreateVideoTracks(video_array, request_info->request.videoConstraints(), | 443 CreateVideoTracks(video_array, request_info->request.videoConstraints(), |
452 &video_track_vector, request_info); | 444 &video_track_vector, request_info); |
453 | 445 |
454 blink::WebString webkit_id = blink::WebString::fromUTF8(label); | 446 blink::WebString webkit_id = blink::WebString::fromUTF8(label); |
455 blink::WebMediaStream* web_stream = &(request_info->web_stream); | 447 blink::WebMediaStream* web_stream = &(request_info->web_stream); |
456 | 448 |
457 web_stream->initialize(webkit_id, audio_track_vector, video_track_vector); | 449 web_stream->initialize(webkit_id, audio_track_vector, |
| 450 video_track_vector); |
458 web_stream->setExtraData(new MediaStream()); | 451 web_stream->setExtraData(new MediaStream()); |
459 | 452 |
460 // Wait for the tracks to be started successfully or to fail. | 453 // Wait for the tracks to be started successfully or to fail. |
461 request_info->CallbackOnTracksStarted( | 454 request_info->CallbackOnTracksStarted( |
462 base::Bind(&UserMediaClientImpl::OnCreateNativeTracksCompleted, | 455 base::Bind(&UserMediaClientImpl::OnCreateNativeTracksCompleted, |
463 weak_factory_.GetWeakPtr())); | 456 weak_factory_.GetWeakPtr())); |
464 } | 457 } |
465 | 458 |
466 void UserMediaClientImpl::OnStreamGeneratedForCancelledRequest( | 459 void UserMediaClientImpl::OnStreamGeneratedForCancelledRequest( |
467 const StreamDeviceInfoArray& audio_array, | 460 const StreamDeviceInfoArray& audio_array, |
468 const StreamDeviceInfoArray& video_array) { | 461 const StreamDeviceInfoArray& video_array) { |
469 // Only stop the device if the device is not used in another MediaStream. | 462 // Only stop the device if the device is not used in another MediaStream. |
470 for (StreamDeviceInfoArray::const_iterator device_it = audio_array.begin(); | 463 for (StreamDeviceInfoArray::const_iterator device_it = audio_array.begin(); |
471 device_it != audio_array.end(); ++device_it) { | 464 device_it != audio_array.end(); ++device_it) { |
472 if (!FindLocalSource(*device_it)) | 465 if (!FindLocalSource(*device_it)) |
473 media_stream_dispatcher_->StopStreamDevice(*device_it); | 466 media_stream_dispatcher_->StopStreamDevice(*device_it); |
474 } | 467 } |
475 | 468 |
476 for (StreamDeviceInfoArray::const_iterator device_it = video_array.begin(); | 469 for (StreamDeviceInfoArray::const_iterator device_it = video_array.begin(); |
477 device_it != video_array.end(); ++device_it) { | 470 device_it != video_array.end(); ++device_it) { |
478 if (!FindLocalSource(*device_it)) | 471 if (!FindLocalSource(*device_it)) |
479 media_stream_dispatcher_->StopStreamDevice(*device_it); | 472 media_stream_dispatcher_->StopStreamDevice(*device_it); |
480 } | 473 } |
481 } | 474 } |
482 | 475 |
483 // static | |
484 void UserMediaClientImpl::OnAudioSourceStartedOnAudioThread( | |
485 scoped_refptr<base::SingleThreadTaskRunner> task_runner, | |
486 base::WeakPtr<UserMediaClientImpl> weak_ptr, | |
487 MediaStreamSource* source, | |
488 MediaStreamRequestResult result, | |
489 const blink::WebString& result_name) { | |
490 task_runner->PostTask( | |
491 FROM_HERE, | |
492 base::Bind(&UserMediaClientImpl::OnAudioSourceStartedOnMainThread, | |
493 weak_ptr, source, result, result_name)); | |
494 } | |
495 | |
496 // static | |
497 void UserMediaClientImpl::OnAudioSourceStartedOnMainThread( | |
498 base::WeakPtr<UserMediaClientImpl> weak_ptr, | |
499 MediaStreamSource* source, | |
500 MediaStreamRequestResult result, | |
501 const blink::WebString& result_name) { | |
502 // The purpose of this function is just to check the weak pointer. | |
503 // Some content_browsertests may exit and delete the UserMediaClientImpl | |
504 // instance in an incomplete state with outstanding notifications. | |
505 if (weak_ptr) | |
506 weak_ptr->OnAudioSourceStarted(source, result, result_name); | |
507 } | |
508 | |
509 void UserMediaClientImpl::OnAudioSourceStarted( | |
510 MediaStreamSource* source, | |
511 MediaStreamRequestResult result, | |
512 const blink::WebString& result_name) { | |
513 DCHECK(CalledOnValidThread()); | |
514 | |
515 for (auto it = pending_local_sources_.begin(); | |
516 it != pending_local_sources_.end(); ++it) { | |
517 MediaStreamSource* const source_extra_data = | |
518 static_cast<MediaStreamSource*>((*it).getExtraData()); | |
519 if (source_extra_data == source) { | |
520 if (result == MEDIA_DEVICE_OK) | |
521 local_sources_.push_back((*it)); | |
522 pending_local_sources_.erase(it); | |
523 for (const auto& request : user_media_requests_) | |
524 request->OnAudioSourceStarted(source, result, result_name); | |
525 return; | |
526 } | |
527 } | |
528 NOTREACHED(); | |
529 } | |
530 | |
531 void UserMediaClientImpl::FinalizeEnumerateDevices( | 476 void UserMediaClientImpl::FinalizeEnumerateDevices( |
532 blink::WebMediaDevicesRequest request, | 477 blink::WebMediaDevicesRequest request, |
533 const EnumerationResult& result) { | 478 const EnumerationResult& result) { |
534 DCHECK_EQ(static_cast<size_t>(NUM_MEDIA_DEVICE_TYPES), result.size()); | 479 DCHECK_EQ(static_cast<size_t>(NUM_MEDIA_DEVICE_TYPES), result.size()); |
535 | 480 |
536 blink::WebVector<blink::WebMediaDeviceInfo> devices( | 481 blink::WebVector<blink::WebMediaDeviceInfo> devices( |
537 result[MEDIA_DEVICE_TYPE_AUDIO_INPUT].size() + | 482 result[MEDIA_DEVICE_TYPE_AUDIO_INPUT].size() + |
538 result[MEDIA_DEVICE_TYPE_VIDEO_INPUT].size() + | 483 result[MEDIA_DEVICE_TYPE_VIDEO_INPUT].size() + |
539 result[MEDIA_DEVICE_TYPE_AUDIO_OUTPUT].size()); | 484 result[MEDIA_DEVICE_TYPE_AUDIO_OUTPUT].size()); |
540 size_t index = 0; | 485 size_t index = 0; |
(...skipping 47 matching lines...) |
588 // as the underlying media device is unplugged from the system. | 533 // as the underlying media device is unplugged from the system. |
589 return; | 534 return; |
590 } | 535 } |
591 // By creating |source| it is guaranteed that the blink::WebMediaStreamSource | 536 // By creating |source| it is guaranteed that the blink::WebMediaStreamSource |
592 // object is valid during the cleanup. | 537 // object is valid during the cleanup. |
593 blink::WebMediaStreamSource source(*source_ptr); | 538 blink::WebMediaStreamSource source(*source_ptr); |
594 StopLocalSource(source, false); | 539 StopLocalSource(source, false); |
595 RemoveLocalSource(source); | 540 RemoveLocalSource(source); |
596 } | 541 } |
597 | 542 |
598 void UserMediaClientImpl::InitializeVideoSourceObject( | 543 void UserMediaClientImpl::InitializeSourceObject( |
599 const StreamDeviceInfo& device, | 544 const StreamDeviceInfo& device, |
| 545 blink::WebMediaStreamSource::Type type, |
600 const blink::WebMediaConstraints& constraints, | 546 const blink::WebMediaConstraints& constraints, |
601 blink::WebMediaStreamSource* webkit_source) { | 547 blink::WebMediaStreamSource* webkit_source) { |
602 DCHECK(CalledOnValidThread()); | 548 const blink::WebMediaStreamSource* existing_source = |
603 | 549 FindLocalSource(device); |
604 *webkit_source = FindOrInitializeSourceObject(device); | 550 if (existing_source) { |
605 if (webkit_source->getExtraData()) | 551 *webkit_source = *existing_source; |
606 return; | 552 DVLOG(1) << "Source already exists. Reusing source with id " |
607 | 553 << webkit_source->id().utf8(); |
608 webkit_source->setExtraData(CreateVideoSource( | |
609 device, base::Bind(&UserMediaClientImpl::OnLocalSourceStopped, | |
610 weak_factory_.GetWeakPtr()))); | |
611 local_sources_.push_back(*webkit_source); | |
612 } | |
613 | |
614 void UserMediaClientImpl::InitializeAudioSourceObject( | |
615 const StreamDeviceInfo& device, | |
616 const blink::WebMediaConstraints& constraints, | |
617 blink::WebMediaStreamSource* webkit_source, | |
618 bool* source_initialized) { | |
619 DCHECK(CalledOnValidThread()); | |
620 | |
621 *webkit_source = FindOrInitializeSourceObject(device); | |
622 if (webkit_source->getExtraData()) { | |
623 *source_initialized = true; | |
624 return; | 554 return; |
625 } | 555 } |
626 | 556 |
627 *source_initialized = false; | 557 webkit_source->initialize(blink::WebString::fromUTF8(device.device.id), type, |
| 558 blink::WebString::fromUTF8(device.device.name), |
| 559 false /* remote */); |
628 | 560 |
629 // See if the source is already being initialized. | 561 DVLOG(1) << "Initialize source object :" |
630 auto* pending = FindPendingLocalSource(device); | 562 << "id = " << webkit_source->id().utf8() |
631 if (pending) { | 563 << ", name = " << webkit_source->name().utf8(); |
632 *webkit_source = *pending; | 564 |
633 return; | 565 if (type == blink::WebMediaStreamSource::TypeVideo) { |
| 566 webkit_source->setExtraData( |
| 567 CreateVideoSource( |
| 568 device, |
| 569 base::Bind(&UserMediaClientImpl::OnLocalSourceStopped, |
| 570 weak_factory_.GetWeakPtr()))); |
| 571 } else { |
| 572 DCHECK_EQ(blink::WebMediaStreamSource::TypeAudio, type); |
| 573 MediaStreamAudioSource* const audio_source = |
| 574 CreateAudioSource(device, constraints); |
| 575 audio_source->SetStopCallback( |
| 576 base::Bind(&UserMediaClientImpl::OnLocalSourceStopped, |
| 577 weak_factory_.GetWeakPtr())); |
| 578 webkit_source->setExtraData(audio_source); // Takes ownership. |
634 } | 579 } |
635 | 580 local_sources_.push_back(*webkit_source); |
636 // While sources are being initialized, keep them in a separate array. | |
637 // Once they've finished initializing, they'll be moved over to local_sources_. |
638 // See OnAudioSourceStarted for more details. | |
639 pending_local_sources_.push_back(*webkit_source); | |
640 | |
641 MediaStreamSource::ConstraintsCallback source_ready = base::Bind( | |
642 &UserMediaClientImpl::OnAudioSourceStartedOnAudioThread, | |
643 base::ThreadTaskRunnerHandle::Get(), weak_factory_.GetWeakPtr()); | |
644 | |
645 MediaStreamAudioSource* const audio_source = | |
646 CreateAudioSource(device, constraints, source_ready); | |
647 audio_source->SetStopCallback(base::Bind( | |
648 &UserMediaClientImpl::OnLocalSourceStopped, weak_factory_.GetWeakPtr())); | |
649 webkit_source->setExtraData(audio_source); // Takes ownership. | |
650 } | 581 } |
651 | 582 |
652 MediaStreamAudioSource* UserMediaClientImpl::CreateAudioSource( | 583 MediaStreamAudioSource* UserMediaClientImpl::CreateAudioSource( |
653 const StreamDeviceInfo& device, | 584 const StreamDeviceInfo& device, |
654 const blink::WebMediaConstraints& constraints, | 585 const blink::WebMediaConstraints& constraints) { |
655 const MediaStreamSource::ConstraintsCallback& source_ready) { | |
656 DCHECK(CalledOnValidThread()); | |
657 // If the audio device is a loopback device (for screen capture), or if the | 586 // If the audio device is a loopback device (for screen capture), or if the |
658 // constraints/effects parameters indicate no audio processing is needed, | 587 // constraints/effects parameters indicate no audio processing is needed, |
659 // create an efficient, direct-path MediaStreamAudioSource instance. | 588 // create an efficient, direct-path MediaStreamAudioSource instance. |
660 if (IsScreenCaptureMediaType(device.device.type) || | 589 if (IsScreenCaptureMediaType(device.device.type) || |
661 !MediaStreamAudioProcessor::WouldModifyAudio( | 590 !MediaStreamAudioProcessor::WouldModifyAudio( |
662 constraints, device.device.input.effects)) { | 591 constraints, device.device.input.effects)) { |
663 return new LocalMediaStreamAudioSource(RenderFrameObserver::routing_id(), | 592 return new LocalMediaStreamAudioSource(RenderFrameObserver::routing_id(), |
664 device, source_ready); | 593 device); |
665 } | 594 } |
666 | 595 |
667 // The audio device is not associated with screen capture and also requires | 596 // The audio device is not associated with screen capture and also requires |
668 // processing. | 597 // processing. |
669 ProcessedLocalAudioSource* source = new ProcessedLocalAudioSource( | 598 ProcessedLocalAudioSource* source = new ProcessedLocalAudioSource( |
670 RenderFrameObserver::routing_id(), device, constraints, source_ready, | 599 RenderFrameObserver::routing_id(), device, dependency_factory_); |
671 dependency_factory_); | 600 source->SetSourceConstraints(constraints); |
672 return source; | 601 return source; |
673 } | 602 } |
674 | 603 |
675 MediaStreamVideoSource* UserMediaClientImpl::CreateVideoSource( | 604 MediaStreamVideoSource* UserMediaClientImpl::CreateVideoSource( |
676 const StreamDeviceInfo& device, | 605 const StreamDeviceInfo& device, |
677 const MediaStreamSource::SourceStoppedCallback& stop_callback) { | 606 const MediaStreamSource::SourceStoppedCallback& stop_callback) { |
678 DCHECK(CalledOnValidThread()); | |
679 content::MediaStreamVideoCapturerSource* ret = | 607 content::MediaStreamVideoCapturerSource* ret = |
680 new content::MediaStreamVideoCapturerSource(stop_callback, device, | 608 new content::MediaStreamVideoCapturerSource(stop_callback, device, |
681 render_frame()); | 609 render_frame()); |
682 return ret; | 610 return ret; |
683 } | 611 } |
684 | 612 |
685 void UserMediaClientImpl::CreateVideoTracks( | 613 void UserMediaClientImpl::CreateVideoTracks( |
686 const StreamDeviceInfoArray& devices, | 614 const StreamDeviceInfoArray& devices, |
687 const blink::WebMediaConstraints& constraints, | 615 const blink::WebMediaConstraints& constraints, |
688 blink::WebVector<blink::WebMediaStreamTrack>* webkit_tracks, | 616 blink::WebVector<blink::WebMediaStreamTrack>* webkit_tracks, |
689 UserMediaRequestInfo* request) { | 617 UserMediaRequestInfo* request) { |
690 DCHECK(CalledOnValidThread()); | |
691 DCHECK_EQ(devices.size(), webkit_tracks->size()); | 618 DCHECK_EQ(devices.size(), webkit_tracks->size()); |
692 | 619 |
693 for (size_t i = 0; i < devices.size(); ++i) { | 620 for (size_t i = 0; i < devices.size(); ++i) { |
694 blink::WebMediaStreamSource webkit_source; | 621 blink::WebMediaStreamSource webkit_source; |
695 InitializeVideoSourceObject(devices[i], constraints, &webkit_source); | 622 InitializeSourceObject(devices[i], |
| 623 blink::WebMediaStreamSource::TypeVideo, |
| 624 constraints, |
| 625 &webkit_source); |
696 (*webkit_tracks)[i] = | 626 (*webkit_tracks)[i] = |
697 request->CreateAndStartVideoTrack(webkit_source, constraints); | 627 request->CreateAndStartVideoTrack(webkit_source, constraints); |
698 } | 628 } |
699 } | 629 } |
700 | 630 |
701 void UserMediaClientImpl::CreateAudioTracks( | 631 void UserMediaClientImpl::CreateAudioTracks( |
702 const StreamDeviceInfoArray& devices, | 632 const StreamDeviceInfoArray& devices, |
703 const blink::WebMediaConstraints& constraints, | 633 const blink::WebMediaConstraints& constraints, |
704 blink::WebVector<blink::WebMediaStreamTrack>* webkit_tracks, | 634 blink::WebVector<blink::WebMediaStreamTrack>* webkit_tracks, |
705 UserMediaRequestInfo* request) { | 635 UserMediaRequestInfo* request) { |
706 DCHECK(CalledOnValidThread()); | |
707 DCHECK_EQ(devices.size(), webkit_tracks->size()); | 636 DCHECK_EQ(devices.size(), webkit_tracks->size()); |
708 | 637 |
| 638 // Log the device names for this request. |
| 639 for (StreamDeviceInfoArray::const_iterator it = devices.begin(); |
| 640 it != devices.end(); ++it) { |
| 641 WebRtcLogMessage(base::StringPrintf( |
| 642 "Generated media stream for request id %d contains audio device name" |
| 643 " \"%s\"", |
| 644 request->request_id, |
| 645 it->device.name.c_str())); |
| 646 } |
| 647 |
709 StreamDeviceInfoArray overridden_audio_array = devices; | 648 StreamDeviceInfoArray overridden_audio_array = devices; |
710 if (!request->enable_automatic_output_device_selection) { | 649 if (!request->enable_automatic_output_device_selection) { |
711 // If the GetUserMedia request did not explicitly set the constraint | 650 // If the GetUserMedia request did not explicitly set the constraint |
712 // kMediaStreamRenderToAssociatedSink, the output device parameters must | 651 // kMediaStreamRenderToAssociatedSink, the output device parameters must |
713 // be removed. | 652 // be removed. |
714 for (auto& device_info : overridden_audio_array) { | 653 for (StreamDeviceInfoArray::iterator it = overridden_audio_array.begin(); |
715 device_info.device.matched_output_device_id = ""; | 654 it != overridden_audio_array.end(); ++it) { |
716 device_info.device.matched_output = | 655 it->device.matched_output_device_id = ""; |
717 MediaStreamDevice::AudioDeviceParameters(); | 656 it->device.matched_output = MediaStreamDevice::AudioDeviceParameters(); |
718 } | 657 } |
719 } | 658 } |
720 | 659 |
721 for (size_t i = 0; i < overridden_audio_array.size(); ++i) { | 660 for (size_t i = 0; i < overridden_audio_array.size(); ++i) { |
722 blink::WebMediaStreamSource webkit_source; | 661 blink::WebMediaStreamSource webkit_source; |
723 bool source_initialized = true; | 662 InitializeSourceObject(overridden_audio_array[i], |
724 InitializeAudioSourceObject(overridden_audio_array[i], constraints, | 663 blink::WebMediaStreamSource::TypeAudio, |
725 &webkit_source, &source_initialized); | 664 constraints, |
| 665 &webkit_source); |
726 (*webkit_tracks)[i].initialize(webkit_source); | 666 (*webkit_tracks)[i].initialize(webkit_source); |
727 request->StartAudioTrack((*webkit_tracks)[i], source_initialized); | 667 request->StartAudioTrack((*webkit_tracks)[i]); |
728 } | 668 } |
729 } | 669 } |
730 | 670 |
731 void UserMediaClientImpl::OnCreateNativeTracksCompleted( | 671 void UserMediaClientImpl::OnCreateNativeTracksCompleted( |
732 UserMediaRequestInfo* request, | 672 UserMediaRequestInfo* request, |
733 MediaStreamRequestResult result, | 673 MediaStreamRequestResult result, |
734 const blink::WebString& result_name) { | 674 const blink::WebString& result_name) { |
735 DVLOG(1) << "UserMediaClientImpl::OnCreateNativeTracksComplete(" | 675 DVLOG(1) << "UserMediaClientImpl::OnCreateNativeTracksComplete(" |
736 << "{request_id = " << request->request_id << "} " | 676 << "{request_id = " << request->request_id << "} " |
737 << "{result = " << result << "})"; | 677 << "{result = " << result << "})"; |
(...skipping 133 matching lines...) |
871 request_info.requestFailed(); | 811 request_info.requestFailed(); |
872 } | 812 } |
873 | 813 |
874 void UserMediaClientImpl::EnumerateDevicesSucceded( | 814 void UserMediaClientImpl::EnumerateDevicesSucceded( |
875 blink::WebMediaDevicesRequest* request, | 815 blink::WebMediaDevicesRequest* request, |
876 blink::WebVector<blink::WebMediaDeviceInfo>& devices) { | 816 blink::WebVector<blink::WebMediaDeviceInfo>& devices) { |
877 request->requestSucceeded(devices); | 817 request->requestSucceeded(devices); |
878 } | 818 } |
879 | 819 |
880 const blink::WebMediaStreamSource* UserMediaClientImpl::FindLocalSource( | 820 const blink::WebMediaStreamSource* UserMediaClientImpl::FindLocalSource( |
881 const LocalStreamSources& sources, | |
882 const StreamDeviceInfo& device) const { | 821 const StreamDeviceInfo& device) const { |
883 for (const auto& local_source : sources) { | 822 for (LocalStreamSources::const_iterator it = local_sources_.begin(); |
| 823 it != local_sources_.end(); ++it) { |
884 MediaStreamSource* const source = | 824 MediaStreamSource* const source = |
885 static_cast<MediaStreamSource*>(local_source.getExtraData()); | 825 static_cast<MediaStreamSource*>(it->getExtraData()); |
886 const StreamDeviceInfo& active_device = source->device_info(); | 826 const StreamDeviceInfo& active_device = source->device_info(); |
887 if (IsSameDevice(active_device, device)) | 827 if (IsSameDevice(active_device, device)) { |
888 return &local_source; | 828 return &(*it); |
| 829 } |
889 } | 830 } |
890 return nullptr; | 831 return NULL; |
891 } | |
892 | |
893 blink::WebMediaStreamSource UserMediaClientImpl::FindOrInitializeSourceObject( | |
894 const StreamDeviceInfo& device) { | |
895 const blink::WebMediaStreamSource* existing_source = FindLocalSource(device); | |
896 if (existing_source) { | |
897 DVLOG(1) << "Source already exists. Reusing source with id " | |
898 << existing_source->id().utf8(); | |
899 return *existing_source; | |
900 } | |
901 | |
902 blink::WebMediaStreamSource::Type type = | |
903 IsAudioInputMediaType(device.device.type) | |
904 ? blink::WebMediaStreamSource::TypeAudio | |
905 : blink::WebMediaStreamSource::TypeVideo; | |
906 | |
907 blink::WebMediaStreamSource source; | |
908 source.initialize(blink::WebString::fromUTF8(device.device.id), type, | |
909 blink::WebString::fromUTF8(device.device.name), | |
910 false /* remote */); | |
911 | |
912 DVLOG(1) << "Initialize source object :" | |
913 << "id = " << source.id().utf8() | |
914 << ", name = " << source.name().utf8(); | |
915 return source; | |
916 } | 832 } |
917 | 833 |
918 bool UserMediaClientImpl::RemoveLocalSource( | 834 bool UserMediaClientImpl::RemoveLocalSource( |
919 const blink::WebMediaStreamSource& source) { | 835 const blink::WebMediaStreamSource& source) { |
| 836 bool device_found = false; |
920 for (LocalStreamSources::iterator device_it = local_sources_.begin(); | 837 for (LocalStreamSources::iterator device_it = local_sources_.begin(); |
921 device_it != local_sources_.end(); ++device_it) { | 838 device_it != local_sources_.end(); ++device_it) { |
922 if (IsSameSource(*device_it, source)) { | 839 if (IsSameSource(*device_it, source)) { |
| 840 device_found = true; |
923 local_sources_.erase(device_it); | 841 local_sources_.erase(device_it); |
924 return true; | 842 break; |
925 } | 843 } |
926 } | 844 } |
927 | 845 return device_found; |
928 // Check if the source was pending. | |
929 for (LocalStreamSources::iterator device_it = pending_local_sources_.begin(); | |
930 device_it != pending_local_sources_.end(); ++device_it) { | |
931 if (IsSameSource(*device_it, source)) { | |
932 MediaStreamSource* const source_extra_data = | |
933 static_cast<MediaStreamSource*>(source.getExtraData()); | |
934 for (const auto& request : user_media_requests_) { | |
935 request->OnAudioSourceStarted(source_extra_data, | |
936 MEDIA_DEVICE_TRACK_START_FAILURE, | |
937 "Failed to access audio capture device"); | |
938 } | |
939 pending_local_sources_.erase(device_it); | |
940 return true; | |
941 } | |
942 } | |
943 | |
944 return false; | |
945 } | 846 } |
946 | 847 |
947 UserMediaClientImpl::UserMediaRequestInfo* | 848 UserMediaClientImpl::UserMediaRequestInfo* |
948 UserMediaClientImpl::FindUserMediaRequestInfo(int request_id) { | 849 UserMediaClientImpl::FindUserMediaRequestInfo(int request_id) { |
949 UserMediaRequests::iterator it = user_media_requests_.begin(); | 850 UserMediaRequests::iterator it = user_media_requests_.begin(); |
950 for (; it != user_media_requests_.end(); ++it) { | 851 for (; it != user_media_requests_.end(); ++it) { |
951 if ((*it)->request_id == request_id) | 852 if ((*it)->request_id == request_id) |
952 return (*it); | 853 return (*it); |
953 } | 854 } |
954 return NULL; | 855 return NULL; |
(...skipping 112 matching lines...) |
1067 request(request), | 968 request(request), |
1068 request_result_(MEDIA_DEVICE_OK), | 969 request_result_(MEDIA_DEVICE_OK), |
1069 request_result_name_("") { | 970 request_result_name_("") { |
1070 } | 971 } |
1071 | 972 |
1072 UserMediaClientImpl::UserMediaRequestInfo::~UserMediaRequestInfo() { | 973 UserMediaClientImpl::UserMediaRequestInfo::~UserMediaRequestInfo() { |
1073 DVLOG(1) << "~UserMediaRequestInfo"; | 974 DVLOG(1) << "~UserMediaRequestInfo"; |
1074 } | 975 } |
1075 | 976 |
1076 void UserMediaClientImpl::UserMediaRequestInfo::StartAudioTrack( | 977 void UserMediaClientImpl::UserMediaRequestInfo::StartAudioTrack( |
1077 const blink::WebMediaStreamTrack& track, | 978 const blink::WebMediaStreamTrack& track) { |
1078 bool source_initialized) { | |
1079 DCHECK(track.source().getType() == blink::WebMediaStreamSource::TypeAudio); | 979 DCHECK(track.source().getType() == blink::WebMediaStreamSource::TypeAudio); |
1080 MediaStreamAudioSource* native_source = | 980 MediaStreamAudioSource* native_source = |
1081 MediaStreamAudioSource::From(track.source()); | 981 MediaStreamAudioSource::From(track.source()); |
1082 // Add the source as pending since OnTrackStarted will expect it to be there. | 982 DCHECK(native_source); |
1083 sources_waiting_for_callback_.push_back(native_source); | |
1084 | 983 |
1085 sources_.push_back(track.source()); | 984 sources_.push_back(track.source()); |
1086 bool connected = native_source->ConnectToTrack(track); | 985 sources_waiting_for_callback_.push_back(native_source); |
1087 if (source_initialized) { | 986 if (native_source->ConnectToTrack(track)) |
1088 OnTrackStarted( | |
1089 native_source, | |
1090 connected ? MEDIA_DEVICE_OK : MEDIA_DEVICE_TRACK_START_FAILURE, ""); | |
1091 #if defined(OS_ANDROID) | |
1092 } else if (connected) { | |
1093 CHECK(native_source->is_local_source()); | |
1094 // On Android, we won't get the callback indicating the device readiness. |
1095 // TODO(tommi): Update the android implementation to support the | |
1096 // OnAudioSourceStarted notification. http://crbug.com/679302 | |
1097 OnTrackStarted(native_source, MEDIA_DEVICE_OK, ""); | 987 OnTrackStarted(native_source, MEDIA_DEVICE_OK, ""); |
1098 #endif | 988 else |
1099 } | 989 OnTrackStarted(native_source, MEDIA_DEVICE_TRACK_START_FAILURE, ""); |
1100 } | 990 } |
1101 | 991 |
1102 blink::WebMediaStreamTrack | 992 blink::WebMediaStreamTrack |
1103 UserMediaClientImpl::UserMediaRequestInfo::CreateAndStartVideoTrack( | 993 UserMediaClientImpl::UserMediaRequestInfo::CreateAndStartVideoTrack( |
1104 const blink::WebMediaStreamSource& source, | 994 const blink::WebMediaStreamSource& source, |
1105 const blink::WebMediaConstraints& constraints) { | 995 const blink::WebMediaConstraints& constraints) { |
1106 DCHECK(source.getType() == blink::WebMediaStreamSource::TypeVideo); | 996 DCHECK(source.getType() == blink::WebMediaStreamSource::TypeVideo); |
1107 MediaStreamVideoSource* native_source = | 997 MediaStreamVideoSource* native_source = |
1108 MediaStreamVideoSource::GetVideoSource(source); | 998 MediaStreamVideoSource::GetVideoSource(source); |
1109 DCHECK(native_source); | 999 DCHECK(native_source); |
(...skipping 33 matching lines...) |
1143 | 1033 |
1144 CheckAllTracksStarted(); | 1034 CheckAllTracksStarted(); |
1145 } | 1035 } |
1146 | 1036 |
1147 void UserMediaClientImpl::UserMediaRequestInfo::CheckAllTracksStarted() { | 1037 void UserMediaClientImpl::UserMediaRequestInfo::CheckAllTracksStarted() { |
1148 if (!ready_callback_.is_null() && sources_waiting_for_callback_.empty()) { | 1038 if (!ready_callback_.is_null() && sources_waiting_for_callback_.empty()) { |
1149 ready_callback_.Run(this, request_result_, request_result_name_); | 1039 ready_callback_.Run(this, request_result_, request_result_name_); |
1150 } | 1040 } |
1151 } | 1041 } |
1152 | 1042 |
| 1043 bool UserMediaClientImpl::UserMediaRequestInfo::IsSourceUsed( |
| 1044 const blink::WebMediaStreamSource& source) const { |
| 1045 for (std::vector<blink::WebMediaStreamSource>::const_iterator source_it = |
| 1046 sources_.begin(); |
| 1047 source_it != sources_.end(); ++source_it) { |
| 1048 if (source_it->id() == source.id()) |
| 1049 return true; |
| 1050 } |
| 1051 return false; |
| 1052 } |
| 1053 |
| 1054 void UserMediaClientImpl::UserMediaRequestInfo::RemoveSource( |
| 1055 const blink::WebMediaStreamSource& source) { |
| 1056 for (std::vector<blink::WebMediaStreamSource>::iterator it = |
| 1057 sources_.begin(); |
| 1058 it != sources_.end(); ++it) { |
| 1059 if (source.id() == it->id()) { |
| 1060 sources_.erase(it); |
| 1061 return; |
| 1062 } |
| 1063 } |
| 1064 } |
| 1065 |
1153 bool UserMediaClientImpl::UserMediaRequestInfo::HasPendingSources() const { | 1066 bool UserMediaClientImpl::UserMediaRequestInfo::HasPendingSources() const { |
1154 return !sources_waiting_for_callback_.empty(); | 1067 return !sources_waiting_for_callback_.empty(); |
1155 } | 1068 } |
1156 | 1069 |
1157 void UserMediaClientImpl::UserMediaRequestInfo::OnAudioSourceStarted( | |
1158 MediaStreamSource* source, | |
1159 MediaStreamRequestResult result, | |
1160 const blink::WebString& result_name) { | |
1161 // Check if we're waiting to be notified of this source. If not, then we'll | |
1162 // ignore the notification. | |
1163 auto found = std::find(sources_waiting_for_callback_.begin(), | |
1164 sources_waiting_for_callback_.end(), source); | |
1165 if (found != sources_waiting_for_callback_.end()) | |
1166 OnTrackStarted(source, result, result_name); | |
1167 } | |
1168 | |
1169 void UserMediaClientImpl::OnDestruct() { | 1070 void UserMediaClientImpl::OnDestruct() { |
1170 delete this; | 1071 delete this; |
1171 } | 1072 } |
1172 | 1073 |
1173 } // namespace content | 1074 } // namespace content |
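
Note on the thread-hop pattern that appears only in the left-hand column: OnAudioSourceStartedOnAudioThread receives the audio source's start notification on the audio thread, posts it to the main-thread task runner, and the main-thread half dereferences a WeakPtr before touching the client, so a client destroyed mid-flight is simply skipped. The sketch below is a minimal, self-contained illustration of that pattern using only the C++ standard library; TaskQueue, Client, and main() are hypothetical stand-ins for Chromium's SingleThreadTaskRunner, UserMediaClientImpl, and the renderer message loop, not the actual APIs.

#include <functional>
#include <iostream>
#include <memory>
#include <mutex>
#include <queue>
#include <string>
#include <thread>
#include <utility>

// Stand-in for the main thread's SingleThreadTaskRunner.
class TaskQueue {
 public:
  void PostTask(std::function<void()> task) {
    std::lock_guard<std::mutex> lock(mutex_);
    tasks_.push(std::move(task));
  }

  // Drains queued tasks; in this sketch it is only run on the main thread.
  void RunPending() {
    for (;;) {
      std::function<void()> task;
      {
        std::lock_guard<std::mutex> lock(mutex_);
        if (tasks_.empty())
          return;
        task = std::move(tasks_.front());
        tasks_.pop();
      }
      task();
    }
  }

 private:
  std::mutex mutex_;
  std::queue<std::function<void()>> tasks_;
};

// Stand-in for UserMediaClientImpl; a std::weak_ptr plays the role of
// base::WeakPtr.
class Client {
 public:
  void OnAudioSourceStarted(const std::string& result) {
    std::cout << "Source started on main thread: " << result << "\n";
  }

  // Trampoline that is safe to call from any thread: it captures only the
  // task queue and a weak handle, never a raw pointer to the client.
  static void OnAudioSourceStartedOnAudioThread(TaskQueue* main_queue,
                                                std::weak_ptr<Client> weak,
                                                std::string result) {
    main_queue->PostTask([weak, result] {
      // The main-thread half: dereference the client only if it is still
      // alive, since it may have been destroyed while the task was queued.
      if (std::shared_ptr<Client> client = weak.lock())
        client->OnAudioSourceStarted(result);
    });
  }
};

int main() {
  TaskQueue main_queue;
  std::shared_ptr<Client> client = std::make_shared<Client>();

  // The "audio thread" reports completion without touching the client
  // directly.
  std::thread audio_thread(&Client::OnAudioSourceStartedOnAudioThread,
                           &main_queue, std::weak_ptr<Client>(client),
                           std::string("MEDIA_DEVICE_OK"));
  audio_thread.join();

  // The real code relies on the renderer's message loop; here we simply
  // drain the queue once on the main thread.
  main_queue.RunPending();
  return 0;
}

Posting a bound weak handle rather than a raw this pointer is what lets, for example, content_browsertests tear the client down with notifications still in flight, as the comment above OnAudioSourceStartedOnMainThread in the left-hand column notes.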