Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "content/renderer/media/user_media_client_impl.h" | 5 #include "content/renderer/media/user_media_client_impl.h" |
| 6 | 6 |
| 7 #include <stddef.h> | 7 #include <stddef.h> |
| 8 | 8 |
| 9 #include <algorithm> | 9 #include <algorithm> |
| 10 #include <utility> | 10 #include <utility> |
| (...skipping 414 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 425 UserMediaRequestInfo* request_info = FindUserMediaRequestInfo(request_id); | 425 UserMediaRequestInfo* request_info = FindUserMediaRequestInfo(request_id); |
| 426 if (!request_info) { | 426 if (!request_info) { |
| 427 // This can happen if the request is canceled or the frame reloads while | 427 // This can happen if the request is canceled or the frame reloads while |
| 428 // MediaStreamDispatcher is processing the request. | 428 // MediaStreamDispatcher is processing the request. |
| 429 DVLOG(1) << "Request ID not found"; | 429 DVLOG(1) << "Request ID not found"; |
| 430 OnStreamGeneratedForCancelledRequest(audio_array, video_array); | 430 OnStreamGeneratedForCancelledRequest(audio_array, video_array); |
| 431 return; | 431 return; |
| 432 } | 432 } |
| 433 request_info->generated = true; | 433 request_info->generated = true; |
| 434 | 434 |
| 435 for (const auto* array : {&audio_array, &video_array}) { | |
| 436 for (const auto& info : *array) { | |
| 437 WebRtcLogMessage(base::StringPrintf("Request %d for device \"%s\"", | |
| 438 request_id, | |
| 439 info.device.name.c_str())); | |
| 440 } | |
| 441 } | |
| 442 | |
| 435 DCHECK(!request_info->request.isNull()); | 443 DCHECK(!request_info->request.isNull()); |
| 436 blink::WebVector<blink::WebMediaStreamTrack> audio_track_vector( | 444 blink::WebVector<blink::WebMediaStreamTrack> audio_track_vector( |
| 437 audio_array.size()); | 445 audio_array.size()); |
| 438 CreateAudioTracks(audio_array, request_info->request.audioConstraints(), | 446 CreateAudioTracks(audio_array, request_info->request.audioConstraints(), |
| 439 &audio_track_vector, request_info); | 447 &audio_track_vector, request_info); |
| 440 | 448 |
| 441 blink::WebVector<blink::WebMediaStreamTrack> video_track_vector( | 449 blink::WebVector<blink::WebMediaStreamTrack> video_track_vector( |
| 442 video_array.size()); | 450 video_array.size()); |
| 443 CreateVideoTracks(video_array, request_info->request.videoConstraints(), | 451 CreateVideoTracks(video_array, request_info->request.videoConstraints(), |
| 444 &video_track_vector, request_info); | 452 &video_track_vector, request_info); |
| 445 | 453 |
| 446 blink::WebString webkit_id = blink::WebString::fromUTF8(label); | 454 blink::WebString webkit_id = blink::WebString::fromUTF8(label); |
| 447 blink::WebMediaStream* web_stream = &(request_info->web_stream); | 455 blink::WebMediaStream* web_stream = &(request_info->web_stream); |
| 448 | 456 |
| 449 web_stream->initialize(webkit_id, audio_track_vector, | 457 web_stream->initialize(webkit_id, audio_track_vector, video_track_vector); |
| 450 video_track_vector); | |
| 451 web_stream->setExtraData(new MediaStream()); | 458 web_stream->setExtraData(new MediaStream()); |
| 452 | 459 |
| 453 // Wait for the tracks to be started successfully or to fail. | 460 // Wait for the tracks to be started successfully or to fail. |
| 454 request_info->CallbackOnTracksStarted( | 461 request_info->CallbackOnTracksStarted( |
| 455 base::Bind(&UserMediaClientImpl::OnCreateNativeTracksCompleted, | 462 base::Bind(&UserMediaClientImpl::OnCreateNativeTracksCompleted, |
| 456 weak_factory_.GetWeakPtr())); | 463 weak_factory_.GetWeakPtr())); |
| 457 } | 464 } |
| 458 | 465 |
| 459 void UserMediaClientImpl::OnStreamGeneratedForCancelledRequest( | 466 void UserMediaClientImpl::OnStreamGeneratedForCancelledRequest( |
| 460 const StreamDeviceInfoArray& audio_array, | 467 const StreamDeviceInfoArray& audio_array, |
| 461 const StreamDeviceInfoArray& video_array) { | 468 const StreamDeviceInfoArray& video_array) { |
| 462 // Only stop the device if the device is not used in another MediaStream. | 469 // Only stop the device if the device is not used in another MediaStream. |
| 463 for (StreamDeviceInfoArray::const_iterator device_it = audio_array.begin(); | 470 for (StreamDeviceInfoArray::const_iterator device_it = audio_array.begin(); |
| 464 device_it != audio_array.end(); ++device_it) { | 471 device_it != audio_array.end(); ++device_it) { |
| 465 if (!FindLocalSource(*device_it)) | 472 if (!FindLocalSource(*device_it)) |
| 466 media_stream_dispatcher_->StopStreamDevice(*device_it); | 473 media_stream_dispatcher_->StopStreamDevice(*device_it); |
| 467 } | 474 } |
| 468 | 475 |
| 469 for (StreamDeviceInfoArray::const_iterator device_it = video_array.begin(); | 476 for (StreamDeviceInfoArray::const_iterator device_it = video_array.begin(); |
| 470 device_it != video_array.end(); ++device_it) { | 477 device_it != video_array.end(); ++device_it) { |
| 471 if (!FindLocalSource(*device_it)) | 478 if (!FindLocalSource(*device_it)) |
| 472 media_stream_dispatcher_->StopStreamDevice(*device_it); | 479 media_stream_dispatcher_->StopStreamDevice(*device_it); |
| 473 } | 480 } |
| 474 } | 481 } |
| 475 | 482 |
| 483 // static | |
| 484 void UserMediaClientImpl::OnAudioSourceStartedOnAudioThread( | |
| 485 scoped_refptr<base::SingleThreadTaskRunner> task_runner, | |
| 486 base::WeakPtr<UserMediaClientImpl> weak_ptr, | |
| 487 MediaStreamSource* source, | |
| 488 MediaStreamRequestResult result, | |
| 489 const blink::WebString& result_name) { | |
| 490 task_runner->PostTask(FROM_HERE, | |
| 491 base::Bind(&UserMediaClientImpl::OnAudioSourceStarted, | |
| 492 weak_ptr, source, result, result_name)); | |
| 493 } | |
| 494 | |
| 495 void UserMediaClientImpl::OnAudioSourceStarted( | |
| 496 MediaStreamSource* source, | |
| 497 MediaStreamRequestResult result, | |
| 498 const blink::WebString& result_name) { | |
| 499 for (auto it = pending_local_sources_.begin(); | |
|
Guido Urdaneta
2017/01/09 11:08:40
DCHECK that this is running on the correct thread?
tommi (sloooow) - chröme
2017/01/09 11:39:30
Done.
| |
| 500 it != pending_local_sources_.end(); ++it) { | |
| 501 MediaStreamSource* const source_extra_data = | |
| 502 static_cast<MediaStreamSource*>((*it).getExtraData()); | |
| 503 if (source_extra_data == source) { | |
| 504 if (result == MEDIA_DEVICE_OK) | |
| 505 local_sources_.push_back((*it)); | |
| 506 pending_local_sources_.erase(it); | |
| 507 for (const auto& request : user_media_requests_) | |
| 508 request->OnAudioSourceStarted(source, result, result_name); | |
| 509 return; | |
| 510 } | |
| 511 } | |
| 512 NOTREACHED(); | |
| 513 } | |
| 514 | |
| 476 void UserMediaClientImpl::FinalizeEnumerateDevices( | 515 void UserMediaClientImpl::FinalizeEnumerateDevices( |
| 477 blink::WebMediaDevicesRequest request, | 516 blink::WebMediaDevicesRequest request, |
| 478 const EnumerationResult& result) { | 517 const EnumerationResult& result) { |
| 479 DCHECK_EQ(static_cast<size_t>(NUM_MEDIA_DEVICE_TYPES), result.size()); | 518 DCHECK_EQ(static_cast<size_t>(NUM_MEDIA_DEVICE_TYPES), result.size()); |
| 480 | 519 |
| 481 blink::WebVector<blink::WebMediaDeviceInfo> devices( | 520 blink::WebVector<blink::WebMediaDeviceInfo> devices( |
| 482 result[MEDIA_DEVICE_TYPE_AUDIO_INPUT].size() + | 521 result[MEDIA_DEVICE_TYPE_AUDIO_INPUT].size() + |
| 483 result[MEDIA_DEVICE_TYPE_VIDEO_INPUT].size() + | 522 result[MEDIA_DEVICE_TYPE_VIDEO_INPUT].size() + |
| 484 result[MEDIA_DEVICE_TYPE_AUDIO_OUTPUT].size()); | 523 result[MEDIA_DEVICE_TYPE_AUDIO_OUTPUT].size()); |
| 485 size_t index = 0; | 524 size_t index = 0; |
| (...skipping 47 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 533 // as the underlying media device is unplugged from the system. | 572 // as the underlying media device is unplugged from the system. |
| 534 return; | 573 return; |
| 535 } | 574 } |
| 536 // By creating |source| it is guaranteed that the blink::WebMediaStreamSource | 575 // By creating |source| it is guaranteed that the blink::WebMediaStreamSource |
| 537 // object is valid during the cleanup. | 576 // object is valid during the cleanup. |
| 538 blink::WebMediaStreamSource source(*source_ptr); | 577 blink::WebMediaStreamSource source(*source_ptr); |
| 539 StopLocalSource(source, false); | 578 StopLocalSource(source, false); |
| 540 RemoveLocalSource(source); | 579 RemoveLocalSource(source); |
| 541 } | 580 } |
| 542 | 581 |
| 543 void UserMediaClientImpl::InitializeSourceObject( | 582 void UserMediaClientImpl::InitializeVideoSourceObject( |
| 544 const StreamDeviceInfo& device, | 583 const StreamDeviceInfo& device, |
| 545 blink::WebMediaStreamSource::Type type, | |
| 546 const blink::WebMediaConstraints& constraints, | 584 const blink::WebMediaConstraints& constraints, |
| 547 blink::WebMediaStreamSource* webkit_source) { | 585 blink::WebMediaStreamSource* webkit_source) { |
| 548 const blink::WebMediaStreamSource* existing_source = | 586 DCHECK(CalledOnValidThread()); |
| 549 FindLocalSource(device); | 587 |
| 550 if (existing_source) { | 588 *webkit_source = FindOrInitializeSourceObject(device); |
| 551 *webkit_source = *existing_source; | 589 if (webkit_source->getExtraData()) |
| 552 DVLOG(1) << "Source already exist. Reusing source with id " | 590 return; |
| 553 << webkit_source->id().utf8(); | 591 |
| 592 webkit_source->setExtraData(CreateVideoSource( | |
| 593 device, base::Bind(&UserMediaClientImpl::OnLocalSourceStopped, | |
| 594 weak_factory_.GetWeakPtr()))); | |
| 595 local_sources_.push_back(*webkit_source); | |
| 596 } | |
| 597 | |
| 598 void UserMediaClientImpl::InitializeAudioSourceObject( | |
| 599 const StreamDeviceInfo& device, | |
| 600 const blink::WebMediaConstraints& constraints, | |
| 601 blink::WebMediaStreamSource* webkit_source, | |
| 602 bool* source_initialized) { | |
| 603 DCHECK(CalledOnValidThread()); | |
| 604 | |
| 605 *webkit_source = FindOrInitializeSourceObject(device); | |
| 606 if (webkit_source->getExtraData()) { | |
| 607 *source_initialized = true; | |
| 554 return; | 608 return; |
| 555 } | 609 } |
| 556 | 610 |
| 557 webkit_source->initialize(blink::WebString::fromUTF8(device.device.id), type, | 611 *source_initialized = false; |
| 558 blink::WebString::fromUTF8(device.device.name), | |
| 559 false /* remote */); | |
| 560 | 612 |
| 561 DVLOG(1) << "Initialize source object :" | 613 // See if the source is already being initialized. |
| 562 << "id = " << webkit_source->id().utf8() | 614 auto* pending = FindPendingLocalSource(device); |
| 563 << ", name = " << webkit_source->name().utf8(); | 615 if (pending) { |
| 616 *webkit_source = *pending; | |
| 617 return; | |
| 618 } | |
| 564 | 619 |
| 565 if (type == blink::WebMediaStreamSource::TypeVideo) { | 620 // While sources are being initialized, keep them in a separate array. |
| 566 webkit_source->setExtraData( | 621 // Once they've finished initializing, they'll be moved over to local_sources_. | 
| 567 CreateVideoSource( | 622 // See OnAudioSourceStarted for more details. |
| 568 device, | 623 pending_local_sources_.push_back(*webkit_source); |
| 569 base::Bind(&UserMediaClientImpl::OnLocalSourceStopped, | 624 |
| 570 weak_factory_.GetWeakPtr()))); | 625 MediaStreamSource::ConstraintsCallback source_ready = base::Bind( |
| 571 } else { | 626 &UserMediaClientImpl::OnAudioSourceStartedOnAudioThread, |
| 572 DCHECK_EQ(blink::WebMediaStreamSource::TypeAudio, type); | 627 base::ThreadTaskRunnerHandle::Get(), weak_factory_.GetWeakPtr()); |
| 573 MediaStreamAudioSource* const audio_source = | 628 |
| 574 CreateAudioSource(device, constraints); | 629 MediaStreamAudioSource* const audio_source = |
| 575 audio_source->SetStopCallback( | 630 CreateAudioSource(device, constraints, source_ready); |
| 576 base::Bind(&UserMediaClientImpl::OnLocalSourceStopped, | 631 audio_source->SetStopCallback(base::Bind( |
| 577 weak_factory_.GetWeakPtr())); | 632 &UserMediaClientImpl::OnLocalSourceStopped, weak_factory_.GetWeakPtr())); |
| 578 webkit_source->setExtraData(audio_source); // Takes ownership. | 633 webkit_source->setExtraData(audio_source); // Takes ownership. |
| 579 } | |
| 580 local_sources_.push_back(*webkit_source); | |
| 581 } | 634 } |
| 582 | 635 |
| 583 MediaStreamAudioSource* UserMediaClientImpl::CreateAudioSource( | 636 MediaStreamAudioSource* UserMediaClientImpl::CreateAudioSource( |
| 584 const StreamDeviceInfo& device, | 637 const StreamDeviceInfo& device, |
| 585 const blink::WebMediaConstraints& constraints) { | 638 const blink::WebMediaConstraints& constraints, |
| 639 const MediaStreamSource::ConstraintsCallback& source_ready) { | |
| 640 DCHECK(CalledOnValidThread()); | |
| 586 // If the audio device is a loopback device (for screen capture), or if the | 641 // If the audio device is a loopback device (for screen capture), or if the |
| 587 // constraints/effects parameters indicate no audio processing is needed, | 642 // constraints/effects parameters indicate no audio processing is needed, |
| 588 // create an efficient, direct-path MediaStreamAudioSource instance. | 643 // create an efficient, direct-path MediaStreamAudioSource instance. |
| 589 if (IsScreenCaptureMediaType(device.device.type) || | 644 if (IsScreenCaptureMediaType(device.device.type) || |
| 590 !MediaStreamAudioProcessor::WouldModifyAudio( | 645 !MediaStreamAudioProcessor::WouldModifyAudio( |
| 591 constraints, device.device.input.effects)) { | 646 constraints, device.device.input.effects)) { |
| 592 return new LocalMediaStreamAudioSource(RenderFrameObserver::routing_id(), | 647 return new LocalMediaStreamAudioSource(RenderFrameObserver::routing_id(), |
| 593 device); | 648 device, source_ready); |
| 594 } | 649 } |
| 595 | 650 |
| 596 // The audio device is not associated with screen capture and also requires | 651 // The audio device is not associated with screen capture and also requires |
| 597 // processing. | 652 // processing. |
| 598 ProcessedLocalAudioSource* source = new ProcessedLocalAudioSource( | 653 ProcessedLocalAudioSource* source = new ProcessedLocalAudioSource( |
| 599 RenderFrameObserver::routing_id(), device, dependency_factory_); | 654 RenderFrameObserver::routing_id(), device, constraints, source_ready, |
| 600 source->SetSourceConstraints(constraints); | 655 dependency_factory_); |
| 601 return source; | 656 return source; |
| 602 } | 657 } |
| 603 | 658 |
| 604 MediaStreamVideoSource* UserMediaClientImpl::CreateVideoSource( | 659 MediaStreamVideoSource* UserMediaClientImpl::CreateVideoSource( |
| 605 const StreamDeviceInfo& device, | 660 const StreamDeviceInfo& device, |
| 606 const MediaStreamSource::SourceStoppedCallback& stop_callback) { | 661 const MediaStreamSource::SourceStoppedCallback& stop_callback) { |
| 662 DCHECK(CalledOnValidThread()); | |
| 607 content::MediaStreamVideoCapturerSource* ret = | 663 content::MediaStreamVideoCapturerSource* ret = |
| 608 new content::MediaStreamVideoCapturerSource(stop_callback, device, | 664 new content::MediaStreamVideoCapturerSource(stop_callback, device, |
| 609 render_frame()); | 665 render_frame()); |
| 610 return ret; | 666 return ret; |
| 611 } | 667 } |
| 612 | 668 |
| 613 void UserMediaClientImpl::CreateVideoTracks( | 669 void UserMediaClientImpl::CreateVideoTracks( |
| 614 const StreamDeviceInfoArray& devices, | 670 const StreamDeviceInfoArray& devices, |
| 615 const blink::WebMediaConstraints& constraints, | 671 const blink::WebMediaConstraints& constraints, |
| 616 blink::WebVector<blink::WebMediaStreamTrack>* webkit_tracks, | 672 blink::WebVector<blink::WebMediaStreamTrack>* webkit_tracks, |
| 617 UserMediaRequestInfo* request) { | 673 UserMediaRequestInfo* request) { |
| 674 DCHECK(CalledOnValidThread()); | |
| 618 DCHECK_EQ(devices.size(), webkit_tracks->size()); | 675 DCHECK_EQ(devices.size(), webkit_tracks->size()); |
| 619 | 676 |
| 620 for (size_t i = 0; i < devices.size(); ++i) { | 677 for (size_t i = 0; i < devices.size(); ++i) { |
| 621 blink::WebMediaStreamSource webkit_source; | 678 blink::WebMediaStreamSource webkit_source; |
| 622 InitializeSourceObject(devices[i], | 679 InitializeVideoSourceObject(devices[i], constraints, &webkit_source); |
| 623 blink::WebMediaStreamSource::TypeVideo, | |
| 624 constraints, | |
| 625 &webkit_source); | |
| 626 (*webkit_tracks)[i] = | 680 (*webkit_tracks)[i] = |
| 627 request->CreateAndStartVideoTrack(webkit_source, constraints); | 681 request->CreateAndStartVideoTrack(webkit_source, constraints); |
| 628 } | 682 } |
| 629 } | 683 } |
| 630 | 684 |
| 631 void UserMediaClientImpl::CreateAudioTracks( | 685 void UserMediaClientImpl::CreateAudioTracks( |
| 632 const StreamDeviceInfoArray& devices, | 686 const StreamDeviceInfoArray& devices, |
| 633 const blink::WebMediaConstraints& constraints, | 687 const blink::WebMediaConstraints& constraints, |
| 634 blink::WebVector<blink::WebMediaStreamTrack>* webkit_tracks, | 688 blink::WebVector<blink::WebMediaStreamTrack>* webkit_tracks, |
| 635 UserMediaRequestInfo* request) { | 689 UserMediaRequestInfo* request) { |
| 690 DCHECK(CalledOnValidThread()); | |
| 636 DCHECK_EQ(devices.size(), webkit_tracks->size()); | 691 DCHECK_EQ(devices.size(), webkit_tracks->size()); |
| 637 | 692 |
| 638 // Log the device names for this request. | |
| 639 for (StreamDeviceInfoArray::const_iterator it = devices.begin(); | |
| 640 it != devices.end(); ++it) { | |
| 641 WebRtcLogMessage(base::StringPrintf( | |
| 642 "Generated media stream for request id %d contains audio device name" | |
| 643 " \"%s\"", | |
| 644 request->request_id, | |
| 645 it->device.name.c_str())); | |
| 646 } | |
| 647 | |
| 648 StreamDeviceInfoArray overridden_audio_array = devices; | 693 StreamDeviceInfoArray overridden_audio_array = devices; |
| 649 if (!request->enable_automatic_output_device_selection) { | 694 if (!request->enable_automatic_output_device_selection) { |
| 650 // If the GetUserMedia request did not explicitly set the constraint | 695 // If the GetUserMedia request did not explicitly set the constraint |
| 651 // kMediaStreamRenderToAssociatedSink, the output device parameters must | 696 // kMediaStreamRenderToAssociatedSink, the output device parameters must |
| 652 // be removed. | 697 // be removed. |
| 653 for (StreamDeviceInfoArray::iterator it = overridden_audio_array.begin(); | 698 for (auto& device_info : overridden_audio_array) { |
| 654 it != overridden_audio_array.end(); ++it) { | 699 device_info.device.matched_output_device_id = ""; |
| 655 it->device.matched_output_device_id = ""; | 700 device_info.device.matched_output = |
| 656 it->device.matched_output = MediaStreamDevice::AudioDeviceParameters(); | 701 MediaStreamDevice::AudioDeviceParameters(); |
| 657 } | 702 } |
| 658 } | 703 } |
| 659 | 704 |
| 660 for (size_t i = 0; i < overridden_audio_array.size(); ++i) { | 705 for (size_t i = 0; i < overridden_audio_array.size(); ++i) { |
| 661 blink::WebMediaStreamSource webkit_source; | 706 blink::WebMediaStreamSource webkit_source; |
| 662 InitializeSourceObject(overridden_audio_array[i], | 707 bool source_initialized = true; |
| 663 blink::WebMediaStreamSource::TypeAudio, | 708 InitializeAudioSourceObject(overridden_audio_array[i], constraints, |
| 664 constraints, | 709 &webkit_source, &source_initialized); |
| 665 &webkit_source); | |
| 666 (*webkit_tracks)[i].initialize(webkit_source); | 710 (*webkit_tracks)[i].initialize(webkit_source); |
| 667 request->StartAudioTrack((*webkit_tracks)[i]); | 711 request->StartAudioTrack((*webkit_tracks)[i], source_initialized); |
| 668 } | 712 } |
| 669 } | 713 } |
| 670 | 714 |
| 671 void UserMediaClientImpl::OnCreateNativeTracksCompleted( | 715 void UserMediaClientImpl::OnCreateNativeTracksCompleted( |
| 672 UserMediaRequestInfo* request, | 716 UserMediaRequestInfo* request, |
| 673 MediaStreamRequestResult result, | 717 MediaStreamRequestResult result, |
| 674 const blink::WebString& result_name) { | 718 const blink::WebString& result_name) { |
| 675 DVLOG(1) << "UserMediaClientImpl::OnCreateNativeTracksComplete(" | 719 DVLOG(1) << "UserMediaClientImpl::OnCreateNativeTracksComplete(" |
| 676 << "{request_id = " << request->request_id << "} " | 720 << "{request_id = " << request->request_id << "} " |
| 677 << "{result = " << result << "})"; | 721 << "{result = " << result << "})"; |
| (...skipping 133 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 811 request_info.requestFailed(); | 855 request_info.requestFailed(); |
| 812 } | 856 } |
| 813 | 857 |
| 814 void UserMediaClientImpl::EnumerateDevicesSucceded( | 858 void UserMediaClientImpl::EnumerateDevicesSucceded( |
| 815 blink::WebMediaDevicesRequest* request, | 859 blink::WebMediaDevicesRequest* request, |
| 816 blink::WebVector<blink::WebMediaDeviceInfo>& devices) { | 860 blink::WebVector<blink::WebMediaDeviceInfo>& devices) { |
| 817 request->requestSucceeded(devices); | 861 request->requestSucceeded(devices); |
| 818 } | 862 } |
| 819 | 863 |
| 820 const blink::WebMediaStreamSource* UserMediaClientImpl::FindLocalSource( | 864 const blink::WebMediaStreamSource* UserMediaClientImpl::FindLocalSource( |
| 865 const LocalStreamSources& sources, | |
| 821 const StreamDeviceInfo& device) const { | 866 const StreamDeviceInfo& device) const { |
| 822 for (LocalStreamSources::const_iterator it = local_sources_.begin(); | 867 for (const auto& local_source : sources) { |
| 823 it != local_sources_.end(); ++it) { | |
| 824 MediaStreamSource* const source = | 868 MediaStreamSource* const source = |
| 825 static_cast<MediaStreamSource*>(it->getExtraData()); | 869 static_cast<MediaStreamSource*>(local_source.getExtraData()); |
| 826 const StreamDeviceInfo& active_device = source->device_info(); | 870 const StreamDeviceInfo& active_device = source->device_info(); |
| 827 if (IsSameDevice(active_device, device)) { | 871 if (IsSameDevice(active_device, device)) |
| 828 return &(*it); | 872 return &local_source; |
| 829 } | |
| 830 } | 873 } |
| 831 return NULL; | 874 return NULL; |
|
Guido Urdaneta
2017/01/09 11:08:40
nit: since you're rewriting this to C++11 style, u
tommi (sloooow) - chröme
2017/01/09 11:39:30
Done.
| |
| 832 } | 875 } |
| 833 | 876 |
| 877 blink::WebMediaStreamSource UserMediaClientImpl::FindOrInitializeSourceObject( | |
| 878 const StreamDeviceInfo& device) { | |
| 879 const blink::WebMediaStreamSource* existing_source = FindLocalSource(device); | |
| 880 if (existing_source) { | |
| 881 DVLOG(1) << "Source already exist. Reusing source with id " | |
|
Guido Urdaneta
2017/01/09 11:08:40
nit: exist -> exists.
tommi (sloooow) - chröme
2017/01/09 11:39:29
Done.
| |
| 882 << existing_source->id().utf8(); | |
| 883 return *existing_source; | |
| 884 } | |
| 885 | |
| 886 blink::WebMediaStreamSource::Type type = | |
| 887 IsAudioInputMediaType(device.device.type) | |
| 888 ? blink::WebMediaStreamSource::TypeAudio | |
| 889 : blink::WebMediaStreamSource::TypeVideo; | |
| 890 | |
| 891 blink::WebMediaStreamSource source; | |
| 892 source.initialize(blink::WebString::fromUTF8(device.device.id), type, | |
| 893 blink::WebString::fromUTF8(device.device.name), | |
| 894 false /* remote */); | |
| 895 | |
| 896 DVLOG(1) << "Initialize source object :" | |
| 897 << "id = " << source.id().utf8() | |
| 898 << ", name = " << source.name().utf8(); | |
| 899 return source; | |
| 900 } | |
| 901 | |
| 834 bool UserMediaClientImpl::RemoveLocalSource( | 902 bool UserMediaClientImpl::RemoveLocalSource( |
| 835 const blink::WebMediaStreamSource& source) { | 903 const blink::WebMediaStreamSource& source) { |
| 836 bool device_found = false; | |
| 837 for (LocalStreamSources::iterator device_it = local_sources_.begin(); | 904 for (LocalStreamSources::iterator device_it = local_sources_.begin(); |
| 838 device_it != local_sources_.end(); ++device_it) { | 905 device_it != local_sources_.end(); ++device_it) { |
| 839 if (IsSameSource(*device_it, source)) { | 906 if (IsSameSource(*device_it, source)) { |
| 840 device_found = true; | |
| 841 local_sources_.erase(device_it); | 907 local_sources_.erase(device_it); |
| 842 break; | 908 return true; |
| 843 } | 909 } |
| 844 } | 910 } |
| 845 return device_found; | 911 |
| 912 // Check if the source was pending. | |
| 913 for (LocalStreamSources::iterator device_it = pending_local_sources_.begin(); | |
| 914 device_it != pending_local_sources_.end(); ++device_it) { | |
| 915 if (IsSameSource(*device_it, source)) { | |
| 916 MediaStreamSource* const source_extra_data = | |
| 917 static_cast<MediaStreamSource*>(source.getExtraData()); | |
| 918 for (const auto& request : user_media_requests_) { | |
| 919 request->OnAudioSourceStarted(source_extra_data, | |
| 920 MEDIA_DEVICE_TRACK_START_FAILURE, | |
| 921 "Failed to access audio capture device"); | |
| 922 } | |
| 923 pending_local_sources_.erase(device_it); | |
| 924 return true; | |
| 925 } | |
| 926 } | |
| 927 | |
| 928 return false; | |
| 846 } | 929 } |
| 847 | 930 |
| 848 UserMediaClientImpl::UserMediaRequestInfo* | 931 UserMediaClientImpl::UserMediaRequestInfo* |
| 849 UserMediaClientImpl::FindUserMediaRequestInfo(int request_id) { | 932 UserMediaClientImpl::FindUserMediaRequestInfo(int request_id) { |
| 850 UserMediaRequests::iterator it = user_media_requests_.begin(); | 933 UserMediaRequests::iterator it = user_media_requests_.begin(); |
| 851 for (; it != user_media_requests_.end(); ++it) { | 934 for (; it != user_media_requests_.end(); ++it) { |
| 852 if ((*it)->request_id == request_id) | 935 if ((*it)->request_id == request_id) |
| 853 return (*it); | 936 return (*it); |
| 854 } | 937 } |
| 855 return NULL; | 938 return NULL; |
| (...skipping 112 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 968 request(request), | 1051 request(request), |
| 969 request_result_(MEDIA_DEVICE_OK), | 1052 request_result_(MEDIA_DEVICE_OK), |
| 970 request_result_name_("") { | 1053 request_result_name_("") { |
| 971 } | 1054 } |
| 972 | 1055 |
| 973 UserMediaClientImpl::UserMediaRequestInfo::~UserMediaRequestInfo() { | 1056 UserMediaClientImpl::UserMediaRequestInfo::~UserMediaRequestInfo() { |
| 974 DVLOG(1) << "~UserMediaRequestInfo"; | 1057 DVLOG(1) << "~UserMediaRequestInfo"; |
| 975 } | 1058 } |
| 976 | 1059 |
| 977 void UserMediaClientImpl::UserMediaRequestInfo::StartAudioTrack( | 1060 void UserMediaClientImpl::UserMediaRequestInfo::StartAudioTrack( |
| 978 const blink::WebMediaStreamTrack& track) { | 1061 const blink::WebMediaStreamTrack& track, |
| 1062 bool source_initialized) { | |
| 979 DCHECK(track.source().getType() == blink::WebMediaStreamSource::TypeAudio); | 1063 DCHECK(track.source().getType() == blink::WebMediaStreamSource::TypeAudio); |
| 980 MediaStreamAudioSource* native_source = | 1064 MediaStreamAudioSource* native_source = |
| 981 MediaStreamAudioSource::From(track.source()); | 1065 MediaStreamAudioSource::From(track.source()); |
| 982 DCHECK(native_source); | 1066 // Add the source as pending since OnTrackStarted will expect it to be there. |
| 1067 sources_waiting_for_callback_.push_back(native_source); | |
| 983 | 1068 |
| 984 sources_.push_back(track.source()); | 1069 sources_.push_back(track.source()); |
| 985 sources_waiting_for_callback_.push_back(native_source); | 1070 bool connected = native_source->ConnectToTrack(track); |
| 986 if (native_source->ConnectToTrack(track)) | 1071 if (source_initialized) { |
| 1072 OnTrackStarted( | |
| 1073 native_source, | |
| 1074 connected ? MEDIA_DEVICE_OK : MEDIA_DEVICE_TRACK_START_FAILURE, ""); | |
| 1075 #if defined(OS_ANDROID) | |
| 1076 } else if (connected) { | |
| 1077 CHECK(native_source->is_local_source()); | |
| 1078 // On Android, we won't get the callback indicating the device readiness. | 
| 1079 // TODO(tommi): Update the Android implementation to support the | 
| 1080 // OnAudioSourceStarted notification. | |
|
Guido Urdaneta
2017/01/09 11:08:40
Add crbug.com reference
tommi (sloooow) - chröme
2017/01/09 11:39:30
Done.
| |
| 987 OnTrackStarted(native_source, MEDIA_DEVICE_OK, ""); | 1081 OnTrackStarted(native_source, MEDIA_DEVICE_OK, ""); |
| 988 else | 1082 #endif |
| 989 OnTrackStarted(native_source, MEDIA_DEVICE_TRACK_START_FAILURE, ""); | 1083 } |
| 990 } | 1084 } |
| 991 | 1085 |
| 992 blink::WebMediaStreamTrack | 1086 blink::WebMediaStreamTrack |
| 993 UserMediaClientImpl::UserMediaRequestInfo::CreateAndStartVideoTrack( | 1087 UserMediaClientImpl::UserMediaRequestInfo::CreateAndStartVideoTrack( |
| 994 const blink::WebMediaStreamSource& source, | 1088 const blink::WebMediaStreamSource& source, |
| 995 const blink::WebMediaConstraints& constraints) { | 1089 const blink::WebMediaConstraints& constraints) { |
| 996 DCHECK(source.getType() == blink::WebMediaStreamSource::TypeVideo); | 1090 DCHECK(source.getType() == blink::WebMediaStreamSource::TypeVideo); |
| 997 MediaStreamVideoSource* native_source = | 1091 MediaStreamVideoSource* native_source = |
| 998 MediaStreamVideoSource::GetVideoSource(source); | 1092 MediaStreamVideoSource::GetVideoSource(source); |
| 999 DCHECK(native_source); | 1093 DCHECK(native_source); |
| (...skipping 33 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1033 | 1127 |
| 1034 CheckAllTracksStarted(); | 1128 CheckAllTracksStarted(); |
| 1035 } | 1129 } |
| 1036 | 1130 |
| 1037 void UserMediaClientImpl::UserMediaRequestInfo::CheckAllTracksStarted() { | 1131 void UserMediaClientImpl::UserMediaRequestInfo::CheckAllTracksStarted() { |
| 1038 if (!ready_callback_.is_null() && sources_waiting_for_callback_.empty()) { | 1132 if (!ready_callback_.is_null() && sources_waiting_for_callback_.empty()) { |
| 1039 ready_callback_.Run(this, request_result_, request_result_name_); | 1133 ready_callback_.Run(this, request_result_, request_result_name_); |
| 1040 } | 1134 } |
| 1041 } | 1135 } |
| 1042 | 1136 |
| 1043 bool UserMediaClientImpl::UserMediaRequestInfo::IsSourceUsed( | |
| 1044 const blink::WebMediaStreamSource& source) const { | |
| 1045 for (std::vector<blink::WebMediaStreamSource>::const_iterator source_it = | |
| 1046 sources_.begin(); | |
| 1047 source_it != sources_.end(); ++source_it) { | |
| 1048 if (source_it->id() == source.id()) | |
| 1049 return true; | |
| 1050 } | |
| 1051 return false; | |
| 1052 } | |
| 1053 | |
| 1054 void UserMediaClientImpl::UserMediaRequestInfo::RemoveSource( | |
| 1055 const blink::WebMediaStreamSource& source) { | |
| 1056 for (std::vector<blink::WebMediaStreamSource>::iterator it = | |
| 1057 sources_.begin(); | |
| 1058 it != sources_.end(); ++it) { | |
| 1059 if (source.id() == it->id()) { | |
| 1060 sources_.erase(it); | |
| 1061 return; | |
| 1062 } | |
| 1063 } | |
| 1064 } | |
| 1065 | |
| 1066 bool UserMediaClientImpl::UserMediaRequestInfo::HasPendingSources() const { | 1137 bool UserMediaClientImpl::UserMediaRequestInfo::HasPendingSources() const { |
| 1067 return !sources_waiting_for_callback_.empty(); | 1138 return !sources_waiting_for_callback_.empty(); |
| 1068 } | 1139 } |
| 1069 | 1140 |
| 1141 void UserMediaClientImpl::UserMediaRequestInfo::OnAudioSourceStarted( | |
| 1142 MediaStreamSource* source, | |
| 1143 MediaStreamRequestResult result, | |
| 1144 const blink::WebString& result_name) { | |
| 1145 // Check if we're waiting to be notified of this source. If not, then we'll | |
| 1146 // ignore the notification. | |
| 1147 auto found = std::find(sources_waiting_for_callback_.begin(), | |
| 1148 sources_waiting_for_callback_.end(), source); | |
| 1149 if (found != sources_waiting_for_callback_.end()) | |
| 1150 OnTrackStarted(source, result, result_name); | |
| 1151 } | |
| 1152 | |
| 1070 void UserMediaClientImpl::OnDestruct() { | 1153 void UserMediaClientImpl::OnDestruct() { |
| 1071 delete this; | 1154 delete this; |
| 1072 } | 1155 } |
| 1073 | 1156 |
| 1074 } // namespace content | 1157 } // namespace content |
| OLD | NEW |