Chromium Code Reviews
| Index: content/renderer/media/user_media_client_impl.cc |
| diff --git a/content/renderer/media/user_media_client_impl.cc b/content/renderer/media/user_media_client_impl.cc |
| index 82e5bf0e3b009ec80c02ea501a1035847f2c0919..3262a4cde71a68df1c02b9035c2c1c78033e3b81 100644 |
| --- a/content/renderer/media/user_media_client_impl.cc |
| +++ b/content/renderer/media/user_media_client_impl.cc |
| @@ -18,6 +18,7 @@ |
| #include "base/strings/stringprintf.h" |
| #include "base/strings/utf_string_conversions.h" |
| #include "base/threading/thread_task_runner_handle.h" |
| +#include "build/build_config.h" |
| #include "content/public/renderer/render_frame.h" |
| #include "content/renderer/media/local_media_stream_audio_source.h" |
| #include "content/renderer/media/media_stream.h" |
| @@ -123,53 +124,36 @@ bool IsSameSource(const blink::WebMediaStreamSource& source, |
| return IsSameDevice(device, other_device); |
| } |
| -static int g_next_request_id = 0; |
| - |
| -} // namespace |
| +blink::WebMediaDeviceInfo::MediaDeviceKind ToMediaDeviceKind( |
| + MediaDeviceType type) { |
| + switch (type) { |
| + case MEDIA_DEVICE_TYPE_AUDIO_INPUT: |
| + return blink::WebMediaDeviceInfo::MediaDeviceKindAudioInput; |
| + case MEDIA_DEVICE_TYPE_VIDEO_INPUT: |
| + return blink::WebMediaDeviceInfo::MediaDeviceKindVideoInput; |
| + case MEDIA_DEVICE_TYPE_AUDIO_OUTPUT: |
| + return blink::WebMediaDeviceInfo::MediaDeviceKindAudioOutput; |
| + default: |
| + NOTREACHED(); |
| + return blink::WebMediaDeviceInfo::MediaDeviceKindAudioInput; |
| + } |
| +} |
| -struct UserMediaClientImpl::MediaDevicesRequestInfo { |
| - MediaDevicesRequestInfo(const blink::WebMediaDevicesRequest& request, |
| - int audio_input_request_id, |
| - int video_input_request_id, |
| - int audio_output_request_id) |
| - : media_devices_request(request), |
| - audio_input_request_id(audio_input_request_id), |
| - video_input_request_id(video_input_request_id), |
| - audio_output_request_id(audio_output_request_id), |
| - has_audio_input_returned(false), |
| - has_video_input_returned(false), |
| - has_audio_output_returned(false) {} |
| - |
| - MediaDevicesRequestInfo( |
| - const blink::WebMediaStreamTrackSourcesRequest& request, |
| - int audio_input_request_id, |
| - int video_input_request_id) |
| - : sources_request(request), |
| - audio_input_request_id(audio_input_request_id), |
| - video_input_request_id(video_input_request_id), |
| - audio_output_request_id(-1), |
| - has_audio_input_returned(false), |
| - has_video_input_returned(false), |
| - has_audio_output_returned(false) {} |
| - |
| - bool IsSourcesRequest() { |
| - // We can't check isNull() on |media_devices_request| and |sources_request|, |
| - // because in unit tests they will always be null. |
| - return audio_output_request_id == -1; |
| +blink::WebSourceInfo::VideoFacingMode ToVideoFacingMode( |
| + const std::string& device_label) { |
| +#if defined(OS_ANDROID) |
| + if (device_label.find("front") != std::string::npos) { |

tommi (sloooow) - chröme, 2016/10/07 19:59:40:
is this behavior copied from somewhere? Would it

Guido Urdaneta, 2016/10/08 10:09:56:
This is copied from here: https://cs.chromium.org/

| + return blink::WebSourceInfo::VideoFacingModeUser; |
| + } else if (device_label.find("back") != std::string::npos) { |
| + return blink::WebSourceInfo::VideoFacingModeEnvironment; |
| } |
| +#endif |
| + return blink::WebSourceInfo::VideoFacingModeNone; |
| +} |
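
For context on the review thread above: a minimal, self-contained sketch of the same label-substring heuristic, using made-up device labels; on non-Android builds ToVideoFacingMode() simply returns VideoFacingModeNone because the check is guarded by OS_ANDROID.

    // Illustrative only; mirrors the substring check in ToVideoFacingMode()
    // with hypothetical Android-style camera labels (not Chromium code).
    #include <cassert>
    #include <string>

    enum class FacingMode { kNone, kUser, kEnvironment };

    FacingMode GuessFacingMode(const std::string& device_label) {
      // Same heuristic as the patch: classify by "front"/"back" in the label.
      if (device_label.find("front") != std::string::npos)
        return FacingMode::kUser;
      if (device_label.find("back") != std::string::npos)
        return FacingMode::kEnvironment;
      return FacingMode::kNone;
    }

    int main() {
      assert(GuessFacingMode("camera 1, facing front") == FacingMode::kUser);
      assert(GuessFacingMode("camera 0, facing back") == FacingMode::kEnvironment);
      assert(GuessFacingMode("USB Capture Device") == FacingMode::kNone);
      return 0;
    }
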
| - blink::WebMediaDevicesRequest media_devices_request; |
| - blink::WebMediaStreamTrackSourcesRequest sources_request; |
| - int audio_input_request_id; |
| - int video_input_request_id; |
| - int audio_output_request_id; |
| - bool has_audio_input_returned; |
| - bool has_video_input_returned; |
| - bool has_audio_output_returned; |
| - StreamDeviceInfoArray audio_input_devices; |
| - StreamDeviceInfoArray video_input_devices; |
| - StreamDeviceInfoArray audio_output_devices; |
| -}; |
| +static int g_next_request_id = 0; |
| + |
| +} // namespace |
| UserMediaClientImpl::UserMediaClientImpl( |
| RenderFrame* render_frame, |
| @@ -290,10 +274,6 @@ void UserMediaClientImpl::requestMediaDevices( |
| UpdateWebRTCMethodCount(WEBKIT_GET_MEDIA_DEVICES); |
| DCHECK(CalledOnValidThread()); |
| - int audio_input_request_id = g_next_request_id++; |
| - int video_input_request_id = g_next_request_id++; |
| - int audio_output_request_id = g_next_request_id++; |
| - |
| // |media_devices_request| can't be mocked, so in tests it will be empty (the |
| // underlying pointer is null). In order to use this function in a test we |
| // need to check if it isNull. |
| @@ -301,33 +281,11 @@ void UserMediaClientImpl::requestMediaDevices( |
| if (!media_devices_request.isNull()) |
| security_origin = media_devices_request.getSecurityOrigin(); |
| - DVLOG(1) << "UserMediaClientImpl::requestMediaDevices(" |
| - << audio_input_request_id << ", " << video_input_request_id << ", " |
| - << audio_output_request_id << ", " << security_origin << ")"; |
| - |
| - media_devices_requests_.push_back(new MediaDevicesRequestInfo( |
| - media_devices_request, |
| - audio_input_request_id, |
| - video_input_request_id, |
| - audio_output_request_id)); |
| - |
| - media_stream_dispatcher_->EnumerateDevices( |
| - audio_input_request_id, |
| - weak_factory_.GetWeakPtr(), |
| - MEDIA_DEVICE_AUDIO_CAPTURE, |
| - security_origin); |
| - |
| - media_stream_dispatcher_->EnumerateDevices( |
| - video_input_request_id, |
| - weak_factory_.GetWeakPtr(), |
| - MEDIA_DEVICE_VIDEO_CAPTURE, |
| - security_origin); |
| - |
| - media_stream_dispatcher_->EnumerateDevices( |
| - audio_output_request_id, |
| - weak_factory_.GetWeakPtr(), |
| - MEDIA_DEVICE_AUDIO_OUTPUT, |
| - security_origin); |
| + GetMediaDevicesDispatcher()->EnumerateDevices( |
| + true /* audio input */, true /* video input */, true /* audio output */, |
| + security_origin, |
| + base::Bind(&UserMediaClientImpl::FinalizeEnumerateDevices, |
| + weak_factory_.GetWeakPtr(), media_devices_request)); |
| } |
| void UserMediaClientImpl::requestSources( |
| @@ -336,9 +294,6 @@ void UserMediaClientImpl::requestSources( |
| // stats. This is instead counted in MediaStreamTrack::getSources in blink. |
| DCHECK(CalledOnValidThread()); |
| - int audio_input_request_id = g_next_request_id++; |
| - int video_input_request_id = g_next_request_id++; |
| - |
| // |sources_request| can't be mocked, so in tests it will be empty (the |
| // underlying pointer is null). In order to use this function in a test we |
| // need to check if it isNull. |
| @@ -346,25 +301,10 @@ void UserMediaClientImpl::requestSources( |
| if (!sources_request.isNull()) |
| security_origin = sources_request.origin(); |
| - DVLOG(1) << "UserMediaClientImpl::requestSources(" << audio_input_request_id |
| - << ", " << video_input_request_id << ", " << security_origin << ")"; |
| - |
| - media_devices_requests_.push_back(new MediaDevicesRequestInfo( |
| - sources_request, |
| - audio_input_request_id, |
| - video_input_request_id)); |
| - |
| - media_stream_dispatcher_->EnumerateDevices( |
| - audio_input_request_id, |
| - weak_factory_.GetWeakPtr(), |
| - MEDIA_DEVICE_AUDIO_CAPTURE, |
| - security_origin); |
| - |
| - media_stream_dispatcher_->EnumerateDevices( |
| - video_input_request_id, |
| - weak_factory_.GetWeakPtr(), |
| - MEDIA_DEVICE_VIDEO_CAPTURE, |
| - security_origin); |
| + GetMediaDevicesDispatcher()->EnumerateDevices( |
| + true /* audio input */, true /* video input */, false /* audio output */, |
| + security_origin, base::Bind(&UserMediaClientImpl::FinalizeGetSources, |
| + weak_factory_.GetWeakPtr(), sources_request)); |
| } |
| void UserMediaClientImpl::setMediaDeviceChangeObserver( |
| @@ -456,83 +396,55 @@ void UserMediaClientImpl::OnStreamGeneratedForCancelledRequest( |
| } |
| void UserMediaClientImpl::FinalizeEnumerateDevices( |
| - MediaDevicesRequestInfo* request) { |
| - blink::WebVector<blink::WebMediaDeviceInfo> |
| - devices(request->audio_input_devices.size() + |
| - request->video_input_devices.size() + |
| - request->audio_output_devices.size()); |
| - for (size_t i = 0; i < request->audio_input_devices.size(); ++i) { |
| - const MediaStreamDevice& device = request->audio_input_devices[i].device; |
| - DCHECK_EQ(device.type, MEDIA_DEVICE_AUDIO_CAPTURE); |
| - |
| - devices[i].initialize(blink::WebString::fromUTF8(device.id), |
| - blink::WebMediaDeviceInfo::MediaDeviceKindAudioInput, |
| - blink::WebString::fromUTF8(device.name), |
| - blink::WebString::fromUTF8(device.group_id)); |
| - } |
| - size_t offset = request->audio_input_devices.size(); |
| - for (size_t i = 0; i < request->video_input_devices.size(); ++i) { |
| - const MediaStreamDevice& device = request->video_input_devices[i].device; |
| - DCHECK_EQ(device.type, MEDIA_DEVICE_VIDEO_CAPTURE); |
| - devices[offset + i].initialize( |
| - blink::WebString::fromUTF8(device.id), |
| - blink::WebMediaDeviceInfo::MediaDeviceKindVideoInput, |
| - blink::WebString::fromUTF8(device.name), |
| - blink::WebString()); |
| - } |
| - offset += request->video_input_devices.size(); |
| - for (size_t i = 0; i < request->audio_output_devices.size(); ++i) { |
| - const MediaStreamDevice& device = request->audio_output_devices[i].device; |
| - DCHECK_EQ(device.type, MEDIA_DEVICE_AUDIO_OUTPUT); |
| - devices[offset + i].initialize( |
| - blink::WebString::fromUTF8(device.id), |
| - blink::WebMediaDeviceInfo::MediaDeviceKindAudioOutput, |
| - blink::WebString::fromUTF8(device.name), |
| - blink::WebString::fromUTF8(device.group_id)); |
| + blink::WebMediaDevicesRequest request, |
| + const EnumerationResult& result) { |
| + DCHECK_EQ(static_cast<size_t>(NUM_MEDIA_DEVICE_TYPES), result.size()); |
| + |
| + blink::WebVector<blink::WebMediaDeviceInfo> devices( |
| + result[MEDIA_DEVICE_TYPE_AUDIO_INPUT].size() + |
| + result[MEDIA_DEVICE_TYPE_VIDEO_INPUT].size() + |
| + result[MEDIA_DEVICE_TYPE_AUDIO_OUTPUT].size()); |
| + size_t index = 0; |
| + for (size_t i = 0; i < NUM_MEDIA_DEVICE_TYPES; ++i) { |
| + blink::WebMediaDeviceInfo::MediaDeviceKind device_kind = |
| + ToMediaDeviceKind(static_cast<MediaDeviceType>(i)); |
| + for (const auto& device_info : result[i]) { |
| + devices[index++].initialize( |
| + blink::WebString::fromUTF8(device_info.device_id), device_kind, |
| + blink::WebString::fromUTF8(device_info.label), |
| + blink::WebString::fromUTF8(device_info.group_id)); |
| + } |
| } |
| - EnumerateDevicesSucceded(&request->media_devices_request, devices); |
| + EnumerateDevicesSucceded(&request, devices); |
| } |
| -void UserMediaClientImpl::FinalizeEnumerateSources( |
| - MediaDevicesRequestInfo* request) { |
| - blink::WebVector<blink::WebSourceInfo> |
| - sources(request->audio_input_devices.size() + |
| - request->video_input_devices.size()); |
| - for (size_t i = 0; i < request->audio_input_devices.size(); ++i) { |
| - const MediaStreamDevice& device = request->audio_input_devices[i].device; |
| - DCHECK_EQ(device.type, MEDIA_DEVICE_AUDIO_CAPTURE); |
| - std::string group_id = base::UintToString(base::Hash( |
| - !device.matched_output_device_id.empty() ? |
| - device.matched_output_device_id : |
| - device.id)); |
| - sources[i].initialize(blink::WebString::fromUTF8(device.id), |
| - blink::WebSourceInfo::SourceKindAudio, |
| - blink::WebString::fromUTF8(device.name), |
| - blink::WebSourceInfo::VideoFacingModeNone); |
| +void UserMediaClientImpl::FinalizeGetSources( |
| + blink::WebMediaStreamTrackSourcesRequest request, |
| + const EnumerationResult& result) { |
| + DCHECK_EQ(static_cast<size_t>(NUM_MEDIA_DEVICE_TYPES), result.size()); |
| + |
| + blink::WebVector<blink::WebSourceInfo> sources( |
| + result[MEDIA_DEVICE_TYPE_AUDIO_INPUT].size() + |
| + result[MEDIA_DEVICE_TYPE_VIDEO_INPUT].size()); |
| + size_t index = 0; |
| + for (const auto& device_info : result[MEDIA_DEVICE_TYPE_AUDIO_INPUT]) { |
| + sources[index++].initialize( |
| + blink::WebString::fromUTF8(device_info.device_id), |
| + blink::WebSourceInfo::SourceKindAudio, |
| + blink::WebString::fromUTF8(device_info.label), |
| + blink::WebSourceInfo::VideoFacingModeNone); |
| } |
| - size_t offset = request->audio_input_devices.size(); |
| - for (size_t i = 0; i < request->video_input_devices.size(); ++i) { |
| - const MediaStreamDevice& device = request->video_input_devices[i].device; |
| - DCHECK_EQ(device.type, MEDIA_DEVICE_VIDEO_CAPTURE); |
| - blink::WebSourceInfo::VideoFacingMode video_facing; |
| - switch (device.video_facing) { |
| - case MEDIA_VIDEO_FACING_USER: |
| - video_facing = blink::WebSourceInfo::VideoFacingModeUser; |
| - break; |
| - case MEDIA_VIDEO_FACING_ENVIRONMENT: |
| - video_facing = blink::WebSourceInfo::VideoFacingModeEnvironment; |
| - break; |
| - default: |
| - video_facing = blink::WebSourceInfo::VideoFacingModeNone; |
| - } |
| - sources[offset + i].initialize(blink::WebString::fromUTF8(device.id), |
| - blink::WebSourceInfo::SourceKindVideo, |
| - blink::WebString::fromUTF8(device.name), |
| - video_facing); |
| + |
| + for (const auto& device_info : result[MEDIA_DEVICE_TYPE_VIDEO_INPUT]) { |
| + sources[index++].initialize( |
| + blink::WebString::fromUTF8(device_info.device_id), |
| + blink::WebSourceInfo::SourceKindVideo, |
| + blink::WebString::fromUTF8(device_info.label), |
| + ToVideoFacingMode(device_info.label)); |
| } |
| - EnumerateSourcesSucceded(&request->sources_request, sources); |
| + EnumerateSourcesSucceded(&request, sources); |
| } |
| // Callback from MediaStreamDispatcher. |
| @@ -742,37 +654,7 @@ void UserMediaClientImpl::OnCreateNativeTracksCompleted( |
| void UserMediaClientImpl::OnDevicesEnumerated( |
| int request_id, |
| const StreamDeviceInfoArray& device_array) { |
| - DVLOG(1) << "UserMediaClientImpl::OnDevicesEnumerated(" << request_id << ")"; |
| - |
| - MediaDevicesRequestInfo* request = FindMediaDevicesRequestInfo(request_id); |
| - if (!request) |
| - return; |
| - |
| - if (request_id == request->audio_input_request_id) { |
| - request->has_audio_input_returned = true; |
| - request->audio_input_devices = device_array; |
| - } else if (request_id == request->video_input_request_id) { |
| - request->has_video_input_returned = true; |
| - request->video_input_devices = device_array; |
| - } else { |
| - DCHECK_EQ(request->audio_output_request_id, request_id); |
| - request->has_audio_output_returned = true; |
| - request->audio_output_devices = device_array; |
| - } |
| - |
| - if (!request->has_audio_input_returned || |
| - !request->has_video_input_returned || |
| - (!request->IsSourcesRequest() && !request->has_audio_output_returned)) { |
| - // Wait for the rest of the devices to complete. |
| - return; |
| - } |
| - |
| - if (request->IsSourcesRequest()) |
| - FinalizeEnumerateSources(request); |
| - else |
| - FinalizeEnumerateDevices(request); |
| - |
| - CancelAndDeleteMediaDevicesRequest(request); |
| + NOTREACHED(); |
| } |
| void UserMediaClientImpl::OnDeviceOpened( |
| @@ -983,51 +865,6 @@ void UserMediaClientImpl::DeleteAllUserMediaRequests() { |
| } |
| } |
| -UserMediaClientImpl::MediaDevicesRequestInfo* |
| -UserMediaClientImpl::FindMediaDevicesRequestInfo( |
| - int request_id) { |
| - MediaDevicesRequests::iterator it = media_devices_requests_.begin(); |
| - for (; it != media_devices_requests_.end(); ++it) { |
| - if ((*it)->audio_input_request_id == request_id || |
| - (*it)->video_input_request_id == request_id || |
| - (*it)->audio_output_request_id == request_id) { |
| - return (*it); |
| - } |
| - } |
| - return NULL; |
| -} |
| - |
| -UserMediaClientImpl::MediaDevicesRequestInfo* |
| -UserMediaClientImpl::FindMediaDevicesRequestInfo( |
| - const blink::WebMediaDevicesRequest& request) { |
| - MediaDevicesRequests::iterator it = media_devices_requests_.begin(); |
| - for (; it != media_devices_requests_.end(); ++it) { |
| - if ((*it)->media_devices_request == request) |
| - return (*it); |
| - } |
| - return NULL; |
| -} |
| - |
| -void UserMediaClientImpl::CancelAndDeleteMediaDevicesRequest( |
| - MediaDevicesRequestInfo* request) { |
| - MediaDevicesRequests::iterator it = media_devices_requests_.begin(); |
| - for (; it != media_devices_requests_.end(); ++it) { |
| - if ((*it) == request) { |
| - // Cancel device enumeration. |
| - media_stream_dispatcher_->StopEnumerateDevices( |
| - request->audio_input_request_id, weak_factory_.GetWeakPtr()); |
| - media_stream_dispatcher_->StopEnumerateDevices( |
| - request->video_input_request_id, weak_factory_.GetWeakPtr()); |
| - media_stream_dispatcher_->StopEnumerateDevices( |
| - request->audio_output_request_id, weak_factory_.GetWeakPtr()); |
| - |
| - media_devices_requests_.erase(it); |
| - return; |
| - } |
| - } |
| - NOTREACHED(); |
| -} |
| - |
| void UserMediaClientImpl::WillCommitProvisionalLoad() { |
| // Cancel all outstanding UserMediaRequests. |
| DeleteAllUserMediaRequests(); |
| @@ -1040,6 +877,11 @@ void UserMediaClientImpl::WillCommitProvisionalLoad() { |
| } |
| } |
| +void UserMediaClientImpl::SetMediaDevicesDispatcherForTesting( |
| + ::mojom::MediaDevicesDispatcherHostPtr media_devices_dispatcher) { |
| + media_devices_dispatcher_ = std::move(media_devices_dispatcher); |
| +} |
| + |
| void UserMediaClientImpl::OnLocalSourceStopped( |
| const blink::WebMediaStreamSource& source) { |
| DCHECK(CalledOnValidThread()); |
| @@ -1068,6 +910,16 @@ void UserMediaClientImpl::StopLocalSource( |
| source_impl->StopSource(); |
| } |
| +const ::mojom::MediaDevicesDispatcherHostPtr& |
| +UserMediaClientImpl::GetMediaDevicesDispatcher() { |
| + if (!media_devices_dispatcher_) { |
| + render_frame()->GetRemoteInterfaces()->GetInterface( |
| + mojo::GetProxy(&media_devices_dispatcher_)); |
| + } |
| + |
| + return media_devices_dispatcher_; |
| +} |
| + |
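
A minimal, self-contained sketch of the lazy-bind-plus-test-override pattern that SetMediaDevicesDispatcherForTesting() and GetMediaDevicesDispatcher() implement above: the accessor binds the real interface only if nothing was injected first, so a test that installs a fake before the first call never reaches the browser process. The Dispatcher and Client types below are invented for illustration and are not Chromium code.

    #include <cassert>
    #include <memory>
    #include <string>

    struct Dispatcher {
      virtual ~Dispatcher() = default;
      virtual std::string Name() const = 0;
    };

    struct RealDispatcher : Dispatcher {
      std::string Name() const override { return "real"; }
    };

    struct FakeDispatcher : Dispatcher {
      std::string Name() const override { return "fake"; }
    };

    class Client {
     public:
      // Test-only injection point, analogous to
      // SetMediaDevicesDispatcherForTesting().
      void SetDispatcherForTesting(std::unique_ptr<Dispatcher> d) {
        dispatcher_ = std::move(d);
      }

      // Lazy accessor, analogous to GetMediaDevicesDispatcher(): binds the
      // real implementation only if nothing was injected beforehand.
      Dispatcher& GetDispatcher() {
        if (!dispatcher_)
          dispatcher_ = std::make_unique<RealDispatcher>();
        return *dispatcher_;
      }

     private:
      std::unique_ptr<Dispatcher> dispatcher_;
    };

    int main() {
      Client production_client;
      assert(production_client.GetDispatcher().Name() == "real");

      Client test_client;
      test_client.SetDispatcherForTesting(std::make_unique<FakeDispatcher>());
      assert(test_client.GetDispatcher().Name() == "fake");
      return 0;
    }
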
| UserMediaClientImpl::UserMediaRequestInfo::UserMediaRequestInfo( |
| int request_id, |
| const blink::WebUserMediaRequest& request, |