OLD | NEW |
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "media/video/capture/win/video_capture_device_win.h" | 5 #include "media/video/capture/win/video_capture_device_win.h" |
6 | 6 |
7 #include <algorithm> | 7 #include <algorithm> |
8 #include <list> | 8 #include <list> |
9 | 9 |
10 #include "base/command_line.h" | 10 #include "base/command_line.h" |
(...skipping 132 matching lines...)
143 | 143 |
144 // Delete a media type structure that was allocated on the heap. | 144 // Delete a media type structure that was allocated on the heap. |
145 // http://msdn.microsoft.com/en-us/library/dd375432(VS.85).aspx | 145 // http://msdn.microsoft.com/en-us/library/dd375432(VS.85).aspx |
146 void DeleteMediaType(AM_MEDIA_TYPE* mt) { | 146 void DeleteMediaType(AM_MEDIA_TYPE* mt) { |
147 if (mt != NULL) { | 147 if (mt != NULL) { |
148 FreeMediaType(mt); | 148 FreeMediaType(mt); |
149 CoTaskMemFree(mt); | 149 CoTaskMemFree(mt); |
150 } | 150 } |
151 } | 151 } |
152 | 152 |
| 153 VideoPixelFormat TranslateMediaSubtypeToPixelFormat(const GUID& sub_type) { |
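| // Maps a DirectShow media subtype GUID to the corresponding Chromium pixel |
| // format. Note that MEDIASUBTYPE_IYUV is bit-identical to I420. |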
| 154 static struct { |
| 155 const GUID& sub_type; |
| 156 VideoPixelFormat format; |
| 157 } pixel_formats[] = { |
| 158 { kMediaSubTypeI420, PIXEL_FORMAT_I420 }, |
| 159 { MEDIASUBTYPE_IYUV, PIXEL_FORMAT_I420 }, |
| 160 { MEDIASUBTYPE_RGB24, PIXEL_FORMAT_RGB24 }, |
| 161 { MEDIASUBTYPE_YUY2, PIXEL_FORMAT_YUY2 }, |
| 162 { MEDIASUBTYPE_MJPG, PIXEL_FORMAT_MJPEG }, |
| 163 { MEDIASUBTYPE_UYVY, PIXEL_FORMAT_UYVY }, |
| 164 { MEDIASUBTYPE_ARGB32, PIXEL_FORMAT_ARGB }, |
| 165 }; |
| 166 for (size_t i = 0; i < ARRAYSIZE_UNSAFE(pixel_formats); ++i) { |
| 167 if (sub_type == pixel_formats[i].sub_type) |
| 168 return pixel_formats[i].format; |
| 169 } |
| 170 #ifndef NDEBUG |
| 171 WCHAR guid_str[128]; |
| 172 StringFromGUID2(sub_type, guid_str, arraysize(guid_str)); |
| 173 DVLOG(2) << "Device also supports an unknown media type " << guid_str; |
| 174 #endif |
| 175 return PIXEL_FORMAT_UNKNOWN; |
| 176 } |
| 177 |
153 } // namespace | 178 } // namespace |
154 | 179 |
155 // static | 180 // static |
156 void VideoCaptureDevice::GetDeviceNames(Names* device_names) { | 181 void VideoCaptureDevice::GetDeviceNames(Names* device_names) { |
157 const CommandLine* cmd_line = CommandLine::ForCurrentProcess(); | 182 const CommandLine* cmd_line = CommandLine::ForCurrentProcess(); |
158 // Use Media Foundation for Metro processes (Windows 8 and later) and | 183 // Use Media Foundation for Metro processes (Windows 8 and later) and |
159 // DirectShow for everything else, unless overridden via a flag. Media | 184 // DirectShow for everything else, unless overridden via a flag. Media |
160 // Foundation can also be forced by a flag on Windows 7 or 8 in non-Metro | 185 // Foundation can also be forced by a flag on Windows 7 or 8 in non-Metro |
161 // mode. | 186 // mode. |
162 if ((base::win::IsMetroProcess() && | 187 if ((base::win::IsMetroProcess() && |
(...skipping 84 matching lines...)
247 | 272 |
248 // Find the description or friendly name. | 273 // Find the description or friendly name. |
249 ScopedVariant name; | 274 ScopedVariant name; |
250 hr = prop_bag->Read(L"Description", name.Receive(), 0); | 275 hr = prop_bag->Read(L"Description", name.Receive(), 0); |
251 if (FAILED(hr)) | 276 if (FAILED(hr)) |
252 hr = prop_bag->Read(L"FriendlyName", name.Receive(), 0); | 277 hr = prop_bag->Read(L"FriendlyName", name.Receive(), 0); |
253 | 278 |
254 if (SUCCEEDED(hr) && name.type() == VT_BSTR) { | 279 if (SUCCEEDED(hr) && name.type() == VT_BSTR) { |
255 // Ignore all VFW drivers and the special Google Camera Adapter. | 280 // Ignore all VFW drivers and the special Google Camera Adapter. |
256 // Google Camera Adapter is not a real DirectShow camera device. | 281 // Google Camera Adapter is not a real DirectShow camera device. |
257 // VFW is very old Video for Windows drivers that can not be used. | 282 // VFW entries are very old Video for Windows drivers that cannot be used. |
258 const wchar_t* str_ptr = V_BSTR(&name); | 283 const wchar_t* str_ptr = V_BSTR(&name); |
259 const int name_length = arraysize(kGoogleCameraAdapter) - 1; | 284 const int name_length = arraysize(kGoogleCameraAdapter) - 1; |
260 | 285 |
261 if ((wcsstr(str_ptr, L"(VFW)") == NULL) && | 286 if ((wcsstr(str_ptr, L"(VFW)") == NULL) && |
262 lstrlenW(str_ptr) < name_length || | 287 (lstrlenW(str_ptr) < name_length || |
263 (!(LowerCaseEqualsASCII(str_ptr, str_ptr + name_length, | 288 !LowerCaseEqualsASCII(str_ptr, str_ptr + name_length, |
264 kGoogleCameraAdapter)))) { | 289 kGoogleCameraAdapter))) { |
265 std::string id; | 290 std::string id; |
266 std::string device_name(base::SysWideToUTF8(str_ptr)); | 291 std::string device_name(base::SysWideToUTF8(str_ptr)); |
267 name.Reset(); | 292 name.Reset(); |
268 hr = prop_bag->Read(L"DevicePath", name.Receive(), 0); | 293 hr = prop_bag->Read(L"DevicePath", name.Receive(), 0); |
269 if (FAILED(hr) || name.type() != VT_BSTR) { | 294 if (FAILED(hr) || name.type() != VT_BSTR) { |
270 id = device_name; | 295 id = device_name; |
271 } else { | 296 } else { |
272 DCHECK_EQ(name.type(), VT_BSTR); | 297 DCHECK_EQ(name.type(), VT_BSTR); |
273 id = base::SysWideToUTF8(V_BSTR(&name)); | 298 id = base::SysWideToUTF8(V_BSTR(&name)); |
274 } | 299 } |
275 | 300 |
276 device_names->push_back(Name(device_name, id, Name::DIRECT_SHOW)); | 301 device_names->push_back(Name(device_name, id, Name::DIRECT_SHOW)); |
277 } | 302 } |
278 } | 303 } |
279 moniker.Release(); | 304 moniker.Release(); |
280 } | 305 } |
281 } | 306 } |
282 | 307 |
283 // static | 308 // static |
284 void VideoCaptureDeviceWin::GetDeviceSupportedFormats(const Name& device, | 309 void VideoCaptureDeviceWin::GetDeviceSupportedFormats(const Name& device, |
285 VideoCaptureFormats* formats) { | 310 VideoCaptureFormats* formats) { |
286 NOTIMPLEMENTED(); | 311 DVLOG(1) << "GetDeviceSupportedFormats for " << device.name(); |
| 312 ScopedComPtr<ICreateDevEnum> dev_enum; |
| 313 HRESULT hr = dev_enum.CreateInstance(CLSID_SystemDeviceEnum, NULL, |
| 314 CLSCTX_INPROC); |
| 315 if (FAILED(hr)) |
| 316 return; |
| 317 |
| 318 ScopedComPtr<IEnumMoniker> enum_moniker; |
| 319 hr = dev_enum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, |
| 320 enum_moniker.Receive(), 0); |
| 321 // CreateClassEnumerator returns S_FALSE on some Windows versions when no |
| 322 // camera exists, so the FAILED macro can't be used here. |
| 323 if (hr != S_OK) |
| 324 return; |
| 325 |
| 326 // Walk the capture devices. No need to check for "google camera adapter", |
| 327 // since this is already skipped in the enumeration of GetDeviceNames(). |
| 328 ScopedComPtr<IMoniker> moniker; |
| 330 ScopedVariant device_id; |
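| // Find the moniker whose DevicePath matches the unique id of |device|. |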
| 331 while (enum_moniker->Next(1, moniker.Receive(), NULL) == S_OK) { |
| 332 ScopedComPtr<IPropertyBag> prop_bag; |
| 333 hr = moniker->BindToStorage(0, 0, IID_IPropertyBag, prop_bag.ReceiveVoid()); |
| 334 if (FAILED(hr)) { |
| 335 moniker.Release(); |
| 336 continue; |
| 337 } |
| 338 |
| 339 device_id.Reset(); |
| 340 hr = prop_bag->Read(L"DevicePath", device_id.Receive(), 0); |
| 341 if (FAILED(hr)) { |
| 342 DVLOG(1) << "Couldn't read a device's DevicePath."; |
| 343 return; |
| 344 } |
| 345 if (device.id() == base::SysWideToUTF8(V_BSTR(&device_id))) |
| 346 break; |
| 347 moniker.Release(); |
| 348 } |
| 349 |
| 350 if (moniker.get()) { |
| 351 base::win::ScopedComPtr<IBaseFilter> capture_filter; |
| 352 hr = GetDeviceFilter(device, capture_filter.Receive()); |
| 353 if (!capture_filter) { |
| 354 DVLOG(2) << "Failed to create capture filter."; |
| 355 return; |
| 356 } |
| 357 |
| 358 base::win::ScopedComPtr<IPin> output_capture_pin; |
| 359 hr = GetPin(capture_filter, PINDIR_OUTPUT, PIN_CATEGORY_CAPTURE, |
| 360 output_capture_pin.Receive()); |
| 361 if (!output_capture_pin) { |
| 362 DVLOG(2) << "Failed to get capture output pin"; |
| 363 return; |
| 364 } |
| 365 |
| 366 ScopedComPtr<IAMStreamConfig> stream_config; |
| 367 hr = output_capture_pin.QueryInterface(stream_config.Receive()); |
| 368 if (FAILED(hr)) { |
| 369 DVLOG(2) << "Failed to get IAMStreamConfig interface from " |
| 370 "capture device"; |
| 371 return; |
| 372 } |
| 373 |
| 374 int count, size; |
| 375 hr = stream_config->GetNumberOfCapabilities(&count, &size); |
| 376 if (FAILED(hr)) { |
| 377 DVLOG(2) << "Failed to GetNumberOfCapabilities"; |
| 378 return; |
| 379 } |
| 380 |
| 381 AM_MEDIA_TYPE* media_type = NULL; |
| 382 VIDEO_STREAM_CONFIG_CAPS caps; |
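| // |caps| receives a VIDEO_STREAM_CONFIG_CAPS blob for each capability; each |
| // successful GetStreamCaps() call also allocates |media_type|, which is |
| // released with DeleteMediaType() at the end of the iteration. |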
| 383 for (int i = 0; i < count; ++i) { |
| 384 hr = stream_config->GetStreamCaps(i, &media_type, |
| 385 reinterpret_cast<BYTE*>(&caps)); |
| 386 // GetStreamCaps() may return S_FALSE, so don't use the FAILED() or |
| 387 // SUCCEEDED() macros here since they would give misleading results. |
| 388 if (hr != S_OK) { |
| 389 DVLOG(2) << "Failed to GetStreamCaps"; |
| 390 return; |
| 391 } |
| 392 |
| 393 if (media_type->majortype == MEDIATYPE_Video && |
| 394 media_type->formattype == FORMAT_VideoInfo) { |
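| // Only VIDEOINFOHEADER-based entries (FORMAT_VideoInfo) are reported; other |
| // format types, e.g. FORMAT_VideoInfo2, are ignored. |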
| 395 VIDEOINFOHEADER* h = |
| 396 reinterpret_cast<VIDEOINFOHEADER*>(media_type->pbFormat); |
| 397 VideoCaptureFormat format; |
| 398 format.frame_size.SetSize(h->bmiHeader.biWidth, |
| 399 h->bmiHeader.biHeight); |
| 400 // Trust the frame rate from the VIDEOINFOHEADER. |
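| // AvgTimePerFrame is expressed in 100 ns units, so dividing |
| // kSecondsToReferenceTime (one second in the same units) by it yields fps. |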
| 401 format.frame_rate = (h->AvgTimePerFrame > 0) ? |
| 402 static_cast<int>(kSecondsToReferenceTime / h->AvgTimePerFrame) : |
| 403 0; |
| 404 format.pixel_format = |
| 405 TranslateMediaSubtypeToPixelFormat(media_type->subtype); |
| 406 formats->push_back(format); |
| 407 DVLOG(1) << device.name() << " resolution: " |
| 408 << format.frame_size.ToString() << ", fps: " << format.frame_rate |
| 409 << ", pixel format: " << format.pixel_format; |
| 410 } |
| // Release the AM_MEDIA_TYPE allocated by GetStreamCaps(). |
| DeleteMediaType(media_type); |
| 411 } |
| 412 } |
287 } | 413 } |
288 | 414 |
289 VideoCaptureDeviceWin::VideoCaptureDeviceWin(const Name& device_name) | 415 VideoCaptureDeviceWin::VideoCaptureDeviceWin(const Name& device_name) |
290 : device_name_(device_name), | 416 : device_name_(device_name), |
291 state_(kIdle) { | 417 state_(kIdle) { |
292 DetachFromThread(); | 418 DetachFromThread(); |
293 } | 419 } |
294 | 420 |
295 VideoCaptureDeviceWin::~VideoCaptureDeviceWin() { | 421 VideoCaptureDeviceWin::~VideoCaptureDeviceWin() { |
296 DCHECK(CalledOnValidThread()); | 422 DCHECK(CalledOnValidThread()); |
(...skipping 278 matching lines...)
575 capability.supported_format.frame_rate = | 701 capability.supported_format.frame_rate = |
576 (time_per_frame > 0) | 702 (time_per_frame > 0) |
577 ? static_cast<int>(kSecondsToReferenceTime / time_per_frame) | 703 ? static_cast<int>(kSecondsToReferenceTime / time_per_frame) |
578 : 0; | 704 : 0; |
579 | 705 |
580 // DirectShow works at the moment only on integer frame_rate but the | 706 // DirectShow works at the moment only on integer frame_rate but the |
581 // best capability matching class works on rational frame rates. | 707 // best capability matching class works on rational frame rates. |
582 capability.frame_rate_numerator = capability.supported_format.frame_rate; | 708 capability.frame_rate_numerator = capability.supported_format.frame_rate; |
583 capability.frame_rate_denominator = 1; | 709 capability.frame_rate_denominator = 1; |
584 | 710 |
585 // We can't switch MEDIATYPE :~(. | 711 capability.supported_format.pixel_format = |
586 if (media_type->subtype == kMediaSubTypeI420) { | 712 TranslateMediaSubtypeToPixelFormat(media_type->subtype); |
587 capability.supported_format.pixel_format = PIXEL_FORMAT_I420; | |
588 } else if (media_type->subtype == MEDIASUBTYPE_IYUV) { | |
589 // This is identical to PIXEL_FORMAT_I420. | |
590 capability.supported_format.pixel_format = PIXEL_FORMAT_I420; | |
591 } else if (media_type->subtype == MEDIASUBTYPE_RGB24) { | |
592 capability.supported_format.pixel_format = PIXEL_FORMAT_RGB24; | |
593 } else if (media_type->subtype == MEDIASUBTYPE_YUY2) { | |
594 capability.supported_format.pixel_format = PIXEL_FORMAT_YUY2; | |
595 } else if (media_type->subtype == MEDIASUBTYPE_MJPG) { | |
596 capability.supported_format.pixel_format = PIXEL_FORMAT_MJPEG; | |
597 } else if (media_type->subtype == MEDIASUBTYPE_UYVY) { | |
598 capability.supported_format.pixel_format = PIXEL_FORMAT_UYVY; | |
599 } else if (media_type->subtype == MEDIASUBTYPE_ARGB32) { | |
600 capability.supported_format.pixel_format = PIXEL_FORMAT_ARGB; | |
601 } else { | |
602 WCHAR guid_str[128]; | |
603 StringFromGUID2(media_type->subtype, guid_str, arraysize(guid_str)); | |
604 DVLOG(2) << "Device supports (also) an unknown media type " << guid_str; | |
605 continue; | |
606 } | |
607 capabilities_.Add(capability); | 713 capabilities_.Add(capability); |
608 } | 714 } |
609 DeleteMediaType(media_type); | 715 DeleteMediaType(media_type); |
610 media_type = NULL; | 716 media_type = NULL; |
611 } | 717 } |
612 | 718 |
613 return !capabilities_.empty(); | 719 return !capabilities_.empty(); |
614 } | 720 } |
615 | 721 |
616 void VideoCaptureDeviceWin::SetErrorState(const std::string& reason) { | 722 void VideoCaptureDeviceWin::SetErrorState(const std::string& reason) { |
617 DCHECK(CalledOnValidThread()); | 723 DCHECK(CalledOnValidThread()); |
618 DVLOG(1) << reason; | 724 DVLOG(1) << reason; |
619 state_ = kError; | 725 state_ = kError; |
620 client_->OnError(reason); | 726 client_->OnError(reason); |
621 } | 727 } |
622 } // namespace media | 728 } // namespace media |