| Index: trunk/src/media/video/capture/win/video_capture_device_win.cc
|
| ===================================================================
|
| --- trunk/src/media/video/capture/win/video_capture_device_win.cc (revision 252035)
|
| +++ trunk/src/media/video/capture/win/video_capture_device_win.cc (working copy)
|
| @@ -150,31 +150,6 @@
|
| }
|
| }
|
|
|
| -VideoPixelFormat TranslateMediaSubtypeToPixelFormat(const GUID& sub_type) {
|
| - static struct {
|
| - const GUID& sub_type;
|
| - VideoPixelFormat format;
|
| - } pixel_formats[] = {
|
| - { kMediaSubTypeI420, PIXEL_FORMAT_I420 },
|
| - { MEDIASUBTYPE_IYUV, PIXEL_FORMAT_I420 },
|
| - { MEDIASUBTYPE_RGB24, PIXEL_FORMAT_RGB24 },
|
| - { MEDIASUBTYPE_YUY2, PIXEL_FORMAT_YUY2 },
|
| - { MEDIASUBTYPE_MJPG, PIXEL_FORMAT_MJPEG },
|
| - { MEDIASUBTYPE_UYVY, PIXEL_FORMAT_UYVY },
|
| - { MEDIASUBTYPE_ARGB32, PIXEL_FORMAT_ARGB },
|
| - };
|
| - for (size_t i = 0; i < ARRAYSIZE_UNSAFE(pixel_formats); ++i) {
|
| - if (sub_type == pixel_formats[i].sub_type)
|
| - return pixel_formats[i].format;
|
| - }
|
| -#ifndef NDEBUG
|
| - WCHAR guid_str[128];
|
| - StringFromGUID2(sub_type, guid_str, arraysize(guid_str));
|
| - DVLOG(2) << "Device (also) supports an unknown media type " << guid_str;
|
| -#endif
|
| - return PIXEL_FORMAT_UNKNOWN;
|
| -}
|
| -
|
| } // namespace
|
|
|
| // static
|
| @@ -279,7 +254,7 @@
|
| if (SUCCEEDED(hr) && name.type() == VT_BSTR) {
|
| // Ignore all VFW drivers and the special Google Camera Adapter.
|
| // Google Camera Adapter is not a real DirectShow camera device.
|
| - // VFW are very old Video for Windows drivers that can not be used.
|
| + // VFW refers to very old Video for Windows drivers that cannot be used.
|
| const wchar_t* str_ptr = V_BSTR(&name);
|
| const int name_length = arraysize(kGoogleCameraAdapter) - 1;
|
|
|
| @@ -308,108 +283,7 @@
|
| // static
|
| void VideoCaptureDeviceWin::GetDeviceSupportedFormats(const Name& device,
|
| VideoCaptureFormats* formats) {
|
| - DVLOG(1) << "GetDeviceSupportedFormats for " << device.name();
|
| - ScopedComPtr<ICreateDevEnum> dev_enum;
|
| - HRESULT hr = dev_enum.CreateInstance(CLSID_SystemDeviceEnum, NULL,
|
| - CLSCTX_INPROC);
|
| - if (FAILED(hr))
|
| - return;
|
| -
|
| - ScopedComPtr<IEnumMoniker> enum_moniker;
|
| - hr = dev_enum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
|
| - enum_moniker.Receive(), 0);
|
| - // CreateClassEnumerator returns S_FALSE on some Windows OS when no camera
|
| - // exists. Therefore the FAILED macro can't be used.
|
| - if (hr != S_OK)
|
| - return;
|
| -
|
| - // Walk the capture devices. No need to check for "google camera adapter",
|
| - // since this is already skipped in the enumeration of GetDeviceNames().
|
| - ScopedComPtr<IMoniker> moniker;
|
| - int index = 0;
|
| - ScopedVariant device_id;
|
| - while (enum_moniker->Next(1, moniker.Receive(), NULL) == S_OK) {
|
| - ScopedComPtr<IPropertyBag> prop_bag;
|
| - hr = moniker->BindToStorage(0, 0, IID_IPropertyBag, prop_bag.ReceiveVoid());
|
| - if (FAILED(hr)) {
|
| - moniker.Release();
|
| - continue;
|
| - }
|
| -
|
| - device_id.Reset();
|
| - hr = prop_bag->Read(L"DevicePath", device_id.Receive(), 0);
|
| - if (FAILED(hr)) {
|
| - DVLOG(1) << "Couldn't read a device's DevicePath.";
|
| - return;
|
| - }
|
| - if (device.id() == base::SysWideToUTF8(V_BSTR(&device_id)))
|
| - break;
|
| - moniker.Release();
|
| - }
|
| -
|
| - if (moniker.get()) {
|
| - base::win::ScopedComPtr<IBaseFilter> capture_filter;
|
| - hr = GetDeviceFilter(device, capture_filter.Receive());
|
| - if (!capture_filter) {
|
| - DVLOG(2) << "Failed to create capture filter.";
|
| - return;
|
| - }
|
| -
|
| - base::win::ScopedComPtr<IPin> output_capture_pin;
|
| - hr = GetPin(capture_filter, PINDIR_OUTPUT, PIN_CATEGORY_CAPTURE,
|
| - output_capture_pin.Receive());
|
| - if (!output_capture_pin) {
|
| - DVLOG(2) << "Failed to get capture output pin";
|
| - return;
|
| - }
|
| -
|
| - ScopedComPtr<IAMStreamConfig> stream_config;
|
| - hr = output_capture_pin.QueryInterface(stream_config.Receive());
|
| - if (FAILED(hr)) {
|
| - DVLOG(2) << "Failed to get IAMStreamConfig interface from "
|
| - "capture device";
|
| - return;
|
| - }
|
| -
|
| - int count, size;
|
| - hr = stream_config->GetNumberOfCapabilities(&count, &size);
|
| - if (FAILED(hr)) {
|
| - DVLOG(2) << "Failed to GetNumberOfCapabilities";
|
| - return;
|
| - }
|
| -
|
| - AM_MEDIA_TYPE* media_type = NULL;
|
| - VIDEO_STREAM_CONFIG_CAPS caps;
|
| - for (int i = 0; i < count; ++i) {
|
| - hr = stream_config->GetStreamCaps(i, &media_type,
|
| - reinterpret_cast<BYTE*>(&caps));
|
| - // GetStreamCaps() may return S_FALSE, so don't use FAILED() or SUCCEED()
|
| - // macros here since they'll trigger incorrectly.
|
| - if (hr != S_OK) {
|
| - DVLOG(2) << "Failed to GetStreamCaps";
|
| - return;
|
| - }
|
| -
|
| - if (media_type->majortype == MEDIATYPE_Video &&
|
| - media_type->formattype == FORMAT_VideoInfo) {
|
| - VIDEOINFOHEADER* h =
|
| - reinterpret_cast<VIDEOINFOHEADER*>(media_type->pbFormat);
|
| - VideoCaptureFormat format;
|
| - format.frame_size.SetSize(h->bmiHeader.biWidth,
|
| - h->bmiHeader.biHeight);
|
| - // Trust the frame rate from the VIDEOINFOHEADER.
|
| - format.frame_rate = (h->AvgTimePerFrame > 0) ?
|
| - static_cast<int>(kSecondsToReferenceTime / h->AvgTimePerFrame) :
|
| - 0;
|
| - format.pixel_format =
|
| - TranslateMediaSubtypeToPixelFormat(media_type->subtype);
|
| - formats->push_back(format);
|
| - DVLOG(1) << device.name() << " resolution: "
|
| - << format.frame_size.ToString() << ", fps: " << format.frame_rate
|
| - << ", pixel format: " << format.pixel_format;
|
| - }
|
| - }
|
| - }
|
| + NOTIMPLEMENTED();
|
| }
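A note on the frame-rate math in the removed block above: DirectShow's VIDEOINFOHEADER reports AvgTimePerFrame in REFERENCE_TIME units of 100 nanoseconds, so kSecondsToReferenceTime is 10,000,000 and a plain integer division yields frames per second. A minimal standalone sketch of that conversion; the helper name and the main() driver are illustrative, only the constant mirrors the Chromium code:

    #include <cstdint>
    #include <iostream>

    // One second expressed in 100 ns REFERENCE_TIME units.
    const int64_t kSecondsToReferenceTime = 10000000;

    // Same guard and division as the removed GetDeviceSupportedFormats() loop:
    // an AvgTimePerFrame of zero (or less) maps to a frame rate of 0.
    int FrameRateFromAvgTimePerFrame(int64_t avg_time_per_frame) {
      return avg_time_per_frame > 0
                 ? static_cast<int>(kSecondsToReferenceTime / avg_time_per_frame)
                 : 0;
    }

    int main() {
      // 333333 * 100 ns is roughly 1/30 s, so this prints 30.
      std::cout << FrameRateFromAvgTimePerFrame(333333) << "\n";
      return 0;
    }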
|
|
|
| VideoCaptureDeviceWin::VideoCaptureDeviceWin(const Name& device_name)
|
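An aside on the HRESULT checks documented in the removed enumeration code: CreateClassEnumerator() and GetStreamCaps() may return S_FALSE, which is a success code, so FAILED() and SUCCEEDED() both treat it as success and only an explicit comparison against S_OK catches the "no result" case. A small sketch of that distinction; the program is illustrative and assumes only the standard HRESULT macros:

    #include <windows.h>
    #include <iostream>

    int main() {
      // S_OK is 0 and S_FALSE is 1. FAILED() only checks the sign bit of the
      // HRESULT, so S_FALSE ("succeeded, but nothing to report") is not a
      // failure to it. That is why the removed code compared against S_OK.
      HRESULT hr = S_FALSE;
      std::cout << std::boolalpha
                << "FAILED(S_FALSE)    = " << FAILED(hr) << "\n"     // false
                << "SUCCEEDED(S_FALSE) = " << SUCCEEDED(hr) << "\n"  // true
                << "(S_FALSE == S_OK)  = " << (hr == S_OK) << "\n";  // false
      return 0;
    }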
| @@ -708,8 +582,28 @@
|
| capability.frame_rate_numerator = capability.supported_format.frame_rate;
|
| capability.frame_rate_denominator = 1;
|
|
|
| - capability.supported_format.pixel_format =
|
| - TranslateMediaSubtypeToPixelFormat(media_type->subtype);
|
| + // We can't use a switch statement on the media subtype: GUIDs aren't integral constants.
|
| + if (media_type->subtype == kMediaSubTypeI420) {
|
| + capability.supported_format.pixel_format = PIXEL_FORMAT_I420;
|
| + } else if (media_type->subtype == MEDIASUBTYPE_IYUV) {
|
| + // This is identical to PIXEL_FORMAT_I420.
|
| + capability.supported_format.pixel_format = PIXEL_FORMAT_I420;
|
| + } else if (media_type->subtype == MEDIASUBTYPE_RGB24) {
|
| + capability.supported_format.pixel_format = PIXEL_FORMAT_RGB24;
|
| + } else if (media_type->subtype == MEDIASUBTYPE_YUY2) {
|
| + capability.supported_format.pixel_format = PIXEL_FORMAT_YUY2;
|
| + } else if (media_type->subtype == MEDIASUBTYPE_MJPG) {
|
| + capability.supported_format.pixel_format = PIXEL_FORMAT_MJPEG;
|
| + } else if (media_type->subtype == MEDIASUBTYPE_UYVY) {
|
| + capability.supported_format.pixel_format = PIXEL_FORMAT_UYVY;
|
| + } else if (media_type->subtype == MEDIASUBTYPE_ARGB32) {
|
| + capability.supported_format.pixel_format = PIXEL_FORMAT_ARGB;
|
| + } else {
|
| + WCHAR guid_str[128];
|
| + StringFromGUID2(media_type->subtype, guid_str, arraysize(guid_str));
|
| + DVLOG(2) << "Device supports (also) an unknown media type " << guid_str;
|
| + continue;
|
| + }
|
| capabilities_.Add(capability);
|
| }
|
| DeleteMediaType(media_type);
|
|
|
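A note on the added if/else chain: GUIDs are structs rather than integral constants, so media_type->subtype cannot appear as a switch label; the code has to rely on equality comparisons (GUID's operator==, which wraps IsEqualGUID) and uses StringFromGUID2() to log any unrecognized subtype. A self-contained sketch of both points; it uses the documented value of MEDIASUBTYPE_YUY2 so it builds without the DirectShow headers, and needs ole32.lib for StringFromGUID2():

    #include <windows.h>
    #include <iostream>

    // Documented value of MEDIASUBTYPE_YUY2: the FourCC 'YUY2' (0x32595559)
    // followed by the standard media-subtype GUID suffix.
    const GUID kYuy2 = {0x32595559, 0x0000, 0x0010,
                        {0x80, 0x00, 0x00, 0xAA, 0x00, 0x38, 0x9B, 0x71}};

    int main() {
      // GUID equality goes through operator== / IsEqualGUID, which is what the
      // if/else cascade above depends on; a switch on a GUID will not compile.
      std::wcout << ((kYuy2 == kYuy2) ? L"equal\n" : L"not equal\n");

      // StringFromGUID2() is what the "unknown media type" branch uses to log
      // the subtype; it needs a wide buffer of at least 39 characters.
      WCHAR guid_str[64];
      if (StringFromGUID2(kYuy2, guid_str, ARRAYSIZE(guid_str)) > 0)
        std::wcout << guid_str << L"\n";  // {32595559-0000-0010-8000-00AA00389B71}
      return 0;
    }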