OLD | NEW |
---|---|
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "media/video/capture/win/video_capture_device_win.h" | 5 #include "media/video/capture/win/video_capture_device_factory_win.h" |
6 | 6 |
7 #include <ks.h> | 7 #include <mfapi.h> |
8 #include <ksmedia.h> | 8 #include <mferror.h> |
9 | |
10 #include <algorithm> | |
11 #include <list> | |
12 | 9 |
13 #include "base/command_line.h" | 10 #include "base/command_line.h" |
11 #include "base/lazy_instance.h" | |
14 #include "base/strings/string_util.h" | 12 #include "base/strings/string_util.h" |
15 #include "base/strings/sys_string_conversions.h" | 13 #include "base/strings/sys_string_conversions.h" |
16 #include "base/win/metro.h" | 14 #include "base/win/metro.h" |
17 #include "base/win/scoped_co_mem.h" | 15 #include "base/win/scoped_co_mem.h" |
18 #include "base/win/scoped_variant.h" | 16 #include "base/win/scoped_variant.h" |
19 #include "base/win/windows_version.h" | 17 #include "base/win/windows_version.h" |
20 #include "media/base/media_switches.h" | 18 #include "media/base/media_switches.h" |
21 #include "media/video/capture/win/video_capture_device_mf_win.h" | 19 #include "media/video/capture/win/video_capture_device_mf_win.h" |
20 #include "media/video/capture/win/video_capture_device_win.h" | |
22 | 21 |
23 using base::win::ScopedCoMem; | 22 using base::win::ScopedCoMem; |
24 using base::win::ScopedComPtr; | 23 using base::win::ScopedComPtr; |
25 using base::win::ScopedVariant; | 24 using base::win::ScopedVariant; |
26 | 25 |
27 namespace media { | 26 namespace media { |
28 namespace { | |
29 | 27 |
30 // Finds and creates a DirectShow Video Capture filter matching the device_name. | 28 // Lazy Instance to initialize the MediaFoundation Library. |
31 HRESULT GetDeviceFilter(const VideoCaptureDevice::Name& device_name, | 29 class MFInitializerSingleton { |
32 IBaseFilter** filter) { | 30 public: |
33 DCHECK(filter); | 31 MFInitializerSingleton() { MFStartup(MF_VERSION, MFSTARTUP_LITE); } |
32 ~MFInitializerSingleton() { MFShutdown(); } | |
33 }; | |
34 | |
35 static base::LazyInstance<MFInitializerSingleton> g_mf_initialize = | |
36 LAZY_INSTANCE_INITIALIZER; | |
37 | |
38 static void EnsureMediaFoundationInit() { | |
39 g_mf_initialize.Get(); | |
40 } | |
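The LazyInstance above gives a thread-safe "start Media Foundation once, on first use, and shut it down at process exit" guarantee. Purely as an illustration (this is not part of the CL), the same idea can be sketched with a function-local static; base::LazyInstance is what the file actually uses, in part because it stays safe on toolchains without C++11 magic statics.

```cpp
// Illustrative sketch only, assuming nothing beyond <mfapi.h>: the guarantee
// EnsureMediaFoundationInit() provides, expressed with a function-local
// static instead of base::LazyInstance.
#include <mfapi.h>
#pragma comment(lib, "mfplat.lib")

namespace {

class MFLifetime {
 public:
  MFLifetime() { MFStartup(MF_VERSION, MFSTARTUP_LITE); }
  ~MFLifetime() { MFShutdown(); }
};

void EnsureMediaFoundationInitSketch() {
  // Constructed on the first call; MFShutdown() runs at process teardown.
  static MFLifetime mf_lifetime;
  (void)mf_lifetime;
}

}  // namespace
```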
41 | |
42 // TODO(mcasas): This method is only used from PlatformSupportsMediaFoundation, | |
43 // which is either hidden behind a DCHECK or used from tests only. Consider | |
44 // removing this function altogether and relying only on MFInitializerSingleton | |
45 // for initializing MF. | |
tommi (sloooow) - chröme
2014/05/15 15:18:33
See the comments where LoadMediaFoundationDlls is
mcasas
2014/05/15 16:03:23
Works for me!
| |
46 static bool LoadMediaFoundationDlls() { | |
47 static const wchar_t* const kMfDLLs[] = { | |
48 L"%WINDIR%\\system32\\mf.dll", | |
49 L"%WINDIR%\\system32\\mfplat.dll", | |
50 L"%WINDIR%\\system32\\mfreadwrite.dll", | |
51 }; | |
52 | |
53 for (int i = 0; i < arraysize(kMfDLLs); ++i) { | |
54 wchar_t path[MAX_PATH] = {0}; | |
55 ExpandEnvironmentStringsW(kMfDLLs[i], path, arraysize(path)); | |
56 if (!LoadLibraryExW(path, NULL, LOAD_WITH_ALTERED_SEARCH_PATH)) | |
57 return false; | |
58 } | |
59 return true; | |
60 } | |
61 | |
62 static bool PrepareVideoCaptureAttributesMediaFoundation( | |
63 IMFAttributes** attributes, | |
64 int count) { | |
65 EnsureMediaFoundationInit(); | |
66 | |
67 if (FAILED(MFCreateAttributes(attributes, count))) | |
68 return false; | |
69 | |
70 return SUCCEEDED((*attributes)->SetGUID(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, | |
71 MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID)); | |
72 } | |
73 | |
74 static bool CreateVideoCaptureDeviceMediaFoundation(const char* sym_link, | |
75 IMFMediaSource** source) { | |
76 ScopedComPtr<IMFAttributes> attributes; | |
77 if (!PrepareVideoCaptureAttributesMediaFoundation(attributes.Receive(), 2)) | |
78 return false; | |
79 | |
80 attributes->SetString(MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK, | |
81 base::SysUTF8ToWide(sym_link).c_str()); | |
82 | |
83 return SUCCEEDED(MFCreateDeviceSource(attributes, source)); | |
84 } | |
85 | |
86 static bool EnumerateVideoDevicesMediaFoundation(IMFActivate*** devices, | |
87 UINT32* count) { | |
88 ScopedComPtr<IMFAttributes> attributes; | |
89 if (!PrepareVideoCaptureAttributesMediaFoundation(attributes.Receive(), 1)) | |
90 return false; | |
91 | |
92 return SUCCEEDED(MFEnumDeviceSources(attributes, devices, count)); | |
93 } | |
94 | |
95 static void GetDeviceNamesDirectShow(VideoCaptureDevice::Names* device_names) { | |
96 DCHECK(device_names); | |
97 DVLOG(1) << " GetDeviceNamesDirectShow"; | |
34 | 98 |
35 ScopedComPtr<ICreateDevEnum> dev_enum; | 99 ScopedComPtr<ICreateDevEnum> dev_enum; |
36 HRESULT hr = dev_enum.CreateInstance(CLSID_SystemDeviceEnum, NULL, | 100 HRESULT hr = dev_enum.CreateInstance(CLSID_SystemDeviceEnum, NULL, |
37 CLSCTX_INPROC); | |
38 if (FAILED(hr)) | |
39 return hr; | |
40 | |
41 ScopedComPtr<IEnumMoniker> enum_moniker; | |
42 hr = dev_enum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, | |
43 enum_moniker.Receive(), 0); | |
44 // CreateClassEnumerator returns S_FALSE on some Windows OS | |
45 // when no camera exist. Therefore the FAILED macro can't be used. | |
46 if (hr != S_OK) | |
47 return NULL; | |
48 | |
49 ScopedComPtr<IMoniker> moniker; | |
50 ScopedComPtr<IBaseFilter> capture_filter; | |
51 DWORD fetched = 0; | |
52 while (enum_moniker->Next(1, moniker.Receive(), &fetched) == S_OK) { | |
53 ScopedComPtr<IPropertyBag> prop_bag; | |
54 hr = moniker->BindToStorage(0, 0, IID_IPropertyBag, prop_bag.ReceiveVoid()); | |
55 if (FAILED(hr)) { | |
56 moniker.Release(); | |
57 continue; | |
58 } | |
59 | |
60 // Find the description or friendly name. | |
61 static const wchar_t* kPropertyNames[] = { | |
62 L"DevicePath", L"Description", L"FriendlyName" | |
63 }; | |
64 ScopedVariant name; | |
65 for (size_t i = 0; | |
66 i < arraysize(kPropertyNames) && name.type() != VT_BSTR; ++i) { | |
67 prop_bag->Read(kPropertyNames[i], name.Receive(), 0); | |
68 } | |
69 if (name.type() == VT_BSTR) { | |
70 std::string device_path(base::SysWideToUTF8(V_BSTR(&name))); | |
71 if (device_path.compare(device_name.id()) == 0) { | |
72 // We have found the requested device | |
73 hr = moniker->BindToObject(0, 0, IID_IBaseFilter, | |
74 capture_filter.ReceiveVoid()); | |
75 DVPLOG_IF(2, FAILED(hr)) << "Failed to bind camera filter."; | |
76 break; | |
77 } | |
78 } | |
79 moniker.Release(); | |
80 } | |
81 | |
82 *filter = capture_filter.Detach(); | |
83 if (!*filter && SUCCEEDED(hr)) | |
84 hr = HRESULT_FROM_WIN32(ERROR_NOT_FOUND); | |
85 | |
86 return hr; | |
87 } | |
88 | |
89 // Check if a Pin matches a category. | |
90 bool PinMatchesCategory(IPin* pin, REFGUID category) { | |
91 DCHECK(pin); | |
92 bool found = false; | |
93 ScopedComPtr<IKsPropertySet> ks_property; | |
94 HRESULT hr = ks_property.QueryFrom(pin); | |
95 if (SUCCEEDED(hr)) { | |
96 GUID pin_category; | |
97 DWORD return_value; | |
98 hr = ks_property->Get(AMPROPSETID_Pin, AMPROPERTY_PIN_CATEGORY, NULL, 0, | |
99 &pin_category, sizeof(pin_category), &return_value); | |
100 if (SUCCEEDED(hr) && (return_value == sizeof(pin_category))) { | |
101 found = (pin_category == category); | |
102 } | |
103 } | |
104 return found; | |
105 } | |
106 | |
107 // Finds an IPin on an IBaseFilter given the direction and category. | |
108 ScopedComPtr<IPin> GetPin(IBaseFilter* filter, PIN_DIRECTION pin_dir, | |
109 REFGUID category) { | |
110 ScopedComPtr<IPin> pin; | |
111 ScopedComPtr<IEnumPins> pin_emum; | |
112 HRESULT hr = filter->EnumPins(pin_emum.Receive()); | |
113 if (pin_emum == NULL) | |
114 return pin; | |
115 | |
116 // Get first unconnected pin. | |
117 hr = pin_emum->Reset(); // set to first pin | |
118 while ((hr = pin_emum->Next(1, pin.Receive(), NULL)) == S_OK) { | |
119 PIN_DIRECTION this_pin_dir = static_cast<PIN_DIRECTION>(-1); | |
120 hr = pin->QueryDirection(&this_pin_dir); | |
121 if (pin_dir == this_pin_dir) { | |
122 if (category == GUID_NULL || PinMatchesCategory(pin, category)) | |
123 return pin; | |
124 } | |
125 pin.Release(); | |
126 } | |
127 | |
128 DCHECK(!pin); | |
129 return pin; | |
130 } | |
131 | |
132 // Release the format block for a media type. | |
133 // http://msdn.microsoft.com/en-us/library/dd375432(VS.85).aspx | |
134 void FreeMediaType(AM_MEDIA_TYPE* mt) { | |
135 if (mt->cbFormat != 0) { | |
136 CoTaskMemFree(mt->pbFormat); | |
137 mt->cbFormat = 0; | |
138 mt->pbFormat = NULL; | |
139 } | |
140 if (mt->pUnk != NULL) { | |
141 NOTREACHED(); | |
142 // pUnk should not be used. | |
143 mt->pUnk->Release(); | |
144 mt->pUnk = NULL; | |
145 } | |
146 } | |
147 | |
148 // Delete a media type structure that was allocated on the heap. | |
149 // http://msdn.microsoft.com/en-us/library/dd375432(VS.85).aspx | |
150 void DeleteMediaType(AM_MEDIA_TYPE* mt) { | |
151 if (mt != NULL) { | |
152 FreeMediaType(mt); | |
153 CoTaskMemFree(mt); | |
154 } | |
155 } | |
156 | |
157 // A utility class that wraps the AM_MEDIA_TYPE type and guarantees that | |
158 // we free the structure when exiting the scope. DCHECKing is also done to | |
159 // avoid memory leaks. | |
160 class ScopedMediaType { | |
161 public: | |
162 ScopedMediaType() : media_type_(NULL) {} | |
163 ~ScopedMediaType() { Free(); } | |
164 | |
165 AM_MEDIA_TYPE* operator->() { return media_type_; } | |
166 AM_MEDIA_TYPE* get() { return media_type_; } | |
167 | |
168 void Free() { | |
169 if (!media_type_) | |
170 return; | |
171 | |
172 DeleteMediaType(media_type_); | |
173 media_type_= NULL; | |
174 } | |
175 | |
176 AM_MEDIA_TYPE** Receive() { | |
177 DCHECK(!media_type_); | |
178 return &media_type_; | |
179 } | |
180 | |
181 private: | |
182 AM_MEDIA_TYPE* media_type_; | |
183 }; | |
184 | |
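A short, hypothetical usage sketch of ScopedMediaType (mirroring the enumeration loops later in this file); |stream_config| is assumed to be a valid IAMStreamConfig obtained from a capture pin:

```cpp
// Hypothetical sketch, not part of this CL: walking a capture pin's
// capabilities while letting ScopedMediaType free each AM_MEDIA_TYPE.
void EnumerateCapsSketch(IAMStreamConfig* stream_config) {
  int count = 0, size = 0;
  if (FAILED(stream_config->GetNumberOfCapabilities(&count, &size)))
    return;
  scoped_ptr<BYTE[]> caps(new BYTE[size]);
  for (int i = 0; i < count; ++i) {
    ScopedMediaType media_type;
    // GetStreamCaps() can return S_FALSE, so compare against S_OK explicitly.
    if (stream_config->GetStreamCaps(i, media_type.Receive(), caps.get()) !=
        S_OK) {
      break;
    }
    // Inspect media_type->majortype, media_type->subtype, etc. here.
  }  // Each iteration's AM_MEDIA_TYPE is freed as |media_type| leaves scope.
}
```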
185 VideoPixelFormat TranslateMediaSubtypeToPixelFormat(const GUID& sub_type) { | |
186 static struct { | |
187 const GUID& sub_type; | |
188 VideoPixelFormat format; | |
189 } pixel_formats[] = { | |
190 { kMediaSubTypeI420, PIXEL_FORMAT_I420 }, | |
191 { MEDIASUBTYPE_IYUV, PIXEL_FORMAT_I420 }, | |
192 { MEDIASUBTYPE_RGB24, PIXEL_FORMAT_RGB24 }, | |
193 { MEDIASUBTYPE_YUY2, PIXEL_FORMAT_YUY2 }, | |
194 { MEDIASUBTYPE_MJPG, PIXEL_FORMAT_MJPEG }, | |
195 { MEDIASUBTYPE_UYVY, PIXEL_FORMAT_UYVY }, | |
196 { MEDIASUBTYPE_ARGB32, PIXEL_FORMAT_ARGB }, | |
197 }; | |
198 for (size_t i = 0; i < ARRAYSIZE_UNSAFE(pixel_formats); ++i) { | |
199 if (sub_type == pixel_formats[i].sub_type) | |
200 return pixel_formats[i].format; | |
201 } | |
202 #ifndef NDEBUG | |
203 WCHAR guid_str[128]; | |
204 StringFromGUID2(sub_type, guid_str, arraysize(guid_str)); | |
205 DVLOG(2) << "Device (also) supports an unknown media type " << guid_str; | |
206 #endif | |
207 return PIXEL_FORMAT_UNKNOWN; | |
208 } | |
209 | |
210 } // namespace | |
211 | |
212 // static | |
213 void VideoCaptureDevice::GetDeviceNames(Names* device_names) { | |
214 const CommandLine* cmd_line = CommandLine::ForCurrentProcess(); | |
215 // Use Media Foundation for Metro processes (after and including Win8) and | |
216 // DirectShow for any other versions, unless forced via flag. Media Foundation | |
217 // can also be forced if appropriate flag is set and we are in Windows 7 or | |
218 // 8 in non-Metro mode. | |
219 if ((base::win::IsMetroProcess() && | |
220 !cmd_line->HasSwitch(switches::kForceDirectShowVideoCapture)) || | |
221 (base::win::GetVersion() >= base::win::VERSION_WIN7 && | |
222 cmd_line->HasSwitch(switches::kForceMediaFoundationVideoCapture))) { | |
223 VideoCaptureDeviceMFWin::GetDeviceNames(device_names); | |
224 } else { | |
225 VideoCaptureDeviceWin::GetDeviceNames(device_names); | |
226 } | |
227 } | |
228 | |
229 // static | |
230 void VideoCaptureDevice::GetDeviceSupportedFormats(const Name& device, | |
231 VideoCaptureFormats* formats) { | |
232 const CommandLine* cmd_line = CommandLine::ForCurrentProcess(); | |
233 // Use Media Foundation for Metro processes (after and including Win8) and | |
234 // DirectShow for any other versions, unless forced via flag. Media Foundation | |
235 // can also be forced if appropriate flag is set and we are in Windows 7 or | |
236 // 8 in non-Metro mode. | |
237 if ((base::win::IsMetroProcess() && | |
238 !cmd_line->HasSwitch(switches::kForceDirectShowVideoCapture)) || | |
239 (base::win::GetVersion() >= base::win::VERSION_WIN7 && | |
240 cmd_line->HasSwitch(switches::kForceMediaFoundationVideoCapture))) { | |
241 VideoCaptureDeviceMFWin::GetDeviceSupportedFormats(device, formats); | |
242 } else { | |
243 VideoCaptureDeviceWin::GetDeviceSupportedFormats(device, formats); | |
244 } | |
245 } | |
246 | |
247 // static | |
248 VideoCaptureDevice* VideoCaptureDevice::Create(const Name& device_name) { | |
249 VideoCaptureDevice* ret = NULL; | |
250 if (device_name.capture_api_type() == Name::MEDIA_FOUNDATION) { | |
251 DCHECK(VideoCaptureDeviceMFWin::PlatformSupported()); | |
252 scoped_ptr<VideoCaptureDeviceMFWin> device( | |
253 new VideoCaptureDeviceMFWin(device_name)); | |
254 DVLOG(1) << " MediaFoundation Device: " << device_name.name(); | |
255 if (device->Init()) | |
256 ret = device.release(); | |
257 } else if (device_name.capture_api_type() == Name::DIRECT_SHOW) { | |
258 scoped_ptr<VideoCaptureDeviceWin> device( | |
259 new VideoCaptureDeviceWin(device_name)); | |
260 DVLOG(1) << " DirectShow Device: " << device_name.name(); | |
261 if (device->Init()) | |
262 ret = device.release(); | |
263 } else{ | |
264 NOTREACHED() << " Couldn't recognize VideoCaptureDevice type"; | |
265 } | |
266 | |
267 return ret; | |
268 } | |
269 | |
270 // static | |
271 void VideoCaptureDeviceWin::GetDeviceNames(Names* device_names) { | |
272 DCHECK(device_names); | |
273 | |
274 ScopedComPtr<ICreateDevEnum> dev_enum; | |
275 HRESULT hr = dev_enum.CreateInstance(CLSID_SystemDeviceEnum, NULL, | |
276 CLSCTX_INPROC); | 101 CLSCTX_INPROC); |
277 if (FAILED(hr)) | 102 if (FAILED(hr)) |
278 return; | 103 return; |
279 | 104 |
280 ScopedComPtr<IEnumMoniker> enum_moniker; | 105 ScopedComPtr<IEnumMoniker> enum_moniker; |
281 hr = dev_enum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, | 106 hr = dev_enum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, |
282 enum_moniker.Receive(), 0); | 107 enum_moniker.Receive(), 0); |
283 // CreateClassEnumerator returns S_FALSE on some Windows OS | 108 // CreateClassEnumerator returns S_FALSE on some Windows OS |
284 // when no camera exist. Therefore the FAILED macro can't be used. | 109 // when no camera exist. Therefore the FAILED macro can't be used. |
285 if (hr != S_OK) | 110 if (hr != S_OK) |
(...skipping 37 matching lines...) | |
323 std::string device_name(base::SysWideToUTF8(str_ptr)); | 148 std::string device_name(base::SysWideToUTF8(str_ptr)); |
324 name.Reset(); | 149 name.Reset(); |
325 hr = prop_bag->Read(L"DevicePath", name.Receive(), 0); | 150 hr = prop_bag->Read(L"DevicePath", name.Receive(), 0); |
326 if (FAILED(hr) || name.type() != VT_BSTR) { | 151 if (FAILED(hr) || name.type() != VT_BSTR) { |
327 id = device_name; | 152 id = device_name; |
328 } else { | 153 } else { |
329 DCHECK_EQ(name.type(), VT_BSTR); | 154 DCHECK_EQ(name.type(), VT_BSTR); |
330 id = base::SysWideToUTF8(V_BSTR(&name)); | 155 id = base::SysWideToUTF8(V_BSTR(&name)); |
331 } | 156 } |
332 | 157 |
333 device_names->push_back(Name(device_name, id, Name::DIRECT_SHOW)); | 158 device_names->push_back(VideoCaptureDevice::Name(device_name, id, |
159 VideoCaptureDevice::Name::DIRECT_SHOW)); | |
334 } | 160 } |
335 } | 161 } |
336 moniker.Release(); | 162 moniker.Release(); |
337 } | 163 } |
338 } | 164 } |
339 | 165 |
340 // static | 166 static void GetDeviceNamesMediaFoundation( |
341 void VideoCaptureDeviceWin::GetDeviceSupportedFormats(const Name& device, | 167 VideoCaptureDevice::Names* device_names) { |
168 DVLOG(1) << " GetDeviceNamesMediaFoundation"; | |
169 ScopedCoMem<IMFActivate*> devices; | |
170 UINT32 count; | |
171 if (!EnumerateVideoDevicesMediaFoundation(&devices, &count)) | |
172 return; | |
173 | |
174 HRESULT hr; | |
175 for (UINT32 i = 0; i < count; ++i) { | |
176 UINT32 name_size, id_size; | |
177 ScopedCoMem<wchar_t> name, id; | |
178 if (SUCCEEDED(hr = devices[i]->GetAllocatedString( | |
179 MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME, &name, &name_size)) && | |
180 SUCCEEDED(hr = devices[i]->GetAllocatedString( | |
181 MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK, &id, | |
182 &id_size))) { | |
183 std::wstring name_w(name, name_size), id_w(id, id_size); | |
184 VideoCaptureDevice::Name device(base::SysWideToUTF8(name_w), | |
185 base::SysWideToUTF8(id_w), | |
186 VideoCaptureDevice::Name::MEDIA_FOUNDATION); | |
187 device_names->push_back(device); | |
188 } else { | |
189 DLOG(WARNING) << "GetAllocatedString failed: " << std::hex << hr; | |
190 } | |
191 devices[i]->Release(); | |
192 } | |
193 } | |
194 | |
195 static void GetDeviceSupportedFormatsDirectShow( | |
196 const VideoCaptureDevice::Name& device, | |
342 VideoCaptureFormats* formats) { | 197 VideoCaptureFormats* formats) { |
343 DVLOG(1) << "GetDeviceSupportedFormats for " << device.name(); | 198 DVLOG(1) << "GetDeviceSupportedFormatsDirectShow for " << device.name(); |
344 ScopedComPtr<ICreateDevEnum> dev_enum; | 199 ScopedComPtr<ICreateDevEnum> dev_enum; |
345 HRESULT hr = dev_enum.CreateInstance(CLSID_SystemDeviceEnum, NULL, | 200 HRESULT hr = dev_enum.CreateInstance(CLSID_SystemDeviceEnum, NULL, |
346 CLSCTX_INPROC); | 201 CLSCTX_INPROC); |
347 if (FAILED(hr)) | 202 if (FAILED(hr)) |
348 return; | 203 return; |
349 | 204 |
350 ScopedComPtr<IEnumMoniker> enum_moniker; | 205 ScopedComPtr<IEnumMoniker> enum_moniker; |
351 hr = dev_enum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, | 206 hr = dev_enum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, |
352 enum_moniker.Receive(), 0); | 207 enum_moniker.Receive(), 0); |
353 // CreateClassEnumerator returns S_FALSE on some Windows OS when no camera | 208 // CreateClassEnumerator returns S_FALSE on some Windows OS when no camera |
(...skipping 20 matching lines...) | |
374 DVLOG(1) << "Couldn't read a device's DevicePath."; | 229 DVLOG(1) << "Couldn't read a device's DevicePath."; |
375 return; | 230 return; |
376 } | 231 } |
377 if (device.id() == base::SysWideToUTF8(V_BSTR(&device_id))) | 232 if (device.id() == base::SysWideToUTF8(V_BSTR(&device_id))) |
378 break; | 233 break; |
379 moniker.Release(); | 234 moniker.Release(); |
380 } | 235 } |
381 | 236 |
382 if (moniker.get()) { | 237 if (moniker.get()) { |
383 base::win::ScopedComPtr<IBaseFilter> capture_filter; | 238 base::win::ScopedComPtr<IBaseFilter> capture_filter; |
384 hr = GetDeviceFilter(device, capture_filter.Receive()); | 239 hr = VideoCaptureDeviceWin::GetDeviceFilter(device, |
240 capture_filter.Receive()); | |
385 if (!capture_filter) { | 241 if (!capture_filter) { |
386 DVLOG(2) << "Failed to create capture filter."; | 242 DVLOG(2) << "Failed to create capture filter."; |
387 return; | 243 return; |
388 } | 244 } |
389 | 245 |
390 base::win::ScopedComPtr<IPin> output_capture_pin( | 246 base::win::ScopedComPtr<IPin> output_capture_pin( |
391 GetPin(capture_filter, PINDIR_OUTPUT, PIN_CATEGORY_CAPTURE)); | 247 VideoCaptureDeviceWin::GetPin(capture_filter, |
248 PINDIR_OUTPUT, | |
249 PIN_CATEGORY_CAPTURE)); | |
392 if (!output_capture_pin) { | 250 if (!output_capture_pin) { |
393 DVLOG(2) << "Failed to get capture output pin"; | 251 DVLOG(2) << "Failed to get capture output pin"; |
394 return; | 252 return; |
395 } | 253 } |
396 | 254 |
397 ScopedComPtr<IAMStreamConfig> stream_config; | 255 ScopedComPtr<IAMStreamConfig> stream_config; |
398 hr = output_capture_pin.QueryInterface(stream_config.Receive()); | 256 hr = output_capture_pin.QueryInterface(stream_config.Receive()); |
399 if (FAILED(hr)) { | 257 if (FAILED(hr)) { |
400 DVLOG(2) << "Failed to get IAMStreamConfig interface from " | 258 DVLOG(2) << "Failed to get IAMStreamConfig interface from " |
401 "capture device"; | 259 "capture device"; |
402 return; | 260 return; |
403 } | 261 } |
404 | 262 |
405 int count = 0, size = 0; | 263 int count = 0, size = 0; |
406 hr = stream_config->GetNumberOfCapabilities(&count, &size); | 264 hr = stream_config->GetNumberOfCapabilities(&count, &size); |
407 if (FAILED(hr)) { | 265 if (FAILED(hr)) { |
408 DVLOG(2) << "Failed to GetNumberOfCapabilities"; | 266 DVLOG(2) << "Failed to GetNumberOfCapabilities"; |
409 return; | 267 return; |
410 } | 268 } |
411 | 269 |
412 scoped_ptr<BYTE[]> caps(new BYTE[size]); | 270 scoped_ptr<BYTE[]> caps(new BYTE[size]); |
413 for (int i = 0; i < count; ++i) { | 271 for (int i = 0; i < count; ++i) { |
414 ScopedMediaType media_type; | 272 VideoCaptureDeviceWin::ScopedMediaType media_type; |
415 hr = stream_config->GetStreamCaps(i, media_type.Receive(), caps.get()); | 273 hr = stream_config->GetStreamCaps(i, media_type.Receive(), caps.get()); |
416 // GetStreamCaps() may return S_FALSE, so don't use FAILED() or SUCCEED() | 274 // GetStreamCaps() may return S_FALSE, so don't use FAILED() or SUCCEED() |
417 // macros here since they'll trigger incorrectly. | 275 // macros here since they'll trigger incorrectly. |
418 if (hr != S_OK) { | 276 if (hr != S_OK) { |
419 DVLOG(2) << "Failed to GetStreamCaps"; | 277 DVLOG(2) << "Failed to GetStreamCaps"; |
420 return; | 278 return; |
421 } | 279 } |
422 | 280 |
423 if (media_type->majortype == MEDIATYPE_Video && | 281 if (media_type->majortype == MEDIATYPE_Video && |
424 media_type->formattype == FORMAT_VideoInfo) { | 282 media_type->formattype == FORMAT_VideoInfo) { |
425 VideoCaptureFormat format; | 283 VideoCaptureFormat format; |
426 format.pixel_format = | 284 format.pixel_format = |
427 TranslateMediaSubtypeToPixelFormat(media_type->subtype); | 285 VideoCaptureDeviceWin::TranslateMediaSubtypeToPixelFormat( |
286 media_type->subtype); | |
428 if (format.pixel_format == PIXEL_FORMAT_UNKNOWN) | 287 if (format.pixel_format == PIXEL_FORMAT_UNKNOWN) |
429 continue; | 288 continue; |
430 VIDEOINFOHEADER* h = | 289 VIDEOINFOHEADER* h = |
431 reinterpret_cast<VIDEOINFOHEADER*>(media_type->pbFormat); | 290 reinterpret_cast<VIDEOINFOHEADER*>(media_type->pbFormat); |
432 format.frame_size.SetSize(h->bmiHeader.biWidth, | 291 format.frame_size.SetSize(h->bmiHeader.biWidth, |
433 h->bmiHeader.biHeight); | 292 h->bmiHeader.biHeight); |
434 // Trust the frame rate from the VIDEOINFOHEADER. | 293 // Trust the frame rate from the VIDEOINFOHEADER. |
435 format.frame_rate = (h->AvgTimePerFrame > 0) ? | 294 format.frame_rate = (h->AvgTimePerFrame > 0) ? |
436 static_cast<int>(kSecondsToReferenceTime / h->AvgTimePerFrame) : | 295 static_cast<int>(kSecondsToReferenceTime / h->AvgTimePerFrame) : |
437 0; | 296 0; |
438 formats->push_back(format); | 297 formats->push_back(format); |
439 DVLOG(1) << device.name() << " resolution: " | 298 DVLOG(1) << device.name() << " resolution: " |
440 << format.frame_size.ToString() << ", fps: " << format.frame_rate | 299 << format.frame_size.ToString() << ", fps: " << format.frame_rate |
441 << ", pixel format: " << format.pixel_format; | 300 << ", pixel format: " << format.pixel_format; |
442 } | 301 } |
443 } | 302 } |
444 } | 303 } |
445 } | 304 } |
446 | 305 |
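The frame-rate arithmetic above hinges on AvgTimePerFrame being a REFERENCE_TIME, i.e. a count of 100-nanosecond units, so kSecondsToReferenceTime (defined in video_capture_device_win.h, assumed here to be 10,000,000) converts it to frames per second. A standalone sketch of the same conversion:

```cpp
// Standalone, illustrative sketch of the AvgTimePerFrame -> fps conversion.
#include <cstdint>
#include <iostream>

const int64_t kSecondsToReferenceTime = 10000000;  // 100 ns units per second.

int FrameRateFromAvgTimePerFrame(int64_t avg_time_per_frame) {
  return avg_time_per_frame > 0
             ? static_cast<int>(kSecondsToReferenceTime / avg_time_per_frame)
             : 0;
}

int main() {
  std::cout << FrameRateFromAvgTimePerFrame(333333) << "\n";  // 30 fps.
  std::cout << FrameRateFromAvgTimePerFrame(666666) << "\n";  // 15 fps.
  return 0;
}
```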
447 VideoCaptureDeviceWin::VideoCaptureDeviceWin(const Name& device_name) | 306 static void GetDeviceSupportedFormatsMediaFoundation( |
448 : device_name_(device_name), | 307 const VideoCaptureDevice::Name& device, |
449 state_(kIdle) { | 308 VideoCaptureFormats* formats) { |
450 DetachFromThread(); | 309 DVLOG(1) << "GetDeviceSupportedFormatsMediaFoundation for " << device.name(); |
451 } | 310 ScopedComPtr<IMFMediaSource> source; |
311 if (!CreateVideoCaptureDeviceMediaFoundation(device.id().c_str(), | |
312 source.Receive())) { | |
313 return; | |
314 } | |
452 | 315 |
453 VideoCaptureDeviceWin::~VideoCaptureDeviceWin() { | 316 HRESULT hr; |
454 DCHECK(CalledOnValidThread()); | 317 base::win::ScopedComPtr<IMFSourceReader> reader; |
455 if (media_control_) | 318 if (FAILED(hr = MFCreateSourceReaderFromMediaSource(source, NULL, |
456 media_control_->Stop(); | 319 reader.Receive()))) { |
320 DLOG(ERROR) << "MFCreateSourceReaderFromMediaSource: " << std::hex << hr; | |
321 return; | |
322 } | |
457 | 323 |
458 if (graph_builder_) { | 324 DWORD stream_index = 0; |
459 if (sink_filter_) { | 325 ScopedComPtr<IMFMediaType> type; |
460 graph_builder_->RemoveFilter(sink_filter_); | 326 while (SUCCEEDED(hr = reader->GetNativeMediaType( |
461 sink_filter_ = NULL; | 327 MF_SOURCE_READER_FIRST_VIDEO_STREAM, stream_index, type.Receive()))) { |
328 UINT32 width, height; | |
329 hr = MFGetAttributeSize(type, MF_MT_FRAME_SIZE, &width, &height); | |
330 if (FAILED(hr)) { | |
331 DLOG(ERROR) << "MFGetAttributeSize: " << std::hex << hr; | |
332 return; | |
462 } | 333 } |
334 VideoCaptureFormat capture_format; | |
335 capture_format.frame_size.SetSize(width, height); | |
463 | 336 |
464 if (capture_filter_) | 337 UINT32 numerator, denominator; |
465 graph_builder_->RemoveFilter(capture_filter_); | 338 hr = MFGetAttributeRatio(type, MF_MT_FRAME_RATE, &numerator, &denominator); |
339 if (FAILED(hr)) { | |
340 DLOG(ERROR) << "MFGetAttributeRatio: " << std::hex << hr; | |
341 return; | |
342 } | |
343 capture_format.frame_rate = denominator ? numerator / denominator : 0; | |
466 | 344 |
467 if (mjpg_filter_) | 345 GUID type_guid; |
468 graph_builder_->RemoveFilter(mjpg_filter_); | 346 hr = type->GetGUID(MF_MT_SUBTYPE, &type_guid); |
347 if (FAILED(hr)) { | |
348 DLOG(ERROR) << "GetGUID: " << std::hex << hr; | |
349 return; | |
350 } | |
351 VideoCaptureDeviceMFWin::FormatFromGuid(type_guid, | |
352 &capture_format.pixel_format); | |
353 type.Release(); | |
354 formats->push_back(capture_format); | |
355 ++stream_index; | |
356 | |
357 DVLOG(1) << device.name() << " resolution: " | |
358 << capture_format.frame_size.ToString() << ", fps: " | |
359 << capture_format.frame_rate << ", pixel format: " | |
360 << capture_format.pixel_format; | |
469 } | 361 } |
470 } | 362 } |
471 | 363 |
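MF_MT_FRAME_RATE comes back as a rational, so an NTSC-style 29.97 fps mode is reported as 30000/1001 and the integer division above yields 29. A tiny illustrative sketch of that step (values are examples only):

```cpp
// Illustrative only: reducing the MF_MT_FRAME_RATE rational to the integer
// frame rate stored in VideoCaptureFormat, mirroring the code above.
#include <cstdint>
#include <iostream>

int FrameRateFromRatio(uint32_t numerator, uint32_t denominator) {
  return denominator ? static_cast<int>(numerator / denominator) : 0;
}

int main() {
  std::cout << FrameRateFromRatio(30, 1) << "\n";        // 30 fps.
  std::cout << FrameRateFromRatio(30000, 1001) << "\n";  // 29 (29.97 truncated).
  return 0;
}
```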
472 bool VideoCaptureDeviceWin::Init() { | 364 // Returns true iff the current platform supports the Media Foundation API |
473 DCHECK(CalledOnValidThread()); | 365 // and that the DLLs are available. On Vista this API is an optional download |
474 HRESULT hr = GetDeviceFilter(device_name_, capture_filter_.Receive()); | 366 // but the API is advertised as a part of Windows 7 and onwards. However, |
475 if (!capture_filter_) { | 367 // we've seen that the required DLLs are not available in some Win7 |
476 DVLOG(2) << "Failed to create capture filter."; | 368 // distributions such as Windows 7 N and Windows 7 KN. |
369 // static | |
370 bool VideoCaptureDeviceFactoryWin::PlatformSupportsMediaFoundation() { | |
371 // Even though the DLLs might be available on Vista, we get crashes | |
372 // when running our tests on the build bots. | |
373 if (base::win::GetVersion() < base::win::VERSION_WIN7) | |
477 return false; | 374 return false; |
478 } | |
479 | 375 |
480 output_capture_pin_ = | 376 static bool g_dlls_available = LoadMediaFoundationDlls(); |
481 GetPin(capture_filter_, PINDIR_OUTPUT, PIN_CATEGORY_CAPTURE); | 377 return g_dlls_available; |
482 if (!output_capture_pin_) { | |
483 DVLOG(2) << "Failed to get capture output pin"; | |
484 return false; | |
485 } | |
486 | |
487 // Create the sink filter used for receiving Captured frames. | |
488 sink_filter_ = new SinkFilter(this); | |
489 if (sink_filter_ == NULL) { | |
490 DVLOG(2) << "Failed to create send filter"; | |
491 return false; | |
492 } | |
493 | |
494 input_sink_pin_ = sink_filter_->GetPin(0); | |
495 | |
496 hr = graph_builder_.CreateInstance(CLSID_FilterGraph, NULL, | |
497 CLSCTX_INPROC_SERVER); | |
498 if (FAILED(hr)) { | |
499 DVLOG(2) << "Failed to create graph builder."; | |
500 return false; | |
501 } | |
502 | |
503 hr = graph_builder_.QueryInterface(media_control_.Receive()); | |
504 if (FAILED(hr)) { | |
505 DVLOG(2) << "Failed to create media control builder."; | |
506 return false; | |
507 } | |
508 | |
509 hr = graph_builder_->AddFilter(capture_filter_, NULL); | |
510 if (FAILED(hr)) { | |
511 DVLOG(2) << "Failed to add the capture device to the graph."; | |
512 return false; | |
513 } | |
514 | |
515 hr = graph_builder_->AddFilter(sink_filter_, NULL); | |
516 if (FAILED(hr)) { | |
517 DVLOG(2)<< "Failed to add the send filter to the graph."; | |
518 return false; | |
519 } | |
520 | |
521 return CreateCapabilityMap(); | |
522 } | 378 } |
523 | 379 |
524 void VideoCaptureDeviceWin::AllocateAndStart( | 380 scoped_ptr<VideoCaptureDevice> VideoCaptureDeviceFactoryWin::Create( |
525 const VideoCaptureParams& params, | 381 const VideoCaptureDevice::Name& device_name) { |
526 scoped_ptr<VideoCaptureDevice::Client> client) { | 382 DCHECK(thread_checker_.CalledOnValidThread()); |
527 DCHECK(CalledOnValidThread()); | 383 scoped_ptr<VideoCaptureDevice> device; |
528 if (state_ != kIdle) | 384 if (device_name.capture_api_type() == |
529 return; | 385 VideoCaptureDevice::Name::MEDIA_FOUNDATION) { |
530 | 386 DCHECK(PlatformSupportsMediaFoundation()); |
531 client_ = client.Pass(); | 387 device.reset(new VideoCaptureDeviceMFWin(device_name)); |
532 | 388 DVLOG(1) << " MediaFoundation Device: " << device_name.name(); |
533 // Get the camera capability that best match the requested resolution. | 389 ScopedComPtr<IMFMediaSource> source; |
534 const VideoCaptureCapabilityWin& found_capability = | 390 if (!CreateVideoCaptureDeviceMediaFoundation(device_name.id().c_str(), |
535 capabilities_.GetBestMatchedFormat( | 391 source.Receive())) { |
536 params.requested_format.frame_size.width(), | 392 return scoped_ptr<VideoCaptureDevice>(); |
537 params.requested_format.frame_size.height(), | 393 } |
538 params.requested_format.frame_rate); | 394 if (!static_cast<VideoCaptureDeviceMFWin*>(device.get())->Init(source)) |
539 VideoCaptureFormat format = found_capability.supported_format; | 395 device.reset(); |
540 | 396 } else if (device_name.capture_api_type() == |
541 // Reduce the frame rate if the requested frame rate is lower | 397 VideoCaptureDevice::Name::DIRECT_SHOW) { |
542 // than the capability. | 398 device.reset(new VideoCaptureDeviceWin(device_name)); |
543 if (format.frame_rate > params.requested_format.frame_rate) | 399 DVLOG(1) << " DirectShow Device: " << device_name.name(); |
544 format.frame_rate = params.requested_format.frame_rate; | 400 if (!static_cast<VideoCaptureDeviceWin*>(device.get())->Init()) |
545 | 401 device.reset(); |
546 ScopedComPtr<IAMStreamConfig> stream_config; | 402 } else { |
547 HRESULT hr = output_capture_pin_.QueryInterface(stream_config.Receive()); | 403 NOTREACHED() << " Couldn't recognize VideoCaptureDevice type"; |
548 if (FAILED(hr)) { | |
549 SetErrorState("Can't get the Capture format settings"); | |
550 return; | |
551 } | 404 } |
552 | 405 return device.Pass(); |
553 int count = 0, size = 0; | |
554 hr = stream_config->GetNumberOfCapabilities(&count, &size); | |
555 if (FAILED(hr)) { | |
556 DVLOG(2) << "Failed to GetNumberOfCapabilities"; | |
557 return; | |
558 } | |
559 | |
560 scoped_ptr<BYTE[]> caps(new BYTE[size]); | |
561 ScopedMediaType media_type; | |
562 | |
563 // Get the windows capability from the capture device. | |
564 hr = stream_config->GetStreamCaps( | |
565 found_capability.stream_index, media_type.Receive(), caps.get()); | |
566 if (SUCCEEDED(hr)) { | |
567 if (media_type->formattype == FORMAT_VideoInfo) { | |
568 VIDEOINFOHEADER* h = | |
569 reinterpret_cast<VIDEOINFOHEADER*>(media_type->pbFormat); | |
570 if (format.frame_rate > 0) | |
571 h->AvgTimePerFrame = kSecondsToReferenceTime / format.frame_rate; | |
572 } | |
573 // Set the sink filter to request this format. | |
574 sink_filter_->SetRequestedMediaFormat(format); | |
575 // Order the capture device to use this format. | |
576 hr = stream_config->SetFormat(media_type.get()); | |
577 } | |
578 | |
579 if (FAILED(hr)) | |
580 SetErrorState("Failed to set capture device output format"); | |
581 | |
582 if (format.pixel_format == PIXEL_FORMAT_MJPEG && !mjpg_filter_.get()) { | |
583 // Create MJPG filter if we need it. | |
584 hr = mjpg_filter_.CreateInstance(CLSID_MjpegDec, NULL, CLSCTX_INPROC); | |
585 | |
586 if (SUCCEEDED(hr)) { | |
587 input_mjpg_pin_ = GetPin(mjpg_filter_, PINDIR_INPUT, GUID_NULL); | |
588 output_mjpg_pin_ = GetPin(mjpg_filter_, PINDIR_OUTPUT, GUID_NULL); | |
589 hr = graph_builder_->AddFilter(mjpg_filter_, NULL); | |
590 } | |
591 | |
592 if (FAILED(hr)) { | |
593 mjpg_filter_.Release(); | |
594 input_mjpg_pin_.Release(); | |
595 output_mjpg_pin_.Release(); | |
596 } | |
597 } | |
598 | |
599 SetAntiFlickerInCaptureFilter(); | |
600 | |
601 if (format.pixel_format == PIXEL_FORMAT_MJPEG && mjpg_filter_.get()) { | |
602 // Connect the camera to the MJPEG decoder. | |
603 hr = graph_builder_->ConnectDirect(output_capture_pin_, input_mjpg_pin_, | |
604 NULL); | |
605 // Connect the MJPEG filter to the Capture filter. | |
606 hr += graph_builder_->ConnectDirect(output_mjpg_pin_, input_sink_pin_, | |
607 NULL); | |
608 } else { | |
609 hr = graph_builder_->ConnectDirect(output_capture_pin_, input_sink_pin_, | |
610 NULL); | |
611 } | |
612 | |
613 if (FAILED(hr)) { | |
614 SetErrorState("Failed to connect the Capture graph."); | |
615 return; | |
616 } | |
617 | |
618 hr = media_control_->Pause(); | |
619 if (FAILED(hr)) { | |
620 SetErrorState("Failed to Pause the Capture device. " | |
621 "Is it already occupied?"); | |
622 return; | |
623 } | |
624 | |
625 // Get the format back from the sink filter after the filter have been | |
626 // connected. | |
627 capture_format_ = sink_filter_->ResultingFormat(); | |
628 | |
629 // Start capturing. | |
630 hr = media_control_->Run(); | |
631 if (FAILED(hr)) { | |
632 SetErrorState("Failed to start the Capture device."); | |
633 return; | |
634 } | |
635 | |
636 state_ = kCapturing; | |
637 } | 406 } |
638 | 407 |
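From the caller's side, the new factory is meant to be used as a name enumeration followed by Create(); the sketch below is hypothetical glue code (no such caller is part of this CL) showing the intended sequence against the GetDeviceNames() defined further down.

```cpp
// Hypothetical caller-side sketch, not part of this CL (assumes the media
// namespace is in scope).
VideoCaptureDeviceFactoryWin factory;
VideoCaptureDevice::Names names;
factory.GetDeviceNames(&names);
if (!names.empty()) {
  scoped_ptr<VideoCaptureDevice> device = factory.Create(names.front());
  if (device.get()) {
    // device->AllocateAndStart(...) would follow, on the capture thread.
  }
}
```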
639 void VideoCaptureDeviceWin::StopAndDeAllocate() { | 408 void VideoCaptureDeviceFactoryWin::GetDeviceNames( |
640 DCHECK(CalledOnValidThread()); | 409 VideoCaptureDevice::Names* device_names) { |
641 if (state_ != kCapturing) | 410 DCHECK(thread_checker_.CalledOnValidThread()); |
642 return; | 411 const CommandLine* cmd_line = CommandLine::ForCurrentProcess(); |
643 | 412 // Use Media Foundation for Metro processes (after and including Win8) and |
644 HRESULT hr = media_control_->Stop(); | 413 // DirectShow for any other versions, unless forced via flag. Media Foundation |
645 if (FAILED(hr)) { | 414 // can also be forced if appropriate flag is set and we are in Windows 7 or |
646 SetErrorState("Failed to stop the capture graph."); | 415 // 8 in non-Metro mode. |
647 return; | 416 if ((base::win::IsMetroProcess() && |
648 } | 417 !cmd_line->HasSwitch(switches::kForceDirectShowVideoCapture)) || |
649 | 418 (base::win::GetVersion() >= base::win::VERSION_WIN7 && |
650 graph_builder_->Disconnect(output_capture_pin_); | 419 cmd_line->HasSwitch(switches::kForceMediaFoundationVideoCapture))) { |
651 graph_builder_->Disconnect(input_sink_pin_); | 420 GetDeviceNamesMediaFoundation(device_names); |
652 | |
653 // If the _mjpg filter exist disconnect it even if it has not been used. | |
654 if (mjpg_filter_) { | |
655 graph_builder_->Disconnect(input_mjpg_pin_); | |
656 graph_builder_->Disconnect(output_mjpg_pin_); | |
657 } | |
658 | |
659 if (FAILED(hr)) { | |
660 SetErrorState("Failed to Stop the Capture device"); | |
661 return; | |
662 } | |
663 client_.reset(); | |
664 state_ = kIdle; | |
665 } | |
666 | |
667 // Implements SinkFilterObserver::SinkFilterObserver. | |
668 void VideoCaptureDeviceWin::FrameReceived(const uint8* buffer, | |
669 int length) { | |
670 client_->OnIncomingCapturedData( | |
671 buffer, length, capture_format_, 0, base::TimeTicks::Now()); | |
672 } | |
673 | |
674 bool VideoCaptureDeviceWin::CreateCapabilityMap() { | |
675 DCHECK(CalledOnValidThread()); | |
676 ScopedComPtr<IAMStreamConfig> stream_config; | |
677 HRESULT hr = output_capture_pin_.QueryInterface(stream_config.Receive()); | |
678 if (FAILED(hr)) { | |
679 DVLOG(2) << "Failed to get IAMStreamConfig interface from " | |
680 "capture device"; | |
681 return false; | |
682 } | |
683 | |
684 // Get interface used for getting the frame rate. | |
685 ScopedComPtr<IAMVideoControl> video_control; | |
686 hr = capture_filter_.QueryInterface(video_control.Receive()); | |
687 DVLOG_IF(2, FAILED(hr)) << "IAMVideoControl Interface NOT SUPPORTED"; | |
688 | |
689 int count = 0, size = 0; | |
690 hr = stream_config->GetNumberOfCapabilities(&count, &size); | |
691 if (FAILED(hr)) { | |
692 DVLOG(2) << "Failed to GetNumberOfCapabilities"; | |
693 return false; | |
694 } | |
695 | |
696 scoped_ptr<BYTE[]> caps(new BYTE[size]); | |
697 for (int i = 0; i < count; ++i) { | |
698 ScopedMediaType media_type; | |
699 hr = stream_config->GetStreamCaps(i, media_type.Receive(), caps.get()); | |
700 // GetStreamCaps() may return S_FALSE, so don't use FAILED() or SUCCEED() | |
701 // macros here since they'll trigger incorrectly. | |
702 if (hr != S_OK) { | |
703 DVLOG(2) << "Failed to GetStreamCaps"; | |
704 return false; | |
705 } | |
706 | |
707 if (media_type->majortype == MEDIATYPE_Video && | |
708 media_type->formattype == FORMAT_VideoInfo) { | |
709 VideoCaptureCapabilityWin capability(i); | |
710 capability.supported_format.pixel_format = | |
711 TranslateMediaSubtypeToPixelFormat(media_type->subtype); | |
712 if (capability.supported_format.pixel_format == PIXEL_FORMAT_UNKNOWN) | |
713 continue; | |
714 | |
715 VIDEOINFOHEADER* h = | |
716 reinterpret_cast<VIDEOINFOHEADER*>(media_type->pbFormat); | |
717 capability.supported_format.frame_size.SetSize(h->bmiHeader.biWidth, | |
718 h->bmiHeader.biHeight); | |
719 | |
720 // Try to get a better |time_per_frame| from IAMVideoControl. If not, use | |
721 // the value from VIDEOINFOHEADER. | |
722 REFERENCE_TIME time_per_frame = h->AvgTimePerFrame; | |
723 if (video_control) { | |
724 ScopedCoMem<LONGLONG> max_fps; | |
725 LONG list_size = 0; | |
726 SIZE size = {capability.supported_format.frame_size.width(), | |
727 capability.supported_format.frame_size.height()}; | |
728 | |
729 // GetFrameRateList doesn't return max frame rate always | |
730 // eg: Logitech Notebook. This may be due to a bug in that API | |
731 // because GetFrameRateList array is reversed in the above camera. So | |
732 // a util method written. Can't assume the first value will return | |
733 // the max fps. | |
734 hr = video_control->GetFrameRateList(output_capture_pin_, i, size, | |
735 &list_size, &max_fps); | |
736 // Sometimes |list_size| will be > 0, but max_fps will be NULL. Some | |
737 // drivers may return an HRESULT of S_FALSE which SUCCEEDED() translates | |
738 // into success, so explicitly check S_OK. See http://crbug.com/306237. | |
739 if (hr == S_OK && list_size > 0 && max_fps) { | |
740 time_per_frame = *std::min_element(max_fps.get(), | |
741 max_fps.get() + list_size); | |
742 } | |
743 } | |
744 | |
745 capability.supported_format.frame_rate = | |
746 (time_per_frame > 0) | |
747 ? static_cast<int>(kSecondsToReferenceTime / time_per_frame) | |
748 : 0; | |
749 | |
750 // DirectShow works at the moment only on integer frame_rate but the | |
751 // best capability matching class works on rational frame rates. | |
752 capability.frame_rate_numerator = capability.supported_format.frame_rate; | |
753 capability.frame_rate_denominator = 1; | |
754 | |
755 capabilities_.Add(capability); | |
756 } | |
757 } | |
758 | |
759 return !capabilities_.empty(); | |
760 } | |
761 | |
762 // Set the power line frequency removal in |capture_filter_| if available. | |
763 void VideoCaptureDeviceWin::SetAntiFlickerInCaptureFilter() { | |
764 const int power_line_frequency = GetPowerLineFrequencyForLocation(); | |
765 if (power_line_frequency != kPowerLine50Hz && | |
766 power_line_frequency != kPowerLine60Hz) { | |
767 return; | |
768 } | |
769 ScopedComPtr<IKsPropertySet> ks_propset; | |
770 DWORD type_support = 0; | |
771 HRESULT hr; | |
772 if (SUCCEEDED(hr = ks_propset.QueryFrom(capture_filter_)) && | |
773 SUCCEEDED(hr = ks_propset->QuerySupported(PROPSETID_VIDCAP_VIDEOPROCAMP, | |
774 KSPROPERTY_VIDEOPROCAMP_POWERLINE_FREQUENCY, &type_support)) && | |
775 (type_support & KSPROPERTY_SUPPORT_SET)) { | |
776 KSPROPERTY_VIDEOPROCAMP_S data = {}; | |
777 data.Property.Set = PROPSETID_VIDCAP_VIDEOPROCAMP; | |
778 data.Property.Id = KSPROPERTY_VIDEOPROCAMP_POWERLINE_FREQUENCY; | |
779 data.Property.Flags = KSPROPERTY_TYPE_SET; | |
780 data.Value = (power_line_frequency == kPowerLine50Hz) ? 1 : 2; | |
781 data.Flags = KSPROPERTY_VIDEOPROCAMP_FLAGS_MANUAL; | |
782 hr = ks_propset->Set(PROPSETID_VIDCAP_VIDEOPROCAMP, | |
783 KSPROPERTY_VIDEOPROCAMP_POWERLINE_FREQUENCY, | |
784 &data, sizeof(data), &data, sizeof(data)); | |
785 DVLOG_IF(ERROR, FAILED(hr)) << "Anti-flicker setting failed."; | |
786 DVLOG_IF(2, SUCCEEDED(hr)) << "Anti-flicker set correctly."; | |
787 } else { | 421 } else { |
788 DVLOG(2) << "Anti-flicker setting not supported."; | 422 GetDeviceNamesDirectShow(device_names); |
789 } | 423 } |
790 } | 424 } |
791 | 425 |
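GetDeviceNames() above and GetDeviceSupportedFormats() below share the same platform/flag rule for picking a backend. Isolated purely for readability (a hypothetical helper, not a change proposed by this CL), the rule is:

```cpp
// Hypothetical helper isolating the backend selection rule used by both
// factory methods in this file.
static bool ShouldUseMediaFoundation() {
  const CommandLine* cmd_line = CommandLine::ForCurrentProcess();
  // Metro processes on Win8+ default to Media Foundation unless DirectShow
  // is forced; Media Foundation can also be forced on Windows 7 and later.
  return (base::win::IsMetroProcess() &&
          !cmd_line->HasSwitch(switches::kForceDirectShowVideoCapture)) ||
         (base::win::GetVersion() >= base::win::VERSION_WIN7 &&
          cmd_line->HasSwitch(switches::kForceMediaFoundationVideoCapture));
}
```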
792 void VideoCaptureDeviceWin::SetErrorState(const std::string& reason) { | 426 void VideoCaptureDeviceFactoryWin::GetDeviceSupportedFormats( |
793 DCHECK(CalledOnValidThread()); | 427 const VideoCaptureDevice::Name& device, |
794 DVLOG(1) << reason; | 428 VideoCaptureFormats* formats) { |
795 state_ = kError; | 429 DCHECK(thread_checker_.CalledOnValidThread()); |
796 client_->OnError(reason); | 430 const CommandLine* cmd_line = CommandLine::ForCurrentProcess(); |
431 // Use Media Foundation for Metro processes (after and including Win8) and | |
432 // DirectShow for any other versions, unless forced via flag. Media Foundation | |
433 // can also be forced if appropriate flag is set and we are in Windows 7 or | |
434 // 8 in non-Metro mode. | |
435 if ((base::win::IsMetroProcess() && | |
436 !cmd_line->HasSwitch(switches::kForceDirectShowVideoCapture)) || | |
437 (base::win::GetVersion() >= base::win::VERSION_WIN7 && | |
438 cmd_line->HasSwitch(switches::kForceMediaFoundationVideoCapture))) { | |
439 GetDeviceSupportedFormatsMediaFoundation(device, formats); | |
440 } else { | |
441 GetDeviceSupportedFormatsDirectShow(device, formats); | |
442 } | |
797 } | 443 } |
444 | |
798 } // namespace media | 445 } // namespace media |