OLD | NEW |
---|---|
(Empty) | |
1 // Copyright (c) 2011 The Chromium Authors. All rights reserved. | |
2 // Use of this source code is governed by a BSD-style license that can be | |
3 // found in the LICENSE file. | |
4 | |
5 #include "media/video/capture/win/video_capture_device_win.h" | |
6 | |
7 #include <algorithm> | |
8 #include <list> | |
9 | |
10 #include "base/sys_string_conversions.h" | |
11 #include "base/win/scoped_variant.h" | |
12 | |
13 using base::win::ScopedComPtr; | |
14 using base::win::ScopedVariant; | |
15 | |
16 namespace { | |
17 | |
18 // Finds and creates a DirectShow Video Capture filter matching the device_name. | |
19 HRESULT GetDeviceFilter(const media::VideoCaptureDevice::Name& device_name, | |
20 IBaseFilter** filter) { | |
21 DCHECK(filter); | |
22 | |
23 ScopedComPtr<ICreateDevEnum> dev_enum; | |
24 HRESULT hr = dev_enum.CreateInstance(CLSID_SystemDeviceEnum, NULL, | |
25 CLSCTX_INPROC); | |
26 if (FAILED(hr)) | |
27 return hr; | |
28 | |
29 ScopedComPtr<IEnumMoniker> enum_moniker; | |
30 hr = dev_enum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, | |
31 enum_moniker.Receive(), 0); | |
32 if (FAILED(hr)) | |
33 return NULL; | |
34 | |
35 ScopedComPtr<IMoniker> moniker; | |
36 ScopedComPtr<IBaseFilter> capture_filter; | |
37 DWORD fetched = 0; | |
38 while (enum_moniker->Next(1, moniker.Receive(), &fetched) == S_OK) { | |
39 ScopedComPtr<IPropertyBag> prop_bag; | |
40 hr = moniker->BindToStorage(0, 0, IID_IPropertyBag, prop_bag.ReceiveVoid()); | |
41 if (FAILED(hr)) { | |
42 moniker.Release(); | |
43 continue; | |
44 } | |
45 | |
46 // Find the description or friendly name. | |
47 static const wchar_t* kPropertyNames[] = { | |
48 L"DevicePath", L"Description", L"FriendlyName" | |
49 }; | |
50 ScopedVariant name; | |
51 for (size_t i = 0; i < arraysize(kPropertyNames) && | |
scherkus (not reviewing)
2011/06/24 20:15:26
nit: I'd drop 2nd clause to next line instead of i
Per K
2011/06/27 11:47:50
Done.
| |
52 name.type() != VT_BSTR; ++i) { | |
53 prop_bag->Read(kPropertyNames[i], name.Receive(), 0); | |
54 } | |
55 if (name.type() == VT_BSTR) { | |
56 std::string device_path(base::SysWideToUTF8(V_BSTR(&name))); | |
57 if (device_path.compare(device_name.unique_id) == 0) { | |
58 // We have found the requested device | |
59 hr = moniker->BindToObject(0, 0, IID_IBaseFilter, | |
60 capture_filter.ReceiveVoid()); | |
61 DVPLOG_IF(2, FAILED(hr)) << "Failed to bind camera filter."; | |
62 break; | |
63 } | |
64 } | |
65 moniker.Release(); | |
66 } | |
67 | |
68 *filter = capture_filter.Detach(); | |
69 if (!*filter && SUCCEEDED(hr)) | |
70 hr = HRESULT_FROM_WIN32(ERROR_NOT_FOUND); | |
71 | |
72 return hr; | |
73 } | |
74 | |
75 // Check if a Pin matches a category. | |
76 bool PinMatchesCategory(IPin* pin, REFGUID category) { | |
77 DCHECK(pin); | |
78 bool found = false; | |
79 ScopedComPtr<IKsPropertySet> ks_property; | |
80 HRESULT hr = ks_property.QueryFrom(pin); | |
81 if (SUCCEEDED(hr)) { | |
82 GUID pin_category; | |
83 DWORD return_value; | |
84 hr = ks_property->Get(AMPROPSETID_Pin, AMPROPERTY_PIN_CATEGORY, NULL, 0, | |
85 &pin_category, sizeof(pin_category), &return_value); | |
86 if (SUCCEEDED(hr) && (return_value == sizeof(pin_category))) { | |
87 found = (pin_category == category) ? true : false; | |
88 } | |
89 } | |
90 return found; | |
91 } | |
92 | |
93 // Finds a IPin on a IBaseFilter given the direction an category. | |
94 HRESULT GetPin(IBaseFilter* filter, PIN_DIRECTION pin_dir, REFGUID category, | |
95 IPin** pin) { | |
96 DCHECK(pin); | |
97 ScopedComPtr<IEnumPins> pin_emum; | |
98 HRESULT hr = filter->EnumPins(pin_emum.Receive()); | |
99 if (pin_emum == NULL) | |
100 return hr; | |
101 | |
102 // Get first unconnected pin. | |
103 hr = pin_emum->Reset(); // set to first pin | |
104 while ((hr = pin_emum->Next(1, pin, NULL)) == S_OK) { | |
105 PIN_DIRECTION this_pin_dir = static_cast<PIN_DIRECTION>(-1); | |
106 hr = (*pin)->QueryDirection(&this_pin_dir); | |
107 if (pin_dir == this_pin_dir) { | |
108 if (category == GUID_NULL || PinMatchesCategory(*pin, category)) | |
109 return S_OK; | |
110 } | |
111 (*pin)->Release(); | |
112 } | |
113 | |
114 return E_FAIL; | |
115 } | |
116 | |
117 // Release the format block for a media type. | |
118 // http://msdn.microsoft.com/en-us/library/dd375432(VS.85).aspx | |
119 void FreeMediaType(AM_MEDIA_TYPE* mt) { | |
120 if (mt->cbFormat != 0) { | |
121 CoTaskMemFree((PVOID)mt->pbFormat); | |
122 mt->cbFormat = 0; | |
123 mt->pbFormat = NULL; | |
124 } | |
125 if (mt->pUnk != NULL) { | |
126 // pUnk should not be used. | |
127 mt->pUnk->Release(); | |
128 mt->pUnk = NULL; | |
129 } | |
130 } | |
131 | |
132 // Delete a media type structure that was allocated on the heap. | |
133 // http://msdn.microsoft.com/en-us/library/dd375432(VS.85).aspx | |
134 void DeleteMediaType(AM_MEDIA_TYPE *pmt) { | |
135 if (pmt != NULL) { | |
136 FreeMediaType(pmt); | |
137 CoTaskMemFree(pmt); | |
138 } | |
139 } | |
140 | |
// Help structure used for comparing video capture capabilities.
// Each field holds the signed difference "capability value - requested
// value"; GetBestMatchedCapability() sorts on the absolute differences.
struct ResolutionDiff {
  int capability_index;  // Index of the capability in the capability map.
  int diff_height;       // Capability height minus requested height.
  int diff_width;        // Capability width minus requested width.
  int diff_frame_rate;   // Capability frame rate minus requested frame rate.
  media::VideoCaptureDevice::Format color;  // Color format of the capability.
};
149 | |
150 bool CompareHeight(ResolutionDiff item1, ResolutionDiff item2) { | |
151 return abs(item1.diff_height) < abs(item2.diff_height); | |
152 } | |
153 | |
154 bool CompareWidth(ResolutionDiff item1, ResolutionDiff item2) { | |
155 return abs(item1.diff_width) < abs(item2.diff_width); | |
156 } | |
157 | |
158 bool CompareFrameRate(ResolutionDiff item1, ResolutionDiff item2) { | |
159 return abs(item1.diff_frame_rate) < abs(item2.diff_frame_rate); | |
160 } | |
161 | |
162 bool CompareColor(ResolutionDiff item1, ResolutionDiff item2) { | |
163 return (item1.color < item2.color); | |
164 } | |
165 | |
166 } // namespace | |
167 | |
168 namespace media { | |
169 | |
170 // Gets the names of all video capture devices connected to this computer. | |
171 void VideoCaptureDevice::GetDeviceNames(Names* device_names) { | |
172 DCHECK(device_names); | |
173 | |
174 app::win::ScopedCOMInitializer coinit; | |
175 ScopedComPtr<ICreateDevEnum> dev_enum; | |
176 HRESULT hr = dev_enum.CreateInstance(CLSID_SystemDeviceEnum, NULL, | |
177 CLSCTX_INPROC); | |
178 if (FAILED(hr)) | |
179 return; | |
180 | |
181 ScopedComPtr<IEnumMoniker> enum_moniker; | |
182 hr = dev_enum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, | |
183 enum_moniker.Receive(), 0); | |
184 if (FAILED(hr)) | |
185 return; | |
186 | |
187 device_names->clear(); | |
188 | |
189 // Enumerate all video capture devices. | |
190 ScopedComPtr<IMoniker> moniker; | |
191 int index = 0; | |
192 while (enum_moniker->Next(1, moniker.Receive(), NULL) == S_OK) { | |
193 Name device; | |
194 ScopedComPtr<IPropertyBag> prop_bag; | |
195 hr = moniker->BindToStorage(0, 0, IID_IPropertyBag, prop_bag.ReceiveVoid()); | |
196 if (FAILED(hr)) { | |
197 moniker.Release(); | |
198 continue; | |
199 } | |
200 | |
201 // Find the description or friendly name. | |
202 ScopedVariant name; | |
203 hr = prop_bag->Read(L"Description", name.Receive(), 0); | |
204 if (FAILED(hr)) | |
205 hr = prop_bag->Read(L"FriendlyName", name.Receive(), 0); | |
206 | |
207 if (SUCCEEDED(hr) && name.type() == VT_BSTR) { | |
208 // ignore all VFW drivers and the special Google Camera Adapter | |
scherkus (not reviewing)
2011/06/24 20:15:26
capitalize + end w/ period
what is the Google Cam
Per K
2011/06/27 11:47:50
Added comment in file.
It is fake DirectShow came
| |
209 const wchar_t* str_ptr = V_BSTR(&name); | |
210 static const wchar_t kGoogleCameraAdapter[] = L"Google Camera Adapter"; | |
scherkus (not reviewing)
2011/06/24 20:15:26
move to top of file
Per K
2011/06/27 11:47:50
Done.
| |
211 if ((wcsstr(str_ptr, L"(VFW)") == NULL) && | |
212 (_wcsnicmp(str_ptr, kGoogleCameraAdapter, | |
213 arraysize(kGoogleCameraAdapter) - 1) != 0)) { | |
214 device.device_name = base::SysWideToUTF8(str_ptr); | |
215 name.Reset(); | |
216 hr = prop_bag->Read(L"DevicePath", name.Receive(), 0); | |
217 if (FAILED(hr)) { | |
218 device.unique_id = device.device_name; | |
219 } else if (name.type() == VT_BSTR) { | |
220 device.unique_id = base::SysWideToUTF8(V_BSTR(&name)); | |
221 } | |
222 | |
223 device_names->push_back(device); | |
224 } | |
225 } | |
226 moniker.Release(); | |
227 } | |
228 } | |
229 | |
230 VideoCaptureDevice* VideoCaptureDevice::Create(const Name& device_name) { | |
231 VideoCaptureDeviceWin* self = new VideoCaptureDeviceWin(device_name); | |
232 if (!self || !self->Init()) { | |
233 delete self; | |
234 return NULL; | |
235 } | |
236 return self; | |
237 } | |
238 | |
// Constructs an idle device for |device_name|. Init() must be called (and
// succeed) before the device can be allocated or started.
VideoCaptureDeviceWin::VideoCaptureDeviceWin(const Name& device_name)
    : device_name_(device_name),
      state_(kIdle) {
}
243 | |
// Stops the graph and removes all filters from it before the ScopedComPtr
// members release their references.
// NOTE(review): teardown order matters here — the graph is assumed to hold
// its own references to the filters until RemoveFilter is called; confirm
// against the filter-graph ownership rules before reordering.
VideoCaptureDeviceWin::~VideoCaptureDeviceWin() {
  if (media_control_)
    media_control_->Stop();

  if (graph_builder_) {
    if (sink_filter_) {
      graph_builder_->RemoveFilter(sink_filter_);
      sink_filter_ = NULL;
    }

    if (capture_filter_)
      graph_builder_->RemoveFilter(capture_filter_);

    if (mjpg_filter_)
      graph_builder_->RemoveFilter(mjpg_filter_);
  }
}
261 | |
// Builds the (unconnected) DirectShow capture graph: finds the capture
// filter for |device_name_| and its capture output pin, creates the sink
// filter that receives frames, and adds both filters to a new filter graph.
// Pins are connected later, in Allocate(). Returns false on any failure.
bool VideoCaptureDeviceWin::Init() {
  HRESULT hr = GetDeviceFilter(device_name_, capture_filter_.Receive());
  if (!capture_filter_) {
    DVLOG(2) << "Failed to create capture filter.";
    return false;
  }

  hr = GetPin(capture_filter_, PINDIR_OUTPUT, PIN_CATEGORY_CAPTURE,
              output_capture_pin_.Receive());
  if (!output_capture_pin_) {
    DVLOG(2) << "Failed to get capture output pin";
    return false;
  }

  // Create the sink filter used for receiving Captured frames.
  sink_filter_ = new SinkFilter(this);
  if (sink_filter_ == NULL) {
    DVLOG(2) << "Failed to create send filter";
    return false;
  }

  input_sink_pin_ = sink_filter_->GetPin(0);

  hr = graph_builder_.CreateInstance(CLSID_FilterGraph, NULL,
                                     CLSCTX_INPROC_SERVER);
  if (FAILED(hr)) {
    DVLOG(2) << "Failed to create graph builder.";
    return false;
  }

  // The media control interface is used to run/pause/stop the graph.
  hr = graph_builder_.QueryInterface(media_control_.Receive());
  if (FAILED(hr)) {
    DVLOG(2) << "Failed to create media control builder.";
    return false;
  }

  hr = graph_builder_->AddFilter(capture_filter_, NULL);
  if (FAILED(hr)) {
    DVLOG(2) << "Failed to add the capture device to the graph.";
    return false;
  }

  hr = graph_builder_->AddFilter(sink_filter_, NULL);
  if (FAILED(hr)) {
    DVLOG(2)<< "Failed to add the send filter to the graph.";
    return false;
  }

  // Enumerate what the device can actually deliver.
  return CreateCapabilityMap();
}
312 | |
// Configures the capture graph for the capability closest to the requested
// |width| x |height| @ |frame_rate|: sets the device output format, inserts
// an MJPEG decoder filter when needed, connects the pins and pauses the
// graph. On success |observer| is told the resulting frame format via
// OnFrameInfo() and the device moves to kAllocated; failures are reported
// through SetErrorState().
void VideoCaptureDeviceWin::Allocate(
    int width,
    int height,
    int frame_rate,
    VideoCaptureDevice::EventHandler* observer) {
  if (state_ != kIdle)
    return;

  observer_ = observer;
  // Get the camera capability that best matches the requested resolution.
  const int capability_index = GetBestMatchedCapability(width, height,
                                                        frame_rate);
  Capability capability = capabilities_[capability_index];

  // Reduce the frame rate if the requested frame rate is lower
  // than the capability.
  if (capability.frame_rate > frame_rate)
    capability.frame_rate = frame_rate;

  AM_MEDIA_TYPE* pmt = NULL;
  VIDEO_STREAM_CONFIG_CAPS caps;

  ScopedComPtr<IAMStreamConfig> stream_config;
  HRESULT hr = output_capture_pin_.QueryInterface(stream_config.Receive());
  if (FAILED(hr)) {
    SetErrorState("Can't get the Capture format settings");
    return;
  }

  // Get the windows capability from the capture device.
  hr = stream_config->GetStreamCaps(capability_index, &pmt,
                                    reinterpret_cast<BYTE*>(&caps));
  if (SUCCEEDED(hr)) {
    if (pmt->formattype == FORMAT_VideoInfo) {
      VIDEOINFOHEADER* h = reinterpret_cast<VIDEOINFOHEADER*>(pmt->pbFormat);
      if (capability.frame_rate > 0)
        h->AvgTimePerFrame = kSecondsToReferenceTime / capability.frame_rate;
    }
    // Set the sink filter to request this capability.
    sink_filter_->SetRequestedMediaCapability(capability);
    // Order the capture device to use this capability.
    hr = stream_config->SetFormat(pmt);
  }
  // NOTE(review): |pmt| returned by GetStreamCaps does not appear to be
  // freed on this path — confirm whether DeleteMediaType(pmt) is needed, as
  // is done in CreateCapabilityMap().

  if (FAILED(hr))
    SetErrorState("Failed to set capture device output format");

  if (capability.color == VideoCaptureDevice::kMJPEG && !mjpg_filter_.get()) {
    // Create MJPG filter if we need it.
    hr = mjpg_filter_.CreateInstance(CLSID_MjpegDec, NULL, CLSCTX_INPROC);

    if (SUCCEEDED(hr)) {
      GetPin(mjpg_filter_, PINDIR_INPUT, GUID_NULL, input_mjpg_pin_.Receive());
      GetPin(mjpg_filter_, PINDIR_OUTPUT, GUID_NULL,
             output_mjpg_pin_.Receive());
      hr = graph_builder_->AddFilter(mjpg_filter_, NULL);
    }

    // Creating the decoder is best-effort: on failure drop the partially
    // created objects and fall through to a direct connection attempt.
    if (FAILED(hr)) {
      mjpg_filter_.Release();
      input_mjpg_pin_.Release();
      output_mjpg_pin_.Release();
    }
  }

  if (capability.color == VideoCaptureDevice::kMJPEG && mjpg_filter_.get()) {
    // Connect the camera to the MJPEG decoder.
    hr = graph_builder_->ConnectDirect(output_capture_pin_, input_mjpg_pin_,
                                       NULL);
    // Connect the MJPEG filter to the Capture filter.
    // NOTE(review): accumulating HRESULTs with += is fragile; consider
    // checking each call separately.
    hr += graph_builder_->ConnectDirect(output_mjpg_pin_, input_sink_pin_,
                                        NULL);
  } else {
    // No decoder needed; connect the camera straight to the sink filter.
    hr = graph_builder_->ConnectDirect(output_capture_pin_, input_sink_pin_,
                                       NULL);
  }

  if (FAILED(hr)) {
    SetErrorState("Failed to connect the Capture graph.");
    return;
  }

  hr = media_control_->Pause();
  if (FAILED(hr)) {
    SetErrorState("Failed to Pause the Capture device. "
                  "Is it already occupied?");
    return;
  }

  // Get the capability back from the sink filter after the filters have been
  // connected; it may differ from what was requested.
  const Capability& used_capability = sink_filter_->ResultingCapability();
  observer_->OnFrameInfo(used_capability);

  state_ = kAllocated;
}
409 | |
410 void VideoCaptureDeviceWin::Start() { | |
411 if (state_ != kAllocated) | |
412 return; | |
413 | |
414 HRESULT hr = media_control_->Run(); | |
415 if (FAILED(hr)) { | |
416 SetErrorState("Failed to start the Capture device."); | |
417 return; | |
418 } | |
419 | |
420 state_ = kAllocated; | |
421 } | |
422 | |
// Stops streaming frames. Only valid while in the kCapturing state; on
// success the device returns to kAllocated so Start() can be called again.
void VideoCaptureDeviceWin::Stop() {
  if (state_ != kCapturing)
    return;

  HRESULT hr = media_control_->Stop();
  if (FAILED(hr)) {
    SetErrorState("Failed to stop the capture graph.");
    return;
  }

  state_ = kAllocated;
}
435 | |
// Stops the graph and disconnects all pins, returning the device to kIdle so
// Allocate() can be called again. No-op when already idle.
void VideoCaptureDeviceWin::DeAllocate() {
  if (state_ == kIdle)
    return;

  // Disconnect regardless of whether Stop() succeeded; the failure is
  // reported afterwards.
  HRESULT hr = media_control_->Stop();
  graph_builder_->Disconnect(output_capture_pin_);
  graph_builder_->Disconnect(input_sink_pin_);

  // If the MJPEG filter exists, disconnect it even if it has not been used.
  if (mjpg_filter_) {
    graph_builder_->Disconnect(input_mjpg_pin_);
    graph_builder_->Disconnect(output_mjpg_pin_);
  }

  if (FAILED(hr))
    SetErrorState("Failed to Stop the Capture device");

  state_ = kIdle;
}
455 | |
// Returns the name/id this device was created with.
const VideoCaptureDevice::Name& VideoCaptureDeviceWin::device_name() {
  return device_name_;
}
459 | |
// Implements SinkFilterObserver::SinkFilterObserver.
// Called by the sink filter for each captured frame; forwards the raw bytes
// to the observer, stamped with the current wall-clock time.
void VideoCaptureDeviceWin::FrameReceived(const uint8* buffer,
                                          int length) {
  observer_->OnIncomingCapturedFrame(buffer, length, base::Time::Now());
}
465 | |
466 bool VideoCaptureDeviceWin::CreateCapabilityMap() { | |
467 ScopedComPtr<IAMStreamConfig> stream_config; | |
468 HRESULT hr = output_capture_pin_.QueryInterface(stream_config.Receive()); | |
469 if (FAILED(hr)) { | |
470 DVLOG(2) << "Failed to get IAMStreamConfig interface from " | |
471 "capture device"; | |
472 return false; | |
473 } | |
474 | |
475 // Get interface used for getting the frame rate. | |
476 ScopedComPtr<IAMVideoControl> video_control; | |
477 hr = capture_filter_.QueryInterface(video_control.Receive()); | |
478 DVLOG_IF(2, FAILED(hr)) << "IAMVideoControl Interface NOT SUPPORTED"; | |
479 | |
480 AM_MEDIA_TYPE* pmt = NULL; | |
481 VIDEO_STREAM_CONFIG_CAPS caps; | |
482 int count, size; | |
483 | |
484 hr = stream_config->GetNumberOfCapabilities(&count, &size); | |
485 if (FAILED(hr)) { | |
486 DVLOG(2) << "Failed to GetNumberOfCapabilities"; | |
487 return false; | |
488 } | |
489 | |
490 for (int i = 0; i < count; ++i) { | |
491 hr = stream_config->GetStreamCaps(i, &pmt, | |
492 reinterpret_cast<BYTE*>(&caps)); | |
493 if (FAILED(hr)) { | |
494 DVLOG(2) << "Failed to GetStreamCaps"; | |
495 return false; | |
496 } | |
497 | |
498 if (pmt->majortype == MEDIATYPE_Video && | |
499 pmt->formattype == FORMAT_VideoInfo) { | |
500 Capability capability; | |
501 REFERENCE_TIME time_per_frame = 0; | |
502 | |
503 VIDEOINFOHEADER* h = reinterpret_cast<VIDEOINFOHEADER*>(pmt->pbFormat); | |
504 capability.width = h->bmiHeader.biWidth; | |
505 capability.height = h->bmiHeader.biHeight; | |
506 time_per_frame = h->AvgTimePerFrame; | |
507 | |
508 // Try to get the max frame rate from IAMVideoControl. | |
509 if (video_control.get()) { | |
510 LONGLONG* max_fps_ptr; | |
511 LONG list_size; | |
512 SIZE size; | |
513 size.cx = capability.width; | |
514 size.cy = capability.height; | |
515 | |
516 // GetFrameRateList doesn't return max frame rate always | |
517 // eg: Logitech Notebook. This may be due to a bug in that API | |
518 // because GetFrameRateList array is reversed in the above camera. So | |
519 // a util method written. Can't assume the first value will return | |
520 // the max fps. | |
521 hr = video_control->GetFrameRateList(output_capture_pin_, i, size, | |
522 &list_size, &max_fps_ptr); | |
523 | |
524 if (SUCCEEDED(hr) && list_size > 0) { | |
525 int min_time = *std::min_element(max_fps_ptr, | |
526 max_fps_ptr + list_size); | |
527 capability.frame_rate = (min_time > 0) ? | |
528 kSecondsToReferenceTime / min_time : 0; | |
529 } else { | |
530 // Get frame rate from VIDEOINFOHEADER. | |
531 capability.frame_rate = (time_per_frame > 0) ? | |
532 static_cast<int>(kSecondsToReferenceTime / time_per_frame) : 0; | |
533 } | |
534 } else { | |
535 // Get frame rate from VIDEOINFOHEADER since IAMVideoControl is | |
536 // not supported. | |
537 capability.frame_rate = (time_per_frame > 0) ? | |
538 static_cast<int>(kSecondsToReferenceTime / time_per_frame) : 0; | |
539 } | |
540 | |
541 // We can't switch MEDIATYPE :~(. | |
542 if (pmt->subtype == kMediaSubTypeI420) { | |
543 capability.color = VideoCaptureDevice::kI420; | |
544 } else if (pmt->subtype == MEDIASUBTYPE_IYUV) { // identical to kI420. | |
545 capability.color = VideoCaptureDevice::kI420; | |
546 } else if (pmt->subtype == MEDIASUBTYPE_RGB24) { | |
547 capability.color = VideoCaptureDevice::kRGB24; | |
548 } else if (pmt->subtype == MEDIASUBTYPE_YUY2) { | |
549 capability.color = VideoCaptureDevice::kYUY2; | |
550 } else if (pmt->subtype == MEDIASUBTYPE_MJPG) { | |
551 capability.color = VideoCaptureDevice::kMJPEG; | |
552 } else { | |
553 WCHAR guid_str[128]; | |
554 StringFromGUID2(pmt->subtype, guid_str, arraysize(guid_str)); | |
555 DVLOG(2) << "Device support unknown media type " << guid_str; | |
556 continue; | |
557 } | |
558 capabilities_[i] = capability; | |
559 } | |
560 DeleteMediaType(pmt); | |
561 pmt = NULL; | |
562 } | |
563 | |
564 return capabilities_.size() > 0; | |
565 } | |
566 | |
567 // Loops through the list of capabilities and returns an index of the best | |
568 // matching capability. | |
569 // The algorithm prioritize height, width, frame rate and color format in that | |
570 // order. | |
571 int VideoCaptureDeviceWin::GetBestMatchedCapability(int requested_width, | |
572 int requested_height, | |
573 int requested_frame_rate) { | |
574 std::list<ResolutionDiff> diff_list; | |
575 | |
576 // Loop through the candidates to create a list of differentials between the | |
577 // requested resolution and the camera capability. | |
578 for (CapabilityMap::iterator iterator = capabilities_.begin(); | |
579 iterator != capabilities_.end(); | |
580 ++iterator) { | |
581 Capability capability = iterator->second; | |
582 | |
583 ResolutionDiff diff; | |
584 diff.capability_index = iterator->first; | |
585 diff.diff_width = capability.width - requested_width; | |
586 diff.diff_height = capability.height - requested_height; | |
587 diff.diff_frame_rate = capability.frame_rate - requested_frame_rate; | |
588 diff.color = capability.color; | |
589 diff_list.push_back(diff); | |
590 } | |
591 | |
592 // Sort the best height candidates. | |
593 diff_list.sort(CompareHeight); | |
594 int best_diff = diff_list.front().diff_height; | |
595 for (std::list<ResolutionDiff>::iterator it = diff_list.begin(); | |
596 it != diff_list.end(); ++it) { | |
597 if (it->diff_height != best_diff) { | |
598 // Remove all candidates but the best. | |
599 diff_list.erase(it, diff_list.end()); | |
600 break; | |
601 } | |
602 } | |
603 | |
604 // Sort the best width candidates. | |
605 diff_list.sort(CompareWidth); | |
606 best_diff = diff_list.front().diff_width; | |
607 for (std::list<ResolutionDiff>::iterator it = diff_list.begin(); | |
608 it != diff_list.end(); ++it) { | |
609 if (it->diff_width != best_diff) { | |
610 // Remove all candidates but the best. | |
611 diff_list.erase(it, diff_list.end()); | |
612 break; | |
613 } | |
614 } | |
615 | |
616 // Sort the best frame rate candidates. | |
617 diff_list.sort(CompareFrameRate); | |
618 best_diff = diff_list.front().diff_frame_rate; | |
619 for (std::list<ResolutionDiff>::iterator it = diff_list.begin(); | |
620 it != diff_list.end(); ++it) { | |
621 if (it->diff_frame_rate != best_diff) { | |
622 diff_list.erase(it, diff_list.end()); | |
623 break; | |
624 } | |
625 } | |
626 | |
627 // Decide the best color format. | |
628 diff_list.sort(CompareColor); | |
629 return diff_list.front().capability_index; | |
630 } | |
631 | |
// Logs |reason|, moves the device to the kError state and notifies the
// observer. The state is updated before OnError() in case the observer
// calls back into this object.
void VideoCaptureDeviceWin::SetErrorState(const char* reason) {
  DLOG(ERROR) << reason;
  state_ = kError;
  observer_->OnError();
}
637 | |
638 } // namespace media | |
OLD | NEW |