Chromium Code Reviews

Side by Side Diff: media/video/capture/win/video_capture_device_win.cc

Issue 7229013: This is the VideoCaptureDevice implementation for windows. (Closed) Base URL: http://src.chromium.org/svn/trunk/src/
Patch Set: Fix crash on some Windows versions when there is no camera available. Created 9 years, 5 months ago
1 // Copyright (c) 2011 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "media/video/capture/win/video_capture_device_win.h"
6
7 #include <algorithm>
8 #include <list>
9
10 #include "base/string_util.h"
11 #include "base/sys_string_conversions.h"
12 #include "base/win/scoped_variant.h"
13
14 using base::win::ScopedComPtr;
15 using base::win::ScopedVariant;
16
17 namespace {
18
19 // Finds and creates a DirectShow Video Capture filter matching the device_name.
20 HRESULT GetDeviceFilter(const media::VideoCaptureDevice::Name& device_name,
21 IBaseFilter** filter) {
22 DCHECK(filter);
23
24 ScopedComPtr<ICreateDevEnum> dev_enum;
25 HRESULT hr = dev_enum.CreateInstance(CLSID_SystemDeviceEnum, NULL,
26 CLSCTX_INPROC);
27 if (FAILED(hr))
28 return hr;
29
30 ScopedComPtr<IEnumMoniker> enum_moniker;
31 hr = dev_enum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
32 enum_moniker.Receive(), 0);
33 // CreateClassEnumerator returns S_FALSE on some Windows versions
34 // when no camera exists, so the FAILED macro can't be used here.
35 if (hr != S_OK)
36 return hr;
37
38 ScopedComPtr<IMoniker> moniker;
39 ScopedComPtr<IBaseFilter> capture_filter;
40 DWORD fetched = 0;
41 while (enum_moniker->Next(1, moniker.Receive(), &fetched) == S_OK) {
42 ScopedComPtr<IPropertyBag> prop_bag;
43 hr = moniker->BindToStorage(0, 0, IID_IPropertyBag, prop_bag.ReceiveVoid());
44 if (FAILED(hr)) {
45 moniker.Release();
46 continue;
47 }
48
49 // Find the description or friendly name.
50 static const wchar_t* kPropertyNames[] = {
51 L"DevicePath", L"Description", L"FriendlyName"
52 };
53 ScopedVariant name;
54 for (size_t i = 0;
55 i < arraysize(kPropertyNames) && name.type() != VT_BSTR; ++i) {
56 prop_bag->Read(kPropertyNames[i], name.Receive(), 0);
57 }
58 if (name.type() == VT_BSTR) {
59 std::string device_path(base::SysWideToUTF8(V_BSTR(&name)));
60 if (device_path.compare(device_name.unique_id) == 0) {
61 // We have found the requested device
62 hr = moniker->BindToObject(0, 0, IID_IBaseFilter,
63 capture_filter.ReceiveVoid());
64 DVPLOG_IF(2, FAILED(hr)) << "Failed to bind camera filter.";
65 break;
66 }
67 }
68 moniker.Release();
69 }
70
71 *filter = capture_filter.Detach();
72 if (!*filter && SUCCEEDED(hr))
73 hr = HRESULT_FROM_WIN32(ERROR_NOT_FOUND);
74
75 return hr;
76 }
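
A minimal usage sketch for GetDeviceFilter (the Name values below are hypothetical; unique_id normally carries the DevicePath, Description, or FriendlyName string read from the property bag):

  media::VideoCaptureDevice::Name name;
  name.device_name = "Example Webcam";              // hypothetical
  name.unique_id = "\\\\?\\usb#vid_1234&pid_5678";  // hypothetical DevicePath
  ScopedComPtr<IBaseFilter> filter;
  HRESULT hr = GetDeviceFilter(name, filter.Receive());
  if (FAILED(hr) || !filter)
    return;  // No matching capture device was found.
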
77
78 // Check if a Pin matches a category.
79 bool PinMatchesCategory(IPin* pin, REFGUID category) {
80 DCHECK(pin);
81 bool found = false;
82 ScopedComPtr<IKsPropertySet> ks_property;
83 HRESULT hr = ks_property.QueryFrom(pin);
84 if (SUCCEEDED(hr)) {
85 GUID pin_category;
86 DWORD return_value;
87 hr = ks_property->Get(AMPROPSETID_Pin, AMPROPERTY_PIN_CATEGORY, NULL, 0,
88 &pin_category, sizeof(pin_category), &return_value);
89 if (SUCCEEDED(hr) && (return_value == sizeof(pin_category))) {
90 found = (pin_category == category);
91 }
92 }
93 return found;
94 }
95
96 // Finds an IPin on an IBaseFilter given the direction and category.
97 HRESULT GetPin(IBaseFilter* filter, PIN_DIRECTION pin_dir, REFGUID category,
98 IPin** pin) {
99 DCHECK(pin);
100 ScopedComPtr<IEnumPins> pin_enum;
101 HRESULT hr = filter->EnumPins(pin_enum.Receive());
102 if (pin_enum == NULL)
103 return hr;
104
105 // Get the first pin that matches the requested direction and category.
106 hr = pin_enum->Reset(); // Set to first pin.
107 while ((hr = pin_enum->Next(1, pin, NULL)) == S_OK) {
108 PIN_DIRECTION this_pin_dir = static_cast<PIN_DIRECTION>(-1);
109 hr = (*pin)->QueryDirection(&this_pin_dir);
110 if (pin_dir == this_pin_dir) {
111 if (category == GUID_NULL || PinMatchesCategory(*pin, category))
112 return S_OK;
113 }
114 (*pin)->Release();
115 }
116
117 return E_FAIL;
118 }
119
120 // Release the format block for a media type.
121 // http://msdn.microsoft.com/en-us/library/dd375432(VS.85).aspx
122 void FreeMediaType(AM_MEDIA_TYPE* mt) {
123 if (mt->cbFormat != 0) {
124 CoTaskMemFree(mt->pbFormat);
125 mt->cbFormat = 0;
126 mt->pbFormat = NULL;
127 }
128 if (mt->pUnk != NULL) {
129 NOTREACHED();
130 // pUnk should not be used.
131 mt->pUnk->Release();
132 mt->pUnk = NULL;
133 }
134 }
135
136 // Delete a media type structure that was allocated on the heap.
137 // http://msdn.microsoft.com/en-us/library/dd375432(VS.85).aspx
138 void DeleteMediaType(AM_MEDIA_TYPE* mt) {
139 if (mt != NULL) {
140 FreeMediaType(mt);
141 CoTaskMemFree(mt);
142 }
143 }
144
146 // Helper structure used for comparing video capture capabilities.
146 struct ResolutionDiff {
147 int capability_index;
148 int diff_height;
149 int diff_width;
150 int diff_frame_rate;
151 media::VideoCaptureDevice::Format color;
152 };
153
154 bool CompareHeight(ResolutionDiff item1, ResolutionDiff item2) {
155 return abs(item1.diff_height) < abs(item2.diff_height);
156 }
157
158 bool CompareWidth(ResolutionDiff item1, ResolutionDiff item2) {
159 return abs(item1.diff_width) < abs(item2.diff_width);
160 }
161
162 bool CompareFrameRate(ResolutionDiff item1, ResolutionDiff item2) {
163 return abs(item1.diff_frame_rate) < abs(item2.diff_frame_rate);
164 }
165
166 bool CompareColor(ResolutionDiff item1, ResolutionDiff item2) {
167 return (item1.color < item2.color);
168 }
169
170 } // namespace
171
172 namespace media {
173
174 // Name of a fake DirectShow filter that exists on computers with
175 // GTalk installed.
176 static const char kGoogleCameraAdapter[] = "google camera adapter";
177
178 // Gets the names of all video capture devices connected to this computer.
179 void VideoCaptureDevice::GetDeviceNames(Names* device_names) {
180 DCHECK(device_names);
181
182 base::win::ScopedCOMInitializer coinit;
183 ScopedComPtr<ICreateDevEnum> dev_enum;
184 HRESULT hr = dev_enum.CreateInstance(CLSID_SystemDeviceEnum, NULL,
185 CLSCTX_INPROC);
186 if (FAILED(hr))
187 return;
188
189 ScopedComPtr<IEnumMoniker> enum_moniker;
190 hr = dev_enum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
191 enum_moniker.Receive(), 0);
192 // CreateClassEnumerator returns S_FALSE on some Windows versions
193 // when no camera exists, so the FAILED macro can't be used here.
194 if (hr != S_OK)
195 return;
196
197 device_names->clear();
198
199 // Enumerate all video capture devices.
200 ScopedComPtr<IMoniker> moniker;
201 int index = 0;
202 while (enum_moniker->Next(1, moniker.Receive(), NULL) == S_OK) {
203 Name device;
204 ScopedComPtr<IPropertyBag> prop_bag;
205 hr = moniker->BindToStorage(0, 0, IID_IPropertyBag, prop_bag.ReceiveVoid());
206 if (FAILED(hr)) {
207 moniker.Release();
208 continue;
209 }
210
211 // Find the description or friendly name.
212 ScopedVariant name;
213 hr = prop_bag->Read(L"Description", name.Receive(), 0);
214 if (FAILED(hr))
215 hr = prop_bag->Read(L"FriendlyName", name.Receive(), 0);
216
217 if (SUCCEEDED(hr) && name.type() == VT_BSTR) {
218 // Ignore all VFW drivers and the special Google Camera Adapter.
219 // Google Camera Adapter is not a real DirectShow camera device.
220 // VFW drivers are legacy Video for Windows drivers that cannot be used.
221 const wchar_t* str_ptr = V_BSTR(&name);
222 const int name_length = arraysize(kGoogleCameraAdapter) - 1;
223
224 if ((wcsstr(str_ptr, L"(VFW)") == NULL) &&
225 (lstrlenW(str_ptr) < name_length ||
226 !LowerCaseEqualsASCII(str_ptr, str_ptr + name_length,
227 kGoogleCameraAdapter))) {
228 device.device_name = base::SysWideToUTF8(str_ptr);
229 name.Reset();
230 hr = prop_bag->Read(L"DevicePath", name.Receive(), 0);
231 if (FAILED(hr)) {
232 device.unique_id = device.device_name;
233 } else if (name.type() == VT_BSTR) {
234 device.unique_id = base::SysWideToUTF8(V_BSTR(&name));
235 }
236
237 device_names->push_back(device);
238 }
239 }
240 moniker.Release();
241 }
242 }
243
244 VideoCaptureDevice* VideoCaptureDevice::Create(const Name& device_name) {
245 VideoCaptureDeviceWin* self = new VideoCaptureDeviceWin(device_name);
246 if (self && self->Init())
247 return self;
248
249 delete self;
250 return NULL;
251 }
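
A minimal sketch of how a client might drive this device through the VideoCaptureDevice interface, assuming Names is a standard container and |handler| is some VideoCaptureDevice::EventHandler implementation (both illustrative, not part of this patch):

  media::VideoCaptureDevice::Names names;
  media::VideoCaptureDevice::GetDeviceNames(&names);
  if (names.empty())
    return;

  media::VideoCaptureDevice* device =
      media::VideoCaptureDevice::Create(names.front());
  if (!device)
    return;

  // |handler| implements VideoCaptureDevice::EventHandler and receives the
  // OnFrameInfo(), OnIncomingCapturedFrame() and OnError() callbacks.
  device->Allocate(640, 480, 30, handler);
  device->Start();
  // ... captured frames arrive on |handler| ...
  device->Stop();
  device->DeAllocate();
  delete device;
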
252
253 VideoCaptureDeviceWin::VideoCaptureDeviceWin(const Name& device_name)
254 : device_name_(device_name),
255 state_(kIdle) {
256 }
257
258 VideoCaptureDeviceWin::~VideoCaptureDeviceWin() {
259 if (media_control_)
260 media_control_->Stop();
261
262 if (graph_builder_) {
263 if (sink_filter_) {
264 graph_builder_->RemoveFilter(sink_filter_);
265 sink_filter_ = NULL;
266 }
267
268 if (capture_filter_)
269 graph_builder_->RemoveFilter(capture_filter_);
270
271 if (mjpg_filter_)
272 graph_builder_->RemoveFilter(mjpg_filter_);
273 }
274 }
275
276 bool VideoCaptureDeviceWin::Init() {
277 HRESULT hr = GetDeviceFilter(device_name_, capture_filter_.Receive());
278 if (!capture_filter_) {
279 DVLOG(2) << "Failed to create capture filter.";
280 return false;
281 }
282
283 hr = GetPin(capture_filter_, PINDIR_OUTPUT, PIN_CATEGORY_CAPTURE,
284 output_capture_pin_.Receive());
285 if (!output_capture_pin_) {
286 DVLOG(2) << "Failed to get capture output pin";
287 return false;
288 }
289
290 // Create the sink filter used for receiving captured frames.
291 sink_filter_ = new SinkFilter(this);
292 if (sink_filter_ == NULL) {
293 DVLOG(2) << "Failed to create sink filter";
294 return false;
295 }
296
297 input_sink_pin_ = sink_filter_->GetPin(0);
298
299 hr = graph_builder_.CreateInstance(CLSID_FilterGraph, NULL,
300 CLSCTX_INPROC_SERVER);
301 if (FAILED(hr)) {
302 DVLOG(2) << "Failed to create graph builder.";
303 return false;
304 }
305
306 hr = graph_builder_.QueryInterface(media_control_.Receive());
307 if (FAILED(hr)) {
308 DVLOG(2) << "Failed to create media control builder.";
309 return false;
310 }
311
312 hr = graph_builder_->AddFilter(capture_filter_, NULL);
313 if (FAILED(hr)) {
314 DVLOG(2) << "Failed to add the capture device to the graph.";
315 return false;
316 }
317
318 hr = graph_builder_->AddFilter(sink_filter_, NULL);
319 if (FAILED(hr)) {
320 DVLOG(2) << "Failed to add the sink filter to the graph.";
321 return false;
322 }
323
324 return CreateCapabilityMap();
325 }
326
327 void VideoCaptureDeviceWin::Allocate(
328 int width,
329 int height,
330 int frame_rate,
331 VideoCaptureDevice::EventHandler* observer) {
332 if (state_ != kIdle)
333 return;
334
335 observer_ = observer;
336 // Get the camera capability that best matches the requested resolution.
337 const int capability_index = GetBestMatchedCapability(width, height,
338 frame_rate);
339 Capability capability = capabilities_[capability_index];
340
341 // Reduce the frame rate if the requested frame rate is lower
342 // than the capability.
343 if (capability.frame_rate > frame_rate)
344 capability.frame_rate = frame_rate;
345
346 AM_MEDIA_TYPE* pmt = NULL;
347 VIDEO_STREAM_CONFIG_CAPS caps;
348
349 ScopedComPtr<IAMStreamConfig> stream_config;
350 HRESULT hr = output_capture_pin_.QueryInterface(stream_config.Receive());
351 if (FAILED(hr)) {
352 SetErrorState("Can't get the Capture format settings");
353 return;
354 }
355
356 // Get the Windows capability from the capture device.
357 hr = stream_config->GetStreamCaps(capability_index, &pmt,
358 reinterpret_cast<BYTE*>(&caps));
359 if (SUCCEEDED(hr)) {
360 if (pmt->formattype == FORMAT_VideoInfo) {
361 VIDEOINFOHEADER* h = reinterpret_cast<VIDEOINFOHEADER*>(pmt->pbFormat);
362 if (capability.frame_rate > 0)
363 h->AvgTimePerFrame = kSecondsToReferenceTime / capability.frame_rate;
364 }
365 // Set the sink filter to request this capability.
366 sink_filter_->SetRequestedMediaCapability(capability);
367 // Order the capture device to use this capability.
368 hr = stream_config->SetFormat(pmt);
369 }
370 DeleteMediaType(pmt);  // GetStreamCaps allocated |pmt|; free it.
371 if (FAILED(hr))
372 SetErrorState("Failed to set capture device output format");
373
374 if (capability.color == VideoCaptureDevice::kMJPEG && !mjpg_filter_.get()) {
375 // Create MJPG filter if we need it.
376 hr = mjpg_filter_.CreateInstance(CLSID_MjpegDec, NULL, CLSCTX_INPROC);
377
378 if (SUCCEEDED(hr)) {
379 GetPin(mjpg_filter_, PINDIR_INPUT, GUID_NULL, input_mjpg_pin_.Receive());
380 GetPin(mjpg_filter_, PINDIR_OUTPUT, GUID_NULL,
381 output_mjpg_pin_.Receive());
382 hr = graph_builder_->AddFilter(mjpg_filter_, NULL);
383 }
384
385 if (FAILED(hr)) {
386 mjpg_filter_.Release();
387 input_mjpg_pin_.Release();
388 output_mjpg_pin_.Release();
389 }
390 }
391
392 if (capability.color == VideoCaptureDevice::kMJPEG && mjpg_filter_.get()) {
393 // Connect the camera to the MJPEG decoder.
394 hr = graph_builder_->ConnectDirect(output_capture_pin_, input_mjpg_pin_,
395 NULL);
396 // Connect the MJPEG decoder to the sink filter.
397 if (SUCCEEDED(hr))
398 hr = graph_builder_->ConnectDirect(output_mjpg_pin_, input_sink_pin_, NULL);
399 } else {
400 hr = graph_builder_->ConnectDirect(output_capture_pin_, input_sink_pin_,
401 NULL);
402 }
403
404 if (FAILED(hr)) {
405 SetErrorState("Failed to connect the Capture graph.");
406 return;
407 }
408
409 hr = media_control_->Pause();
410 if (FAILED(hr)) {
411 SetErrorState("Failed to Pause the Capture device. "
412 "Is it already occupied?");
413 return;
414 }
415
416 // Get the capability back from the sink filter after the filter has been
417 // connected.
418 const Capability& used_capability = sink_filter_->ResultingCapability();
419 observer_->OnFrameInfo(used_capability);
420
421 state_ = kAllocated;
422 }
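
The AvgTimePerFrame arithmetic above (and the inverse conversion in CreateCapabilityMap below) relies on REFERENCE_TIME being expressed in 100-nanosecond units; assuming kSecondsToReferenceTime is 10,000,000, a quick worked example:

  // One second is 10,000,000 REFERENCE_TIME units of 100 ns each, so for a
  // requested 30 fps:
  //   AvgTimePerFrame = 10,000,000 / 30 = 333,333   (about 33.3 ms per frame)
  // and converting a capability back to a frame rate:
  //   frame_rate = 10,000,000 / 333,333 = 30
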
423
424 void VideoCaptureDeviceWin::Start() {
425 if (state_ != kAllocated)
426 return;
427
428 HRESULT hr = media_control_->Run();
429 if (FAILED(hr)) {
430 SetErrorState("Failed to start the Capture device.");
431 return;
432 }
433
434 state_ = kCapturing;
435 }
436
437 void VideoCaptureDeviceWin::Stop() {
438 if (state_ != kCapturing)
439 return;
440
441 HRESULT hr = media_control_->Stop();
442 if (FAILED(hr)) {
443 SetErrorState("Failed to stop the capture graph.");
444 return;
445 }
446
447 state_ = kAllocated;
448 }
449
450 void VideoCaptureDeviceWin::DeAllocate() {
451 if (state_ == kIdle)
452 return;
453
454 HRESULT hr = media_control_->Stop();
455 graph_builder_->Disconnect(output_capture_pin_);
456 graph_builder_->Disconnect(input_sink_pin_);
457
458 // If the MJPEG filter exists, disconnect it even if it has not been used.
459 if (mjpg_filter_) {
460 graph_builder_->Disconnect(input_mjpg_pin_);
461 graph_builder_->Disconnect(output_mjpg_pin_);
462 }
463
464 if (FAILED(hr)) {
465 SetErrorState("Failed to Stop the Capture device");
466 return;
467 }
468
469 state_ = kIdle;
470 }
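
The Allocate()/Start()/Stop()/DeAllocate() methods above implement a small state machine over state_; a sketch of the transitions, as derived from the state checks in this file:

  // kIdle        --Allocate()-->      kAllocated
  // kAllocated   --Start()-->         kCapturing
  // kCapturing   --Stop()-->          kAllocated
  // any non-idle --DeAllocate()-->    kIdle (on success)
  // any state    --SetErrorState()--> kError
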
471
472 const VideoCaptureDevice::Name& VideoCaptureDeviceWin::device_name() {
473 return device_name_;
474 }
475
476 // Implements SinkFilterObserver.
477 void VideoCaptureDeviceWin::FrameReceived(const uint8* buffer,
478 int length) {
479 observer_->OnIncomingCapturedFrame(buffer, length, base::Time::Now());
480 }
481
482 bool VideoCaptureDeviceWin::CreateCapabilityMap() {
483 ScopedComPtr<IAMStreamConfig> stream_config;
484 HRESULT hr = output_capture_pin_.QueryInterface(stream_config.Receive());
485 if (FAILED(hr)) {
486 DVLOG(2) << "Failed to get IAMStreamConfig interface from "
487 "capture device";
488 return false;
489 }
490
491 // Get interface used for getting the frame rate.
492 ScopedComPtr<IAMVideoControl> video_control;
493 hr = capture_filter_.QueryInterface(video_control.Receive());
494 DVLOG_IF(2, FAILED(hr)) << "IAMVideoControl Interface NOT SUPPORTED";
495
496 AM_MEDIA_TYPE* media_type = NULL;
497 VIDEO_STREAM_CONFIG_CAPS caps;
498 int count, size;
499
500 hr = stream_config->GetNumberOfCapabilities(&count, &size);
501 if (FAILED(hr)) {
502 DVLOG(2) << "Failed to GetNumberOfCapabilities";
503 return false;
504 }
505
506 for (int i = 0; i < count; ++i) {
507 hr = stream_config->GetStreamCaps(i, &media_type,
508 reinterpret_cast<BYTE*>(&caps));
509 if (FAILED(hr)) {
510 DVLOG(2) << "Failed to GetStreamCaps";
511 return false;
512 }
513
514 if (media_type->majortype == MEDIATYPE_Video &&
515 media_type->formattype == FORMAT_VideoInfo) {
516 Capability capability;
517 REFERENCE_TIME time_per_frame = 0;
518
519 VIDEOINFOHEADER* h =
520 reinterpret_cast<VIDEOINFOHEADER*>(media_type->pbFormat);
521 capability.width = h->bmiHeader.biWidth;
522 capability.height = h->bmiHeader.biHeight;
523 time_per_frame = h->AvgTimePerFrame;
524
525 // Try to get the max frame rate from IAMVideoControl.
526 if (video_control.get()) {
527 LONGLONG* max_fps_ptr;
528 LONG list_size;
529 SIZE frame_size;
530 frame_size.cx = capability.width;
531 frame_size.cy = capability.height;
532
533 // GetFrameRateList returns a list of supported times per frame, but it
534 // does not always put the maximum frame rate first. For example, the
535 // list is reversed on Logitech Notebook cameras, possibly due to an API
536 // or driver bug. Therefore we can't assume the first value is the max
537 // fps; take the smallest time per frame instead.
538 hr = video_control->GetFrameRateList(output_capture_pin_, i, frame_size,
539 &list_size, &max_fps_ptr);
540
541 if (SUCCEEDED(hr) && list_size > 0) {
542 int min_time = static_cast<int>(*std::min_element(max_fps_ptr,
543 max_fps_ptr + list_size));
544 capability.frame_rate = (min_time > 0) ?
545 static_cast<int>(kSecondsToReferenceTime / min_time) : 0;
546 } else {
547 // Get frame rate from VIDEOINFOHEADER.
548 capability.frame_rate = (time_per_frame > 0) ?
549 static_cast<int>(kSecondsToReferenceTime / time_per_frame) : 0;
550 }
551 } else {
552 // Get frame rate from VIDEOINFOHEADER since IAMVideoControl is
553 // not supported.
554 capability.frame_rate = (time_per_frame > 0) ?
555 static_cast<int>(kSecondsToReferenceTime / time_per_frame) : 0;
556 }
557
558 // We can't switch MEDIATYPE :~(.
559 if (media_type->subtype == kMediaSubTypeI420) {
560 capability.color = VideoCaptureDevice::kI420;
561 } else if (media_type->subtype == MEDIASUBTYPE_IYUV) {
562 // This is identical to kI420.
563 capability.color = VideoCaptureDevice::kI420;
564 } else if (media_type->subtype == MEDIASUBTYPE_RGB24) {
565 capability.color = VideoCaptureDevice::kRGB24;
566 } else if (media_type->subtype == MEDIASUBTYPE_YUY2) {
567 capability.color = VideoCaptureDevice::kYUY2;
568 } else if (media_type->subtype == MEDIASUBTYPE_MJPG) {
569 capability.color = VideoCaptureDevice::kMJPEG;
570 } else {
571 WCHAR guid_str[128];
572 StringFromGUID2(media_type->subtype, guid_str, arraysize(guid_str));
573 DVLOG(2) << "Device support unknown media type " << guid_str;
574 continue;
575 }
576 capabilities_[i] = capability;
577 }
578 DeleteMediaType(media_type);
579 media_type = NULL;
580 }
581
582 return !capabilities_.empty();
583 }
584
585 // Loops through the list of capabilities and returns the index of the best
586 // matching capability.
587 // The algorithm prioritizes height, width, frame rate, and color format, in
588 // that order.
589 int VideoCaptureDeviceWin::GetBestMatchedCapability(int requested_width,
590 int requested_height,
591 int requested_frame_rate) {
592 std::list<ResolutionDiff> diff_list;
593
594 // Loop through the candidates to create a list of differentials between the
595 // requested resolution and the camera capability.
596 for (CapabilityMap::iterator iterator = capabilities_.begin();
597 iterator != capabilities_.end();
598 ++iterator) {
599 Capability capability = iterator->second;
600
601 ResolutionDiff diff;
602 diff.capability_index = iterator->first;
603 diff.diff_width = capability.width - requested_width;
604 diff.diff_height = capability.height - requested_height;
605 diff.diff_frame_rate = capability.frame_rate - requested_frame_rate;
606 diff.color = capability.color;
607 diff_list.push_back(diff);
608 }
609
610 // Sort the best height candidates.
611 diff_list.sort(&CompareHeight);
612 int best_diff = diff_list.front().diff_height;
613 for (std::list<ResolutionDiff>::iterator it = diff_list.begin();
614 it != diff_list.end(); ++it) {
615 if (it->diff_height != best_diff) {
616 // Remove all candidates but the best.
617 diff_list.erase(it, diff_list.end());
618 break;
619 }
620 }
621
622 // Sort the best width candidates.
623 diff_list.sort(&CompareWidth);
624 best_diff = diff_list.front().diff_width;
625 for (std::list<ResolutionDiff>::iterator it = diff_list.begin();
626 it != diff_list.end(); ++it) {
627 if (it->diff_width != best_diff) {
628 // Remove all candidates but the best.
629 diff_list.erase(it, diff_list.end());
630 break;
631 }
632 }
633
634 // Sort the best frame rate candidates.
635 diff_list.sort(&CompareFrameRate);
636 best_diff = diff_list.front().diff_frame_rate;
637 for (std::list<ResolutionDiff>::iterator it = diff_list.begin();
638 it != diff_list.end(); ++it) {
639 if (it->diff_frame_rate != best_diff) {
640 diff_list.erase(it, diff_list.end());
641 break;
642 }
643 }
644
645 // Decide the best color format.
646 diff_list.sort(&CompareColor);
647 return diff_list.front().capability_index;
648 }
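
To make the successive filtering concrete, a hypothetical example (the capability values are invented for illustration):

  // Request: 640x480 at 30 fps. Candidates:
  //   index 0: 640x480 @ 30 fps, kYUY2  -> diffs (0, 0, 0)
  //   index 1: 640x480 @ 15 fps, kI420  -> diffs (0, 0, -15)
  //   index 2: 320x240 @ 30 fps, kI420  -> diffs (-240, -320, 0)
  // The height pass drops index 2, the width pass keeps the remaining tie,
  // the frame rate pass drops index 1, and the final color sort is then
  // trivial, so index 0 is returned.
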
649
650 void VideoCaptureDeviceWin::SetErrorState(const char* reason) {
651 DLOG(ERROR) << reason;
652 state_ = kError;
653 observer_->OnError();
654 }
655
656 } // namespace media