Chromium Code Reviews

Side by Side Diff: media/video/capture/win/video_capture_device_win.cc

Issue 7229013: This is the VideoCaptureDevice implementation for windows. (Closed) Base URL: http://src.chromium.org/svn/trunk/src/
Patch Set: Fixed code review issues found by Scherkus and Tommi. Created 9 years, 5 months ago
1 // Copyright (c) 2011 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "media/video/capture/win/video_capture_device_win.h"
6
7 #include <algorithm>
8 #include <list>
9
10 #include "base/string_util.h"
11 #include "base/sys_string_conversions.h"
12 #include "base/win/scoped_variant.h"
13
14 using base::win::ScopedComPtr;
15 using base::win::ScopedVariant;
16
17 namespace {
18
19 // Finds and creates a DirectShow Video Capture filter matching the device_name.
20 HRESULT GetDeviceFilter(const media::VideoCaptureDevice::Name& device_name,
21 IBaseFilter** filter) {
22 DCHECK(filter);
23
24 ScopedComPtr<ICreateDevEnum> dev_enum;
25 HRESULT hr = dev_enum.CreateInstance(CLSID_SystemDeviceEnum, NULL,
26 CLSCTX_INPROC);
27 if (FAILED(hr))
28 return hr;
29
30 ScopedComPtr<IEnumMoniker> enum_moniker;
31 hr = dev_enum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
32 enum_moniker.Receive(), 0);
33 if (hr != S_OK)
34 return hr;
35
36 ScopedComPtr<IMoniker> moniker;
37 ScopedComPtr<IBaseFilter> capture_filter;
38 DWORD fetched = 0;
39 while (enum_moniker->Next(1, moniker.Receive(), &fetched) == S_OK) {
40 ScopedComPtr<IPropertyBag> prop_bag;
41 hr = moniker->BindToStorage(0, 0, IID_IPropertyBag, prop_bag.ReceiveVoid());
42 if (FAILED(hr)) {
43 moniker.Release();
44 continue;
45 }
46
47 // Find the device path, description or friendly name.
48 static const wchar_t* kPropertyNames[] = {
49 L"DevicePath", L"Description", L"FriendlyName"
50 };
51 ScopedVariant name;
52 for (size_t i = 0;
53 i < arraysize(kPropertyNames) && name.type() != VT_BSTR; ++i) {
54 prop_bag->Read(kPropertyNames[i], name.Receive(), 0);
55 }
56 if (name.type() == VT_BSTR) {
57 std::string device_path(base::SysWideToUTF8(V_BSTR(&name)));
58 if (device_path.compare(device_name.unique_id) == 0) {
59 // We have found the requested device.
60 hr = moniker->BindToObject(0, 0, IID_IBaseFilter,
61 capture_filter.ReceiveVoid());
62 DVPLOG_IF(2, FAILED(hr)) << "Failed to bind camera filter.";
63 break;
64 }
65 }
66 moniker.Release();
67 }
68
69 *filter = capture_filter.Detach();
70 if (!*filter && SUCCEEDED(hr))
71 hr = HRESULT_FROM_WIN32(ERROR_NOT_FOUND);
72
73 return hr;
74 }
75
76 // Check if a Pin matches a category.
77 bool PinMatchesCategory(IPin* pin, REFGUID category) {
78 DCHECK(pin);
79 bool found = false;
80 ScopedComPtr<IKsPropertySet> ks_property;
81 HRESULT hr = ks_property.QueryFrom(pin);
82 if (SUCCEEDED(hr)) {
83 GUID pin_category;
84 DWORD return_value;
85 hr = ks_property->Get(AMPROPSETID_Pin, AMPROPERTY_PIN_CATEGORY, NULL, 0,
86 &pin_category, sizeof(pin_category), &return_value);
87 if (SUCCEEDED(hr) && (return_value == sizeof(pin_category))) {
88 found = (pin_category == category) ? true : false;
89 }
90 }
91 return found;
92 }
93
94 // Finds an IPin on an IBaseFilter given the direction and category.
95 HRESULT GetPin(IBaseFilter* filter, PIN_DIRECTION pin_dir, REFGUID category,
96 IPin** pin) {
97 DCHECK(pin);
98 ScopedComPtr<IEnumPins> pin_enum;
99 HRESULT hr = filter->EnumPins(pin_enum.Receive());
100 if (pin_enum == NULL)
101 return hr;
102
103 // Find the first pin that matches the given direction and category.
104 hr = pin_enum->Reset();  // Start from the first pin.
105 while ((hr = pin_enum->Next(1, pin, NULL)) == S_OK) {
106 PIN_DIRECTION this_pin_dir = static_cast<PIN_DIRECTION>(-1);
107 hr = (*pin)->QueryDirection(&this_pin_dir);
108 if (pin_dir == this_pin_dir) {
109 if (category == GUID_NULL || PinMatchesCategory(*pin, category))
110 return S_OK;
111 }
112 (*pin)->Release();
113 }
114
115 return E_FAIL;
116 }
117
118 // Release the format block for a media type.
119 // http://msdn.microsoft.com/en-us/library/dd375432(VS.85).aspx
120 void FreeMediaType(AM_MEDIA_TYPE* mt) {
121 if (mt->cbFormat != 0) {
122 CoTaskMemFree(mt->pbFormat);
123 mt->cbFormat = 0;
124 mt->pbFormat = NULL;
125 }
126 if (mt->pUnk != NULL) {
127 NOTREACHED();
128 // pUnk should not be used.
129 mt->pUnk->Release();
130 mt->pUnk = NULL;
131 }
132 }
133
134 // Delete a media type structure that was allocated on the heap.
135 // http://msdn.microsoft.com/en-us/library/dd375432(VS.85).aspx
136 void DeleteMediaType(AM_MEDIA_TYPE* mt) {
137 if (mt != NULL) {
138 FreeMediaType(mt);
139 CoTaskMemFree(mt);
140 }
141 }
142
143 // Helper structure used for comparing video capture capabilities.
144 struct ResolutionDiff {
145 int capability_index;
146 int diff_height;
147 int diff_width;
148 int diff_frame_rate;
149 media::VideoCaptureDevice::Format color;
150 };
151
152 bool CompareHeight(ResolutionDiff item1, ResolutionDiff item2) {
153 return abs(item1.diff_height) < abs(item2.diff_height);
154 }
155
156 bool CompareWidth(ResolutionDiff item1, ResolutionDiff item2) {
157 return abs(item1.diff_width) < abs(item2.diff_width);
158 }
159
160 bool CompareFrameRate(ResolutionDiff item1, ResolutionDiff item2) {
161 return abs(item1.diff_frame_rate) < abs(item2.diff_frame_rate);
162 }
163
164 bool CompareColor(ResolutionDiff item1, ResolutionDiff item2) {
165 return (item1.color < item2.color);
166 }
167
168 } // namespace
169
170 namespace media {
171
172 // Name of a fake DirectShow filter that exists on computers with
173 // GTalk installed.
174 static const char kGoogleCameraAdapter[] = "google camera adapter";
175
176 // Gets the names of all video capture devices connected to this computer.
177 void VideoCaptureDevice::GetDeviceNames(Names* device_names) {
178 DCHECK(device_names);
179
180 app::win::ScopedCOMInitializer coinit;
181 ScopedComPtr<ICreateDevEnum> dev_enum;
182 HRESULT hr = dev_enum.CreateInstance(CLSID_SystemDeviceEnum, NULL,
183 CLSCTX_INPROC);
184 if (FAILED(hr))
185 return;
186
187 ScopedComPtr<IEnumMoniker> enum_moniker;
188 hr = dev_enum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
189 enum_moniker.Receive(), 0);
190 if (hr != S_OK)
191 return;
192
193 device_names->clear();
194
195 // Enumerate all video capture devices.
196 ScopedComPtr<IMoniker> moniker;
197 int index = 0;
198 while (enum_moniker->Next(1, moniker.Receive(), NULL) == S_OK) {
199 Name device;
200 ScopedComPtr<IPropertyBag> prop_bag;
201 hr = moniker->BindToStorage(0, 0, IID_IPropertyBag, prop_bag.ReceiveVoid());
202 if (FAILED(hr)) {
203 moniker.Release();
204 continue;
205 }
206
207 // Find the description or friendly name.
208 ScopedVariant name;
209 hr = prop_bag->Read(L"Description", name.Receive(), 0);
210 if (FAILED(hr))
211 hr = prop_bag->Read(L"FriendlyName", name.Receive(), 0);
212
213 if (SUCCEEDED(hr) && name.type() == VT_BSTR) {
214 // Ignore all VFW drivers and the special Google Camera Adapter.
215 // Google Camera Adapter is not a real DirectShow camera device.
216 // VFW drivers are legacy Video for Windows drivers that cannot be used.
217 const wchar_t* str_ptr = V_BSTR(&name);
218 const int name_length = arraysize(kGoogleCameraAdapter) - 1;
219
220 if ((wcsstr(str_ptr, L"(VFW)") == NULL) &&
221 (wcslen(str_ptr) < name_length ||
tommi (sloooow) - chröme 2011/06/27 13:20:37 nit: use lstrlenW instead of wcslen since this is
Per K 2011/06/28 10:14:07 Done.
222 !LowerCaseEqualsASCII(str_ptr, str_ptr + name_length,
223 kGoogleCameraAdapter))) {
224 device.device_name = base::SysWideToUTF8(str_ptr);
225 name.Reset();
226 hr = prop_bag->Read(L"DevicePath", name.Receive(), 0);
227 if (FAILED(hr)) {
228 device.unique_id = device.device_name;
229 } else if (name.type() == VT_BSTR) {
230 device.unique_id = base::SysWideToUTF8(V_BSTR(&name));
231 }
232
233 device_names->push_back(device);
234 }
235 }
236 moniker.Release();
237 }
238 }
239
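The device-name filter above is the trickiest part of GetDeviceNames: a device is kept only if its name neither contains "(VFW)" nor starts with the fake Google Camera Adapter name. Here is a small, self-contained sketch of that predicate for illustration; LowerCaseEqualsASCII is Chromium-specific, so a plain case-insensitive comparison stands in for it, and the helper name ShouldExposeDevice is made up.

#include <cwctype>
#include <string>

// Returns true if a DirectShow device name should be exposed to callers:
// VFW drivers and the fake "google camera adapter" filter are skipped.
bool ShouldExposeDevice(const std::wstring& name) {
  static const wchar_t kAdapter[] = L"google camera adapter";
  const size_t kAdapterLength = sizeof(kAdapter) / sizeof(kAdapter[0]) - 1;

  if (name.find(L"(VFW)") != std::wstring::npos)
    return false;

  if (name.size() >= kAdapterLength) {
    bool matches_adapter = true;
    for (size_t i = 0; i < kAdapterLength; ++i) {
      if (kAdapter[i] != static_cast<wchar_t>(std::towlower(name[i]))) {
        matches_adapter = false;
        break;
      }
    }
    if (matches_adapter)
      return false;
  }
  return true;
}

In the code above the same test is written as a single condition, which is why the parenthesization around the length check matters.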
240 VideoCaptureDevice* VideoCaptureDevice::Create(const Name& device_name) {
241 VideoCaptureDeviceWin* self = new VideoCaptureDeviceWin(device_name);
242 if (!self || !self->Init()) {
243 delete self;
244 return NULL;
245 }
246 return self;
247 }
248
249 VideoCaptureDeviceWin::VideoCaptureDeviceWin(const Name& device_name)
250 : device_name_(device_name),
251 state_(kIdle) {
252 }
253
254 VideoCaptureDeviceWin::~VideoCaptureDeviceWin() {
255 if (media_control_)
256 media_control_->Stop();
257
258 if (graph_builder_) {
259 if (sink_filter_) {
260 graph_builder_->RemoveFilter(sink_filter_);
261 sink_filter_ = NULL;
262 }
263
264 if (capture_filter_)
265 graph_builder_->RemoveFilter(capture_filter_);
266
267 if (mjpg_filter_)
268 graph_builder_->RemoveFilter(mjpg_filter_);
269 }
270 }
271
272 bool VideoCaptureDeviceWin::Init() {
273 HRESULT hr = GetDeviceFilter(device_name_, capture_filter_.Receive());
274 if (!capture_filter_) {
275 DVLOG(2) << "Failed to create capture filter.";
276 return false;
277 }
278
279 hr = GetPin(capture_filter_, PINDIR_OUTPUT, PIN_CATEGORY_CAPTURE,
280 output_capture_pin_.Receive());
281 if (!output_capture_pin_) {
282 DVLOG(2) << "Failed to get capture output pin";
283 return false;
284 }
285
286 // Create the sink filter used for receiving captured frames.
287 sink_filter_ = new SinkFilter(this);
288 if (sink_filter_ == NULL) {
289 DVLOG(2) << "Failed to create sink filter";
290 return false;
291 }
292
293 input_sink_pin_ = sink_filter_->GetPin(0);
294
295 hr = graph_builder_.CreateInstance(CLSID_FilterGraph, NULL,
296 CLSCTX_INPROC_SERVER);
297 if (FAILED(hr)) {
298 DVLOG(2) << "Failed to create graph builder.";
299 return false;
300 }
301
302 hr = graph_builder_.QueryInterface(media_control_.Receive());
303 if (FAILED(hr)) {
304 DVLOG(2) << "Failed to get the media control interface.";
305 return false;
306 }
307
308 hr = graph_builder_->AddFilter(capture_filter_, NULL);
309 if (FAILED(hr)) {
310 DVLOG(2) << "Failed to add the capture device to the graph.";
311 return false;
312 }
313
314 hr = graph_builder_->AddFilter(sink_filter_, NULL);
315 if (FAILED(hr)) {
316 DVLOG(2) << "Failed to add the sink filter to the graph.";
317 return false;
318 }
319
320 return CreateCapabilityMap();
321 }
322
323 void VideoCaptureDeviceWin::Allocate(
324 int width,
325 int height,
326 int frame_rate,
327 VideoCaptureDevice::EventHandler* observer) {
328 if (state_ != kIdle)
329 return;
330
331 observer_ = observer;
332 // Get the camera capability that best matches the requested resolution.
333 const int capability_index = GetBestMatchedCapability(width, height,
334 frame_rate);
335 Capability capability = capabilities_[capability_index];
336
337 // Reduce the frame rate if the requested frame rate is lower
338 // than the capability's maximum.
339 if (capability.frame_rate > frame_rate)
340 capability.frame_rate = frame_rate;
341
342 AM_MEDIA_TYPE* pmt = NULL;
343 VIDEO_STREAM_CONFIG_CAPS caps;
344
345 ScopedComPtr<IAMStreamConfig> stream_config;
346 HRESULT hr = output_capture_pin_.QueryInterface(stream_config.Receive());
347 if (FAILED(hr)) {
348 SetErrorState("Can't get the Capture format settings");
349 return;
350 }
351
352 // Get the Windows capability from the capture device.
353 hr = stream_config->GetStreamCaps(capability_index, &pmt,
354 reinterpret_cast<BYTE*>(&caps));
355 if (SUCCEEDED(hr)) {
356 if (pmt->formattype == FORMAT_VideoInfo) {
357 VIDEOINFOHEADER* h = reinterpret_cast<VIDEOINFOHEADER*>(pmt->pbFormat);
358 if (capability.frame_rate > 0)
359 h->AvgTimePerFrame = kSecondsToReferenceTime / capability.frame_rate;
360 }
361 // Set the sink filter to request this capability.
362 sink_filter_->SetRequestedMediaCapability(capability);
363 // Order the capture device to use this capability.
364 hr = stream_config->SetFormat(pmt);
365 }
366
367 if (FAILED(hr))
368 SetErrorState("Failed to set capture device output format");
369
370 if (capability.color == VideoCaptureDevice::kMJPEG && !mjpg_filter_.get()) {
371 // Create MJPG filter if we need it.
372 hr = mjpg_filter_.CreateInstance(CLSID_MjpegDec, NULL, CLSCTX_INPROC);
373
374 if (SUCCEEDED(hr)) {
375 GetPin(mjpg_filter_, PINDIR_INPUT, GUID_NULL, input_mjpg_pin_.Receive());
376 GetPin(mjpg_filter_, PINDIR_OUTPUT, GUID_NULL,
377 output_mjpg_pin_.Receive());
378 hr = graph_builder_->AddFilter(mjpg_filter_, NULL);
379 }
380
381 if (FAILED(hr)) {
382 mjpg_filter_.Release();
383 input_mjpg_pin_.Release();
384 output_mjpg_pin_.Release();
385 }
386 }
387
388 if (capability.color == VideoCaptureDevice::kMJPEG && mjpg_filter_.get()) {
389 // Connect the camera to the MJPEG decoder.
390 hr = graph_builder_->ConnectDirect(output_capture_pin_, input_mjpg_pin_,
391 NULL);
392 // Connect the MJPEG decoder to the sink filter.
393 hr += graph_builder_->ConnectDirect(output_mjpg_pin_, input_sink_pin_,
394 NULL);
395 } else {
396 hr = graph_builder_->ConnectDirect(output_capture_pin_, input_sink_pin_,
397 NULL);
398 }
399
400 if (FAILED(hr)) {
401 SetErrorState("Failed to connect the Capture graph.");
402 return;
403 }
404
405 hr = media_control_->Pause();
406 if (FAILED(hr)) {
407 SetErrorState("Failed to Pause the Capture device. "
408 "Is it already occupied?");
409 return;
410 }
411
412 // Get the capability back from the sink filter after the filter has been
413 // connected.
414 const Capability& used_capability = sink_filter_->ResultingCapability();
415 observer_->OnFrameInfo(used_capability);
416
417 state_ = kAllocated;
418 }
419
420 void VideoCaptureDeviceWin::Start() {
421 if (state_ != kAllocated)
422 return;
423
424 HRESULT hr = media_control_->Run();
425 if (FAILED(hr)) {
426 SetErrorState("Failed to start the Capture device.");
427 return;
428 }
429
430 state_ = kCapturing;
431 }
432
433 void VideoCaptureDeviceWin::Stop() {
434 if (state_ != kCapturing)
435 return;
436
437 HRESULT hr = media_control_->Stop();
438 if (FAILED(hr)) {
439 SetErrorState("Failed to stop the capture graph.");
440 return;
441 }
442
443 state_ = kAllocated;
444 }
445
446 void VideoCaptureDeviceWin::DeAllocate() {
447 if (state_ == kIdle)
448 return;
449
450 HRESULT hr = media_control_->Stop();
451 graph_builder_->Disconnect(output_capture_pin_);
452 graph_builder_->Disconnect(input_sink_pin_);
453
454 // If the mjpg filter exists, disconnect it even if it has not been used.
455 if (mjpg_filter_) {
456 graph_builder_->Disconnect(input_mjpg_pin_);
457 graph_builder_->Disconnect(output_mjpg_pin_);
458 }
459
460 if (FAILED(hr)) {
461 SetErrorState("Failed to Stop the Capture device");
462 return;
463 }
464
465 state_ = kIdle;
466 }
467
468 const VideoCaptureDevice::Name& VideoCaptureDeviceWin::device_name() {
469 return device_name_;
470 }
471
472 // Implements SinkFilterObserver::FrameReceived.
473 void VideoCaptureDeviceWin::FrameReceived(const uint8* buffer,
474 int length) {
475 observer_->OnIncomingCapturedFrame(buffer, length, base::Time::Now());
476 }
477
478 bool VideoCaptureDeviceWin::CreateCapabilityMap() {
479 ScopedComPtr<IAMStreamConfig> stream_config;
480 HRESULT hr = output_capture_pin_.QueryInterface(stream_config.Receive());
481 if (FAILED(hr)) {
482 DVLOG(2) << "Failed to get IAMStreamConfig interface from "
483 "capture device";
484 return false;
485 }
486
487 // Get interface used for getting the frame rate.
488 ScopedComPtr<IAMVideoControl> video_control;
489 hr = capture_filter_.QueryInterface(video_control.Receive());
490 DVLOG_IF(2, FAILED(hr)) << "IAMVideoControl Interface NOT SUPPORTED";
491
492 AM_MEDIA_TYPE* media_type = NULL;
493 VIDEO_STREAM_CONFIG_CAPS caps;
494 int count, size;
495
496 hr = stream_config->GetNumberOfCapabilities(&count, &size);
497 if (FAILED(hr)) {
498 DVLOG(2) << "Failed to GetNumberOfCapabilities";
499 return false;
500 }
501
502 for (int i = 0; i < count; ++i) {
503 hr = stream_config->GetStreamCaps(i, &media_type,
504 reinterpret_cast<BYTE*>(&caps));
505 if (FAILED(hr)) {
506 DVLOG(2) << "Failed to GetStreamCaps";
507 return false;
508 }
509
510 if (media_type->majortype == MEDIATYPE_Video &&
511 media_type->formattype == FORMAT_VideoInfo) {
512 Capability capability;
513 REFERENCE_TIME time_per_frame = 0;
514
515 VIDEOINFOHEADER* h =
516 reinterpret_cast<VIDEOINFOHEADER*>(media_type->pbFormat);
517 capability.width = h->bmiHeader.biWidth;
518 capability.height = h->bmiHeader.biHeight;
519 time_per_frame = h->AvgTimePerFrame;
520
521 // Try to get the max frame rate from IAMVideoControl.
522 if (video_control.get()) {
523 LONGLONG* max_fps_ptr;
524 LONG list_size;
525 SIZE size;
526 size.cx = capability.width;
527 size.cy = capability.height;
528
529 // GetFrameRateList does not always return the maximum frame rate
530 // first, e.g. on the Logitech Notebook camera the returned list is
531 // reversed, possibly due to a bug in that API. We therefore cannot
532 // assume the first value is the max fps; instead scan the whole list
533 // for the smallest frame duration.
534 hr = video_control->GetFrameRateList(output_capture_pin_, i, size,
535 &list_size, &max_fps_ptr);
536
537 if (SUCCEEDED(hr) && list_size > 0) {
538 int min_time = *std::min_element(max_fps_ptr,
539 max_fps_ptr + list_size);
540 capability.frame_rate = (min_time > 0) ?
541 kSecondsToReferenceTime / min_time : 0;
542 } else {
543 // Get frame rate from VIDEOINFOHEADER.
544 capability.frame_rate = (time_per_frame > 0) ?
545 static_cast<int>(kSecondsToReferenceTime / time_per_frame) : 0;
546 }
547 } else {
548 // Get frame rate from VIDEOINFOHEADER since IAMVideoControl is
549 // not supported.
550 capability.frame_rate = (time_per_frame > 0) ?
551 static_cast<int>(kSecondsToReferenceTime / time_per_frame) : 0;
552 }
553
554 // We can't switch MEDIATYPE :~(.
555 if (media_type->subtype == kMediaSubTypeI420) {
556 capability.color = VideoCaptureDevice::kI420;
557 } else if (media_type->subtype == MEDIASUBTYPE_IYUV) {
558 // This is identical to kI420.
559 capability.color = VideoCaptureDevice::kI420;
560 } else if (media_type->subtype == MEDIASUBTYPE_RGB24) {
561 capability.color = VideoCaptureDevice::kRGB24;
562 } else if (media_type->subtype == MEDIASUBTYPE_YUY2) {
563 capability.color = VideoCaptureDevice::kYUY2;
564 } else if (media_type->subtype == MEDIASUBTYPE_MJPG) {
565 capability.color = VideoCaptureDevice::kMJPEG;
566 } else {
567 WCHAR guid_str[128];
568 StringFromGUID2(media_type->subtype, guid_str, arraysize(guid_str));
569 DVLOG(2) << "Device supports unknown media type " << guid_str;
570 continue;
571 }
572 capabilities_[i] = capability;
573 }
574 DeleteMediaType(media_type);
575 media_type = NULL;
576 }
577
578 return capabilities_.size() > 0;
579 }
580
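The frame-rate calculations above and in Allocate() divide kSecondsToReferenceTime by a frame duration. DirectShow expresses AvgTimePerFrame in REFERENCE_TIME units of 100 ns, so the constant is assumed here to be 10,000,000 (one second in those units); a minimal, standalone illustration of the conversion:

#include <cstdio>

// One second expressed in REFERENCE_TIME units (100 ns each). Assumed to
// match the kSecondsToReferenceTime constant used by the capture code.
static const long long kUnitsPerSecond = 10000000;

int main() {
  // A capability reporting AvgTimePerFrame = 333333 (i.e. 33.3333 ms per
  // frame) works out to 10,000,000 / 333,333 = 30 frames per second.
  const long long avg_time_per_frame = 333333;
  const int frame_rate =
      static_cast<int>(kUnitsPerSecond / avg_time_per_frame);
  printf("%d fps\n", frame_rate);  // Prints "30 fps".
  return 0;
}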
581 // Loops through the list of capabilities and returns an index of the best
582 // matching capability.
583 // The algorithm prioritizes height, width, frame rate and color format in that
584 // order.
585 int VideoCaptureDeviceWin::GetBestMatchedCapability(int requested_width,
586 int requested_height,
587 int requested_frame_rate) {
588 std::list<ResolutionDiff> diff_list;
589
590 // Loop through the candidates to create a list of differentials between the
591 // requested resolution and the camera capability.
592 for (CapabilityMap::iterator iterator = capabilities_.begin();
593 iterator != capabilities_.end();
594 ++iterator) {
595 Capability capability = iterator->second;
596
597 ResolutionDiff diff;
598 diff.capability_index = iterator->first;
599 diff.diff_width = capability.width - requested_width;
600 diff.diff_height = capability.height - requested_height;
601 diff.diff_frame_rate = capability.frame_rate - requested_frame_rate;
602 diff.color = capability.color;
603 diff_list.push_back(diff);
604 }
605
606 // Sort the best height candidates.
607 diff_list.sort(&CompareHeight);
608 int best_diff = diff_list.front().diff_height;
609 for (std::list<ResolutionDiff>::iterator it = diff_list.begin();
610 it != diff_list.end(); ++it) {
611 if (it->diff_height != best_diff) {
612 // Remove all candidates but the best.
613 diff_list.erase(it, diff_list.end());
614 break;
615 }
616 }
617
618 // Sort the best width candidates.
619 diff_list.sort(&CompareWidth);
620 best_diff = diff_list.front().diff_width;
621 for (std::list<ResolutionDiff>::iterator it = diff_list.begin();
622 it != diff_list.end(); ++it) {
623 if (it->diff_width != best_diff) {
624 // Remove all candidates but the best.
625 diff_list.erase(it, diff_list.end());
626 break;
627 }
628 }
629
630 // Sort the best frame rate candidates.
631 diff_list.sort(&CompareFrameRate);
632 best_diff = diff_list.front().diff_frame_rate;
633 for (std::list<ResolutionDiff>::iterator it = diff_list.begin();
634 it != diff_list.end(); ++it) {
635 if (it->diff_frame_rate != best_diff) {
636 diff_list.erase(it, diff_list.end());
637 break;
638 }
639 }
640
641 // Decide the best color format.
642 diff_list.sort(&CompareColor);
643 return diff_list.front().capability_index;
644 }
645
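GetBestMatchedCapability above narrows the candidates one criterion at a time: sort by the absolute difference for the highest-priority property, drop everything not tied with the best, then move on to the next property. A compilable sketch of that prune pattern, reduced to two criteria with made-up numbers (the Candidate struct, PruneByHeight and the values in main are illustrative only):

#include <cstdio>
#include <cstdlib>
#include <list>

// Simplified stand-in for ResolutionDiff.
struct Candidate {
  int index;
  int diff_height;
  int diff_width;
};

static bool CompareHeight(Candidate a, Candidate b) {
  return std::abs(a.diff_height) < std::abs(b.diff_height);
}

static bool CompareWidth(Candidate a, Candidate b) {
  return std::abs(a.diff_width) < std::abs(b.diff_width);
}

// Mirrors one prune step: sort by the criterion, then erase every
// candidate that is not tied with the best (front) element.
static void PruneByHeight(std::list<Candidate>* candidates) {
  candidates->sort(&CompareHeight);
  const int best_diff = candidates->front().diff_height;
  for (std::list<Candidate>::iterator it = candidates->begin();
       it != candidates->end(); ++it) {
    if (it->diff_height != best_diff) {
      candidates->erase(it, candidates->end());
      break;
    }
  }
}

int main() {
  std::list<Candidate> candidates;
  Candidate a = { 0, 0, -320 };  // Exact height, much narrower.
  Candidate b = { 1, 0, 0 };     // Exact height and width.
  Candidate c = { 2, -240, 0 };  // Exact width, wrong height.
  candidates.push_back(a);
  candidates.push_back(b);
  candidates.push_back(c);

  // Height has priority, so candidate 2 is dropped even though its width
  // is perfect; width then breaks the tie between candidates 0 and 1.
  PruneByHeight(&candidates);
  candidates.sort(&CompareWidth);
  printf("best candidate index: %d\n", candidates.front().index);  // "1"
  return 0;
}

After the final sort the front element is the winner, which is why the real function simply returns diff_list.front().capability_index.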
646 void VideoCaptureDeviceWin::SetErrorState(const char* reason) {
647 DLOG(ERROR) << reason;
648 state_ = kError;
649 observer_->OnError();
650 }
651
652 } // namespace media