| OLD | NEW |
| 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "media/video/capture/win/video_capture_device_win.h" | 5 #include "media/video/capture/win/video_capture_device_win.h" |
| 6 | 6 |
| 7 #include <algorithm> | 7 #include <algorithm> |
| 8 #include <list> | 8 #include <list> |
| 9 | 9 |
| 10 #include "base/string_util.h" | 10 #include "base/string_util.h" |
| 11 #include "base/sys_string_conversions.h" | 11 #include "base/sys_string_conversions.h" |
| 12 #include "base/win/scoped_variant.h" | 12 #include "base/win/scoped_variant.h" |
| 13 #include "base/win/windows_version.h" |
| 14 #include "media/video/capture/win/video_capture_device_mf_win.h" |
| 13 | 15 |
| 14 using base::win::ScopedComPtr; | 16 using base::win::ScopedComPtr; |
| 15 using base::win::ScopedVariant; | 17 using base::win::ScopedVariant; |
| 16 | 18 |
| 17 namespace { | 19 namespace { |
| 18 | 20 |
| 19 // Finds and creates a DirectShow Video Capture filter matching the device_name. | 21 // Finds and creates a DirectShow Video Capture filter matching the device_name. |
| 20 HRESULT GetDeviceFilter(const media::VideoCaptureDevice::Name& device_name, | 22 HRESULT GetDeviceFilter(const media::VideoCaptureDevice::Name& device_name, |
| 21 IBaseFilter** filter) { | 23 IBaseFilter** filter) { |
| 22 DCHECK(filter); | 24 DCHECK(filter); |
| (...skipping 112 matching lines...) | |
| 135 | 137 |
| 136 // Delete a media type structure that was allocated on the heap. | 138 // Delete a media type structure that was allocated on the heap. |
| 137 // http://msdn.microsoft.com/en-us/library/dd375432(VS.85).aspx | 139 // http://msdn.microsoft.com/en-us/library/dd375432(VS.85).aspx |
| 138 void DeleteMediaType(AM_MEDIA_TYPE* mt) { | 140 void DeleteMediaType(AM_MEDIA_TYPE* mt) { |
| 139 if (mt != NULL) { | 141 if (mt != NULL) { |
| 140 FreeMediaType(mt); | 142 FreeMediaType(mt); |
| 141 CoTaskMemFree(mt); | 143 CoTaskMemFree(mt); |
| 142 } | 144 } |
| 143 } | 145 } |
| 144 | 146 |
| 145 // Help structure used for comparing video capture capabilities. | |
| 146 struct ResolutionDiff { | |
| 147 int capability_index; | |
| 148 int diff_height; | |
| 149 int diff_width; | |
| 150 int diff_frame_rate; | |
| 151 media::VideoCaptureCapability::Format color; | |
| 152 }; | |
| 153 | |
| 154 bool CompareHeight(const ResolutionDiff& item1, const ResolutionDiff& item2) { | |
| 155 return abs(item1.diff_height) < abs(item2.diff_height); | |
| 156 } | |
| 157 | |
| 158 bool CompareWidth(const ResolutionDiff& item1, const ResolutionDiff& item2) { | |
| 159 return abs(item1.diff_width) < abs(item2.diff_width); | |
| 160 } | |
| 161 | |
| 162 bool CompareFrameRate(const ResolutionDiff& item1, | |
| 163 const ResolutionDiff& item2) { | |
| 164 return abs(item1.diff_frame_rate) < abs(item2.diff_frame_rate); | |
| 165 } | |
| 166 | |
| 167 bool CompareColor(const ResolutionDiff& item1, const ResolutionDiff& item2) { | |
| 168 return (item1.color < item2.color); | |
| 169 } | |
| 170 | |
| 171 } // namespace | 147 } // namespace |
| 172 | 148 |
| 173 namespace media { | 149 namespace media { |
| 174 | 150 |
| 175 // Name of a fake DirectShow filter that exist on computers with | 151 // static |
| 176 // GTalk installed. | 152 void VideoCaptureDevice::GetDeviceNames(Names* device_names) { |
| 177 static const char kGoogleCameraAdapter[] = "google camera adapter"; | 153 if (base::win::GetVersion() >= base::win::VERSION_VISTA) { |
| | 154 VideoCaptureDeviceMFWin::GetDeviceNames(device_names); |
| | 155 } else { |
| | 156 VideoCaptureDeviceWin::GetDeviceNames(device_names); |
| | 157 } |
| | 158 } |
| 178 | 159 |
| 179 // Gets the names of all video capture devices connected to this computer. | 160 // static |
| 180 void VideoCaptureDevice::GetDeviceNames(Names* device_names) { | 161 VideoCaptureDevice* VideoCaptureDevice::Create(const Name& device_name) { |
| | 162 VideoCaptureDevice* ret = NULL; |
| | 163 if (base::win::GetVersion() >= base::win::VERSION_VISTA) { |
| | 164 scoped_ptr<VideoCaptureDeviceMFWin> device( |
| | 165 new VideoCaptureDeviceMFWin(device_name)); |
| | 166 if (device->Init()) |
| | 167 ret = device.release(); |
| | 168 } else { |
| | 169 scoped_ptr<VideoCaptureDeviceWin> device( |
| | 170 new VideoCaptureDeviceWin(device_name)); |
| | 171 if (device->Init()) |
| | 172 ret = device.release(); |
| | 173 } |
| | 174 |
| | 175 return ret; |
| | 176 } |
| | 177 |
| | 178 // static |
| | 179 void VideoCaptureDeviceWin::GetDeviceNames(Names* device_names) { |
| 181 DCHECK(device_names); | 180 DCHECK(device_names); |
| 182 | 181 |
| 183 ScopedComPtr<ICreateDevEnum> dev_enum; | 182 ScopedComPtr<ICreateDevEnum> dev_enum; |
| 184 HRESULT hr = dev_enum.CreateInstance(CLSID_SystemDeviceEnum, NULL, | 183 HRESULT hr = dev_enum.CreateInstance(CLSID_SystemDeviceEnum, NULL, |
| 185 CLSCTX_INPROC); | 184 CLSCTX_INPROC); |
| 186 if (FAILED(hr)) | 185 if (FAILED(hr)) |
| 187 return; | 186 return; |
| 188 | 187 |
| 189 ScopedComPtr<IEnumMoniker> enum_moniker; | 188 ScopedComPtr<IEnumMoniker> enum_moniker; |
| 190 hr = dev_enum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, | 189 hr = dev_enum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory, |
| 191 enum_moniker.Receive(), 0); | 190 enum_moniker.Receive(), 0); |
| 192 // CreateClassEnumerator returns S_FALSE on some Windows OS | 191 // CreateClassEnumerator returns S_FALSE on some Windows OS |
| 193 // when no camera exists. Therefore the FAILED macro can't be used. | 192 // when no camera exists. Therefore the FAILED macro can't be used. |
| 194 if (hr != S_OK) | 193 if (hr != S_OK) |
| 195 return; | 194 return; |
| 196 | 195 |
| 197 device_names->clear(); | 196 device_names->clear(); |
| 198 | 197 |
| | 198 // Name of a fake DirectShow filter that exists on computers with |
| | 199 // GTalk installed. |
| | 200 static const char kGoogleCameraAdapter[] = "google camera adapter"; |
| | 201 |
| 199 // Enumerate all video capture devices. | 202 // Enumerate all video capture devices. |
| 200 ScopedComPtr<IMoniker> moniker; | 203 ScopedComPtr<IMoniker> moniker; |
| 201 int index = 0; | 204 int index = 0; |
| 202 while (enum_moniker->Next(1, moniker.Receive(), NULL) == S_OK) { | 205 while (enum_moniker->Next(1, moniker.Receive(), NULL) == S_OK) { |
| 203 Name device; | 206 Name device; |
| 204 ScopedComPtr<IPropertyBag> prop_bag; | 207 ScopedComPtr<IPropertyBag> prop_bag; |
| 205 hr = moniker->BindToStorage(0, 0, IID_IPropertyBag, prop_bag.ReceiveVoid()); | 208 hr = moniker->BindToStorage(0, 0, IID_IPropertyBag, prop_bag.ReceiveVoid()); |
| 206 if (FAILED(hr)) { | 209 if (FAILED(hr)) { |
| 207 moniker.Release(); | 210 moniker.Release(); |
| 208 continue; | 211 continue; |
| (...skipping 25 matching lines...) | |
| 234 device.unique_id = base::SysWideToUTF8(V_BSTR(&name)); | 237 device.unique_id = base::SysWideToUTF8(V_BSTR(&name)); |
| 235 } | 238 } |
| 236 | 239 |
| 237 device_names->push_back(device); | 240 device_names->push_back(device); |
| 238 } | 241 } |
| 239 } | 242 } |
| 240 moniker.Release(); | 243 moniker.Release(); |
| 241 } | 244 } |
| 242 } | 245 } |
| 243 | 246 |
| 244 VideoCaptureDevice* VideoCaptureDevice::Create(const Name& device_name) { | |
| 245 VideoCaptureDeviceWin* self = new VideoCaptureDeviceWin(device_name); | |
| 246 if (self && self->Init()) | |
| 247 return self; | |
| 248 | |
| 249 delete self; | |
| 250 return NULL; | |
| 251 } | |
| 252 | |
| 253 VideoCaptureDeviceWin::VideoCaptureDeviceWin(const Name& device_name) | 247 VideoCaptureDeviceWin::VideoCaptureDeviceWin(const Name& device_name) |
| 254 : device_name_(device_name), | 248 : device_name_(device_name), |
| 255 state_(kIdle), | 249 state_(kIdle), |
| 256 observer_(NULL) { | 250 observer_(NULL) { |
| 257 DetachFromThread(); | 251 DetachFromThread(); |
| 258 } | 252 } |
| 259 | 253 |
| 260 VideoCaptureDeviceWin::~VideoCaptureDeviceWin() { | 254 VideoCaptureDeviceWin::~VideoCaptureDeviceWin() { |
| 261 DCHECK(CalledOnValidThread()); | 255 DCHECK(CalledOnValidThread()); |
| 262 if (media_control_) | 256 if (media_control_) |
| (...skipping 68 matching lines...) | |
| 331 void VideoCaptureDeviceWin::Allocate( | 325 void VideoCaptureDeviceWin::Allocate( |
| 332 int width, | 326 int width, |
| 333 int height, | 327 int height, |
| 334 int frame_rate, | 328 int frame_rate, |
| 335 VideoCaptureDevice::EventHandler* observer) { | 329 VideoCaptureDevice::EventHandler* observer) { |
| 336 DCHECK(CalledOnValidThread()); | 330 DCHECK(CalledOnValidThread()); |
| 337 if (state_ != kIdle) | 331 if (state_ != kIdle) |
| 338 return; | 332 return; |
| 339 | 333 |
| 340 observer_ = observer; | 334 observer_ = observer; |
| | 335 |
| 341 // Get the camera capability that best match the requested resolution. | 336 // Get the camera capability that best match the requested resolution. |
| 342 const int capability_index = GetBestMatchedCapability(width, height, | 337 const VideoCaptureCapabilityWin& found_capability = |
| 343 frame_rate); | 338 capabilities_.GetBestMatchedCapability(width, height, frame_rate); |
| 344 VideoCaptureCapability capability = capabilities_[capability_index]; | 339 VideoCaptureCapability capability = found_capability; |
| 345 | 340 |
| 346 // Reduce the frame rate if the requested frame rate is lower | 341 // Reduce the frame rate if the requested frame rate is lower |
| 347 // than the capability. | 342 // than the capability. |
| 348 if (capability.frame_rate > frame_rate) | 343 if (capability.frame_rate > frame_rate) |
| 349 capability.frame_rate = frame_rate; | 344 capability.frame_rate = frame_rate; |
| 350 | 345 |
| 351 AM_MEDIA_TYPE* pmt = NULL; | 346 AM_MEDIA_TYPE* pmt = NULL; |
| 352 VIDEO_STREAM_CONFIG_CAPS caps; | 347 VIDEO_STREAM_CONFIG_CAPS caps; |
| 353 | 348 |
| 354 ScopedComPtr<IAMStreamConfig> stream_config; | 349 ScopedComPtr<IAMStreamConfig> stream_config; |
| 355 HRESULT hr = output_capture_pin_.QueryInterface(stream_config.Receive()); | 350 HRESULT hr = output_capture_pin_.QueryInterface(stream_config.Receive()); |
| 356 if (FAILED(hr)) { | 351 if (FAILED(hr)) { |
| 357 SetErrorState("Can't get the Capture format settings"); | 352 SetErrorState("Can't get the Capture format settings"); |
| 358 return; | 353 return; |
| 359 } | 354 } |
| 360 | 355 |
| 361 // Get the Windows capability from the capture device. | 356 // Get the Windows capability from the capture device. |
| 362 hr = stream_config->GetStreamCaps(capability_index, &pmt, | 357 hr = stream_config->GetStreamCaps(found_capability.stream_index, &pmt, |
| 363 reinterpret_cast<BYTE*>(&caps)); | 358 reinterpret_cast<BYTE*>(&caps)); |
| 364 if (SUCCEEDED(hr)) { | 359 if (SUCCEEDED(hr)) { |
| 365 if (pmt->formattype == FORMAT_VideoInfo) { | 360 if (pmt->formattype == FORMAT_VideoInfo) { |
| 366 VIDEOINFOHEADER* h = reinterpret_cast<VIDEOINFOHEADER*>(pmt->pbFormat); | 361 VIDEOINFOHEADER* h = reinterpret_cast<VIDEOINFOHEADER*>(pmt->pbFormat); |
| 367 if (capability.frame_rate > 0) | 362 if (capability.frame_rate > 0) |
| 368 h->AvgTimePerFrame = kSecondsToReferenceTime / capability.frame_rate; | 363 h->AvgTimePerFrame = kSecondsToReferenceTime / capability.frame_rate; |
| 369 } | 364 } |
| 370 // Set the sink filter to request this capability. | 365 // Set the sink filter to request this capability. |
| 371 sink_filter_->SetRequestedMediaCapability(capability); | 366 sink_filter_->SetRequestedMediaCapability(capability); |
| 372 // Order the capture device to use this capability. | 367 // Order the capture device to use this capability. |
| (...skipping 146 matching lines...) | |
| 519 for (int i = 0; i < count; ++i) { | 514 for (int i = 0; i < count; ++i) { |
| 520 hr = stream_config->GetStreamCaps(i, &media_type, | 515 hr = stream_config->GetStreamCaps(i, &media_type, |
| 521 reinterpret_cast<BYTE*>(&caps)); | 516 reinterpret_cast<BYTE*>(&caps)); |
| 522 if (FAILED(hr)) { | 517 if (FAILED(hr)) { |
| 523 DVLOG(2) << "Failed to GetStreamCaps"; | 518 DVLOG(2) << "Failed to GetStreamCaps"; |
| 524 return false; | 519 return false; |
| 525 } | 520 } |
| 526 | 521 |
| 527 if (media_type->majortype == MEDIATYPE_Video && | 522 if (media_type->majortype == MEDIATYPE_Video && |
| 528 media_type->formattype == FORMAT_VideoInfo) { | 523 media_type->formattype == FORMAT_VideoInfo) { |
| 529 VideoCaptureCapability capability; | 524 VideoCaptureCapabilityWin capability(i); |
| 530 REFERENCE_TIME time_per_frame = 0; | 525 REFERENCE_TIME time_per_frame = 0; |
| 531 | 526 |
| 532 VIDEOINFOHEADER* h = | 527 VIDEOINFOHEADER* h = |
| 533 reinterpret_cast<VIDEOINFOHEADER*>(media_type->pbFormat); | 528 reinterpret_cast<VIDEOINFOHEADER*>(media_type->pbFormat); |
| 534 capability.width = h->bmiHeader.biWidth; | 529 capability.width = h->bmiHeader.biWidth; |
| 535 capability.height = h->bmiHeader.biHeight; | 530 capability.height = h->bmiHeader.biHeight; |
| 536 time_per_frame = h->AvgTimePerFrame; | 531 time_per_frame = h->AvgTimePerFrame; |
| 537 | 532 |
| 538 // Try to get the max frame rate from IAMVideoControl. | 533 // Try to get the max frame rate from IAMVideoControl. |
| 539 if (video_control.get()) { | 534 if (video_control.get()) { |
| (...skipping 39 matching lines...) | |
| 579 } else if (media_type->subtype == MEDIASUBTYPE_YUY2) { | 574 } else if (media_type->subtype == MEDIASUBTYPE_YUY2) { |
| 580 capability.color = VideoCaptureCapability::kYUY2; | 575 capability.color = VideoCaptureCapability::kYUY2; |
| 581 } else if (media_type->subtype == MEDIASUBTYPE_MJPG) { | 576 } else if (media_type->subtype == MEDIASUBTYPE_MJPG) { |
| 582 capability.color = VideoCaptureCapability::kMJPEG; | 577 capability.color = VideoCaptureCapability::kMJPEG; |
| 583 } else { | 578 } else { |
| 584 WCHAR guid_str[128]; | 579 WCHAR guid_str[128]; |
| 585 StringFromGUID2(media_type->subtype, guid_str, arraysize(guid_str)); | 580 StringFromGUID2(media_type->subtype, guid_str, arraysize(guid_str)); |
| 586 DVLOG(2) << "Device support unknown media type " << guid_str; | 581 DVLOG(2) << "Device support unknown media type " << guid_str; |
| 587 continue; | 582 continue; |
| 588 } | 583 } |
| 589 capabilities_[i] = capability; | 584 capabilities_.Add(capability); |
| 590 } | 585 } |
| 591 DeleteMediaType(media_type); | 586 DeleteMediaType(media_type); |
| 592 media_type = NULL; | 587 media_type = NULL; |
| 593 } | 588 } |
| 594 | 589 |
| 595 return capabilities_.size() > 0; | 590 return !capabilities_.empty(); |
| 596 } | |
| 597 | |
| 598 // Loops through the list of capabilities and returns an index of the best | |
| 599 // matching capability. | |
| 600 // The algorithm prioritize height, width, frame rate and color format in that | |
| 601 // order. | |
| 602 int VideoCaptureDeviceWin::GetBestMatchedCapability(int requested_width, | |
| 603 int requested_height, | |
| 604 int requested_frame_rate) { | |
| 605 DCHECK(CalledOnValidThread()); | |
| 606 std::list<ResolutionDiff> diff_list; | |
| 607 | |
| 608 // Loop through the candidates to create a list of differentials between the | |
| 609 // requested resolution and the camera capability. | |
| 610 for (CapabilityMap::iterator iterator = capabilities_.begin(); | |
| 611 iterator != capabilities_.end(); | |
| 612 ++iterator) { | |
| 613 VideoCaptureCapability capability = iterator->second; | |
| 614 | |
| 615 ResolutionDiff diff; | |
| 616 diff.capability_index = iterator->first; | |
| 617 diff.diff_width = capability.width - requested_width; | |
| 618 diff.diff_height = capability.height - requested_height; | |
| 619 diff.diff_frame_rate = capability.frame_rate - requested_frame_rate; | |
| 620 diff.color = capability.color; | |
| 621 diff_list.push_back(diff); | |
| 622 } | |
| 623 | |
| 624 // Sort the best height candidates. | |
| 625 diff_list.sort(&CompareHeight); | |
| 626 int best_diff = diff_list.front().diff_height; | |
| 627 for (std::list<ResolutionDiff>::iterator it = diff_list.begin(); | |
| 628 it != diff_list.end(); ++it) { | |
| 629 if (it->diff_height != best_diff) { | |
| 630 // Remove all candidates but the best. | |
| 631 diff_list.erase(it, diff_list.end()); | |
| 632 break; | |
| 633 } | |
| 634 } | |
| 635 | |
| 636 // Sort the best width candidates. | |
| 637 diff_list.sort(&CompareWidth); | |
| 638 best_diff = diff_list.front().diff_width; | |
| 639 for (std::list<ResolutionDiff>::iterator it = diff_list.begin(); | |
| 640 it != diff_list.end(); ++it) { | |
| 641 if (it->diff_width != best_diff) { | |
| 642 // Remove all candidates but the best. | |
| 643 diff_list.erase(it, diff_list.end()); | |
| 644 break; | |
| 645 } | |
| 646 } | |
| 647 | |
| 648 // Sort the best frame rate candidates. | |
| 649 diff_list.sort(&CompareFrameRate); | |
| 650 best_diff = diff_list.front().diff_frame_rate; | |
| 651 for (std::list<ResolutionDiff>::iterator it = diff_list.begin(); | |
| 652 it != diff_list.end(); ++it) { | |
| 653 if (it->diff_frame_rate != best_diff) { | |
| 654 diff_list.erase(it, diff_list.end()); | |
| 655 break; | |
| 656 } | |
| 657 } | |
| 658 | |
| 659 // Decide the best color format. | |
| 660 diff_list.sort(&CompareColor); | |
| 661 return diff_list.front().capability_index; | |
| 662 } | 591 } |
| 663 | 592 |
| 664 void VideoCaptureDeviceWin::SetErrorState(const char* reason) { | 593 void VideoCaptureDeviceWin::SetErrorState(const char* reason) { |
| 665 DCHECK(CalledOnValidThread()); | 594 DCHECK(CalledOnValidThread()); |
| 666 DVLOG(1) << reason; | 595 DVLOG(1) << reason; |
| 667 state_ = kError; | 596 state_ = kError; |
| 668 observer_->OnError(); | 597 observer_->OnError(); |
| 669 } | 598 } |
| 670 | 599 |
| 671 } // namespace media | 600 } // namespace media |
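
A note on the `DeleteMediaType` helper near the top of the diff: it frees an `AM_MEDIA_TYPE` that `IAMStreamConfig::GetStreamCaps` allocated on the heap, delegating member cleanup to `FreeMediaType`, whose body sits in the collapsed hunk. The MSDN page linked in the comment documents the usual pattern; the sketch below shows that pattern under an illustrative name, and the actual helper in the collapsed lines may differ in signature and detail.

```cpp
// Sketch of the MSDN-documented cleanup pattern that DeleteMediaType relies
// on; FreeMediaTypeSketch is an illustrative name, not the Chromium helper.
#include <dshow.h>

void FreeMediaTypeSketch(AM_MEDIA_TYPE* mt) {
  if (mt->cbFormat != 0) {
    CoTaskMemFree(mt->pbFormat);  // Free the format block (e.g. VIDEOINFOHEADER).
    mt->cbFormat = 0;
    mt->pbFormat = NULL;
  }
  if (mt->pUnk != NULL) {
    mt->pUnk->Release();  // Release the optional attached interface.
    mt->pUnk = NULL;
  }
}
```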
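
The core of the change in `Allocate` and `CreateCapabilityMap` is that capability selection moves out of this class: the removed `GetBestMatchedCapability` member in the left column is replaced by `capabilities_.GetBestMatchedCapability(width, height, frame_rate)` over `VideoCaptureCapabilityWin` entries that carry their own `stream_index`. That list's implementation is not part of this file; the following is only a rough, self-contained sketch of the prioritization the removed code used (closest height first, then width, then frame rate, then the smallest color-format value), with placeholder names rather than the actual capability-list API.

```cpp
// Sketch only: reproduces the ordering used by the removed
// GetBestMatchedCapability. CapabilitySketch and BestMatchSketch are
// placeholder names, not Chromium types.
#include <cstdlib>
#include <vector>

struct CapabilitySketch {
  int stream_index;  // Index to hand back to IAMStreamConfig::GetStreamCaps.
  int width;
  int height;
  int frame_rate;
  int color;  // Lower value means a more preferred format.
};

const CapabilitySketch* BestMatchSketch(
    const std::vector<CapabilitySketch>& capabilities,
    int requested_width, int requested_height, int requested_frame_rate) {
  const CapabilitySketch* best = NULL;
  for (size_t i = 0; i < capabilities.size(); ++i) {
    const CapabilitySketch& candidate = capabilities[i];
    if (!best) {
      best = &candidate;
      continue;
    }
    // Lexicographic comparison mirroring the removed sort-and-prune passes:
    // height difference, then width, then frame rate, then color format.
    const int candidate_keys[] = {
        std::abs(candidate.height - requested_height),
        std::abs(candidate.width - requested_width),
        std::abs(candidate.frame_rate - requested_frame_rate),
        candidate.color};
    const int best_keys[] = {
        std::abs(best->height - requested_height),
        std::abs(best->width - requested_width),
        std::abs(best->frame_rate - requested_frame_rate),
        best->color};
    for (int k = 0; k < 4; ++k) {
      if (candidate_keys[k] != best_keys[k]) {
        if (candidate_keys[k] < best_keys[k])
          best = &candidate;
        break;
      }
    }
  }
  return best;  // NULL when |capabilities| is empty.
}
```

Comparing the keys lexicographically gives the same result as the removed code's successive sort-and-prune passes over a `ResolutionDiff` list, without building the intermediate list.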