| OLD | NEW |
| (Empty) |
| 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | |
| 2 // Use of this source code is governed by a BSD-style license that can be | |
| 3 // found in the LICENSE file. | |
| 4 | |
| 5 #include "media/capture/video/win/video_capture_device_win.h" | |
| 6 | |
| 7 #include <ks.h> | |
| 8 #include <ksmedia.h> | |
| 9 | |
| 10 #include <algorithm> | |
| 11 #include <list> | |
| 12 #include <utility> | |
| 13 | |
| 14 #include "base/macros.h" | |
| 15 #include "base/strings/sys_string_conversions.h" | |
| 16 #include "base/win/scoped_co_mem.h" | |
| 17 #include "base/win/scoped_variant.h" | |
| 18 #include "media/base/timestamp_constants.h" | |
| 19 | |
| 20 using base::win::ScopedCoMem; | |
| 21 using base::win::ScopedComPtr; | |
| 22 using base::win::ScopedVariant; | |
| 23 | |
| 24 namespace media { | |
| 25 | |
| 26 // Check if a Pin matches a category. | |
| 27 bool PinMatchesCategory(IPin* pin, REFGUID category) { | |
| 28 DCHECK(pin); | |
| 29 bool found = false; | |
| 30 ScopedComPtr<IKsPropertySet> ks_property; | |
| 31 HRESULT hr = ks_property.QueryFrom(pin); | |
| 32 if (SUCCEEDED(hr)) { | |
| 33 GUID pin_category; | |
| 34 DWORD return_value; | |
| 35 hr = ks_property->Get(AMPROPSETID_Pin, AMPROPERTY_PIN_CATEGORY, NULL, 0, | |
| 36 &pin_category, sizeof(pin_category), &return_value); | |
| 37 if (SUCCEEDED(hr) && (return_value == sizeof(pin_category))) { | |
| 38 found = (pin_category == category); | |
| 39 } | |
| 40 } | |
| 41 return found; | |
| 42 } | |
| 43 | |
| 44 // Check if a Pin's MediaType matches a given |major_type|. | |
| 45 bool PinMatchesMajorType(IPin* pin, REFGUID major_type) { | |
| 46 DCHECK(pin); | |
| 47 AM_MEDIA_TYPE connection_media_type; | |
| 48 const HRESULT hr = pin->ConnectionMediaType(&connection_media_type); | |
| 49 return SUCCEEDED(hr) && connection_media_type.majortype == major_type; | |
| 50 } | |
| 51 | |
// Finds and creates a DirectShow Video Capture filter matching the |device_id|.
// On success, ownership of the filter is transferred to the caller via
// |filter|; if no device matches, |*filter| is NULL and an error HRESULT is
// returned.
// static
HRESULT VideoCaptureDeviceWin::GetDeviceFilter(const std::string& device_id,
                                               IBaseFilter** filter) {
  DCHECK(filter);

  ScopedComPtr<ICreateDevEnum> dev_enum;
  HRESULT hr =
      dev_enum.CreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC);
  if (FAILED(hr))
    return hr;

  ScopedComPtr<IEnumMoniker> enum_moniker;
  hr = dev_enum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
                                       enum_moniker.Receive(), 0);
  // CreateClassEnumerator returns S_FALSE on some Windows OS
  // when no camera exist. Therefore the FAILED macro can't be used.
  if (hr != S_OK)
    return hr;

  // Walk every video-input device moniker looking for one whose identifying
  // property equals |device_id|.
  ScopedComPtr<IBaseFilter> capture_filter;
  for (ScopedComPtr<IMoniker> moniker;
       enum_moniker->Next(1, moniker.Receive(), NULL) == S_OK;
       moniker.Release()) {
    ScopedComPtr<IPropertyBag> prop_bag;
    hr = moniker->BindToStorage(0, 0, IID_IPropertyBag, prop_bag.ReceiveVoid());
    if (FAILED(hr))
      continue;

    // Find |device_id| via DevicePath, Description or FriendlyName, whichever
    // is available first and is a VT_BSTR (i.e. String) type.
    static const wchar_t* kPropertyNames[] = {
        L"DevicePath", L"Description", L"FriendlyName"};

    ScopedVariant name;
    for (const auto* property_name : kPropertyNames) {
      prop_bag->Read(property_name, name.Receive(), 0);
      if (name.type() == VT_BSTR)
        break;
    }

    if (name.type() == VT_BSTR) {
      const std::string device_path(base::SysWideToUTF8(V_BSTR(name.ptr())));
      if (device_path.compare(device_id) == 0) {
        // We have found the requested device
        hr = moniker->BindToObject(0, 0, IID_IBaseFilter,
                                   capture_filter.ReceiveVoid());
        DLOG_IF(ERROR, FAILED(hr)) << "Failed to bind camera filter: "
                                   << logging::SystemErrorCodeToString(hr);
        break;
      }
    }
  }

  // Transfer ownership to the caller. If enumeration succeeded but nothing
  // matched, report a not-found error instead of a stale success code.
  *filter = capture_filter.Detach();
  if (!*filter && SUCCEEDED(hr))
    hr = HRESULT_FROM_WIN32(ERROR_NOT_FOUND);

  return hr;
}
| 112 | |
// Finds an IPin on an IBaseFilter given the direction, Category and/or Major
// Type. If either |category| or |major_type| are GUID_NULL, they are ignored.
// Returns an empty ScopedComPtr when no matching pin exists or enumeration
// fails.
// static
ScopedComPtr<IPin> VideoCaptureDeviceWin::GetPin(IBaseFilter* filter,
                                                 PIN_DIRECTION pin_dir,
                                                 REFGUID category,
                                                 REFGUID major_type) {
  ScopedComPtr<IPin> pin;
  ScopedComPtr<IEnumPins> pin_enum;
  HRESULT hr = filter->EnumPins(pin_enum.Receive());
  if (pin_enum.get() == NULL)
    return pin;  // Empty pin signals failure to the caller.

  // Get first unconnected pin.
  hr = pin_enum->Reset();  // set to first pin
  while ((hr = pin_enum->Next(1, pin.Receive(), NULL)) == S_OK) {
    // -1 is not a valid PIN_DIRECTION value; it flags a failed QueryDirection
    // so a garbage direction can never accidentally match |pin_dir|.
    PIN_DIRECTION this_pin_dir = static_cast<PIN_DIRECTION>(-1);
    hr = pin->QueryDirection(&this_pin_dir);
    if (pin_dir == this_pin_dir) {
      if ((category == GUID_NULL || PinMatchesCategory(pin.get(), category)) &&
          (major_type == GUID_NULL ||
           PinMatchesMajorType(pin.get(), major_type))) {
        return pin;
      }
    }
    // Not a match: release so the next Next() call can Receive() again.
    pin.Release();
  }

  DCHECK(!pin.get());
  return pin;
}
| 144 | |
| 145 // static | |
| 146 VideoPixelFormat | |
| 147 VideoCaptureDeviceWin::TranslateMediaSubtypeToPixelFormat( | |
| 148 const GUID& sub_type) { | |
| 149 static struct { | |
| 150 const GUID& sub_type; | |
| 151 VideoPixelFormat format; | |
| 152 } const kMediaSubtypeToPixelFormatCorrespondence[] = { | |
| 153 {kMediaSubTypeI420, PIXEL_FORMAT_I420}, | |
| 154 {MEDIASUBTYPE_IYUV, PIXEL_FORMAT_I420}, | |
| 155 {MEDIASUBTYPE_RGB24, PIXEL_FORMAT_RGB24}, | |
| 156 {MEDIASUBTYPE_YUY2, PIXEL_FORMAT_YUY2}, | |
| 157 {MEDIASUBTYPE_MJPG, PIXEL_FORMAT_MJPEG}, | |
| 158 {MEDIASUBTYPE_UYVY, PIXEL_FORMAT_UYVY}, | |
| 159 {MEDIASUBTYPE_ARGB32, PIXEL_FORMAT_ARGB}, | |
| 160 {kMediaSubTypeHDYC, PIXEL_FORMAT_UYVY}, | |
| 161 }; | |
| 162 for (const auto& pixel_format : kMediaSubtypeToPixelFormatCorrespondence) { | |
| 163 if (sub_type == pixel_format.sub_type) | |
| 164 return pixel_format.format; | |
| 165 } | |
| 166 #ifndef NDEBUG | |
| 167 WCHAR guid_str[128]; | |
| 168 StringFromGUID2(sub_type, guid_str, arraysize(guid_str)); | |
| 169 DVLOG(2) << "Device (also) supports an unknown media type " << guid_str; | |
| 170 #endif | |
| 171 return PIXEL_FORMAT_UNKNOWN; | |
| 172 } | |
| 173 | |
| 174 void VideoCaptureDeviceWin::ScopedMediaType::Free() { | |
| 175 if (!media_type_) | |
| 176 return; | |
| 177 | |
| 178 DeleteMediaType(media_type_); | |
| 179 media_type_ = NULL; | |
| 180 } | |
| 181 | |
// Returns the address of the wrapped pointer so a COM API can fill it in.
// The wrapper must currently be empty, otherwise the previous media type
// would be leaked (hence the DCHECK).
AM_MEDIA_TYPE** VideoCaptureDeviceWin::ScopedMediaType::Receive() {
  DCHECK(!media_type_);
  return &media_type_;
}
| 186 | |
// Release the format block for a media type.
// http://msdn.microsoft.com/en-us/library/dd375432(VS.85).aspx
// Frees only the members of |mt|, not |mt| itself (see DeleteMediaType).
void VideoCaptureDeviceWin::ScopedMediaType::FreeMediaType(AM_MEDIA_TYPE* mt) {
  if (mt->cbFormat != 0) {
    CoTaskMemFree(mt->pbFormat);
    mt->cbFormat = 0;
    mt->pbFormat = NULL;
  }
  if (mt->pUnk != NULL) {
    // This code path is never expected in this file's usage; the release is
    // kept only as defensive cleanup.
    NOTREACHED();
    // pUnk should not be used.
    mt->pUnk->Release();
    mt->pUnk = NULL;
  }
}
| 202 | |
| 203 // Delete a media type structure that was allocated on the heap. | |
| 204 // http://msdn.microsoft.com/en-us/library/dd375432(VS.85).aspx | |
| 205 void VideoCaptureDeviceWin::ScopedMediaType::DeleteMediaType( | |
| 206 AM_MEDIA_TYPE* mt) { | |
| 207 if (mt != NULL) { | |
| 208 FreeMediaType(mt); | |
| 209 CoTaskMemFree(mt); | |
| 210 } | |
| 211 } | |
| 212 | |
// Constructs an idle device; no COM objects are created until Init().
VideoCaptureDeviceWin::VideoCaptureDeviceWin(const Name& device_name)
    : device_name_(device_name), state_(kIdle) {
  // TODO(mcasas): Check that CoInitializeEx() has been called with the
  // appropriate Apartment model, i.e., Single Threaded.
}
| 218 | |
// Tears down the capture graph. Order matters: stop the graph first, then
// remove the filters from it, then release the graph builders.
VideoCaptureDeviceWin::~VideoCaptureDeviceWin() {
  DCHECK(thread_checker_.CalledOnValidThread());
  if (media_control_.get())
    media_control_->Stop();

  if (graph_builder_.get()) {
    if (sink_filter_.get()) {
      graph_builder_->RemoveFilter(sink_filter_.get());
      sink_filter_ = NULL;
    }

    if (capture_filter_.get())
      graph_builder_->RemoveFilter(capture_filter_.get());
  }

  if (capture_graph_builder_.get())
    capture_graph_builder_.Release();
}
| 237 | |
// Builds the DirectShow capture graph for this device: source (capture)
// filter, sink filter, graph/capture-graph builders and media control, then
// enumerates the device's capabilities. Returns false (with logging) on any
// failure; the object must not be used for capture afterwards.
bool VideoCaptureDeviceWin::Init() {
  DCHECK(thread_checker_.CalledOnValidThread());
  HRESULT hr;

  hr = GetDeviceFilter(device_name_.id(), capture_filter_.Receive());

  if (!capture_filter_.get()) {
    DLOG(ERROR) << "Failed to create capture filter: "
                << logging::SystemErrorCodeToString(hr);
    return false;
  }

  // The pin on the source filter that produces captured frames.
  output_capture_pin_ = GetPin(capture_filter_.get(), PINDIR_OUTPUT,
                               PIN_CATEGORY_CAPTURE, GUID_NULL);
  if (!output_capture_pin_.get()) {
    DLOG(ERROR) << "Failed to get capture output pin";
    return false;
  }

  // Create the sink filter used for receiving Captured frames.
  sink_filter_ = new SinkFilter(this);
  if (sink_filter_.get() == NULL) {
    DLOG(ERROR) << "Failed to create sink filter";
    return false;
  }

  input_sink_pin_ = sink_filter_->GetPin(0);

  hr = graph_builder_.CreateInstance(CLSID_FilterGraph, NULL,
                                     CLSCTX_INPROC_SERVER);
  if (FAILED(hr)) {
    DLOG(ERROR) << "Failed to create graph builder: "
                << logging::SystemErrorCodeToString(hr);
    return false;
  }

  hr = capture_graph_builder_.CreateInstance(CLSID_CaptureGraphBuilder2, NULL,
                                             CLSCTX_INPROC);
  if (FAILED(hr)) {
    DLOG(ERROR) << "Failed to create the Capture Graph Builder: "
                << logging::SystemErrorCodeToString(hr);
    return false;
  }

  hr = capture_graph_builder_->SetFiltergraph(graph_builder_.get());
  if (FAILED(hr)) {
    DLOG(ERROR) << "Failed to give graph to capture graph builder: "
                << logging::SystemErrorCodeToString(hr);
    return false;
  }

  // IMediaControl is used later by AllocateAndStart()/StopAndDeAllocate() to
  // run/pause/stop the graph.
  hr = graph_builder_.QueryInterface(media_control_.Receive());
  if (FAILED(hr)) {
    DLOG(ERROR) << "Failed to create media control builder: "
                << logging::SystemErrorCodeToString(hr);
    return false;
  }

  hr = graph_builder_->AddFilter(capture_filter_.get(), NULL);
  if (FAILED(hr)) {
    DLOG(ERROR) << "Failed to add the capture device to the graph: "
                << logging::SystemErrorCodeToString(hr);
    return false;
  }

  hr = graph_builder_->AddFilter(sink_filter_.get(), NULL);
  if (FAILED(hr)) {
    DLOG(ERROR) << "Failed to add the sink filter to the graph: "
                << logging::SystemErrorCodeToString(hr);
    return false;
  }

  // The following code builds the upstream portions of the graph,
  // for example if a capture device uses a Windows Driver Model (WDM)
  // driver, the graph may require certain filters upstream from the
  // WDM Video Capture filter, such as a TV Tuner filter or an Analog
  // Video Crossbar filter. We try using the more prevalent
  // MEDIATYPE_Interleaved first.
  base::win::ScopedComPtr<IAMStreamConfig> stream_config;

  hr = capture_graph_builder_->FindInterface(
      &PIN_CATEGORY_CAPTURE, &MEDIATYPE_Interleaved, capture_filter_.get(),
      IID_IAMStreamConfig, (void**)stream_config.Receive());
  if (FAILED(hr)) {
    // No interleaved pin; fall back to a plain video capture pin. A failure
    // here is only logged, not fatal, since |stream_config| is not used below.
    hr = capture_graph_builder_->FindInterface(
        &PIN_CATEGORY_CAPTURE, &MEDIATYPE_Video, capture_filter_.get(),
        IID_IAMStreamConfig, (void**)stream_config.Receive());
    DLOG_IF(ERROR, FAILED(hr)) << "Failed to find CapFilter:IAMStreamConfig: "
                               << logging::SystemErrorCodeToString(hr);
  }

  return CreateCapabilityMap();
}
| 331 | |
// Configures the device for the capability that best matches |params|,
// connects the capture graph and starts it running. On success, |state_|
// transitions kIdle -> kCapturing; any failure reports through
// SetErrorState() and leaves the device stopped.
void VideoCaptureDeviceWin::AllocateAndStart(
    const VideoCaptureParams& params,
    std::unique_ptr<VideoCaptureDevice::Client> client) {
  DCHECK(thread_checker_.CalledOnValidThread());
  // Ignore the request unless the device is idle (not capturing, not errored).
  if (state_ != kIdle)
    return;

  client_ = std::move(client);

  // Get the camera capability that best match the requested format.
  const CapabilityWin found_capability =
      GetBestMatchedCapability(params.requested_format, capabilities_);

  // Reduce the frame rate if the requested frame rate is lower
  // than the capability.
  const float frame_rate =
      std::min(params.requested_format.frame_rate,
               found_capability.supported_format.frame_rate);

  ScopedComPtr<IAMStreamConfig> stream_config;
  HRESULT hr = output_capture_pin_.QueryInterface(stream_config.Receive());
  if (FAILED(hr)) {
    SetErrorState(FROM_HERE, "Can't get the Capture format settings");
    return;
  }

  int count = 0, size = 0;
  hr = stream_config->GetNumberOfCapabilities(&count, &size);
  if (FAILED(hr)) {
    SetErrorState(FROM_HERE, "Failed to GetNumberOfCapabilities");
    return;
  }

  // |caps| is scratch space sized per GetNumberOfCapabilities(), required by
  // GetStreamCaps() below.
  std::unique_ptr<BYTE[]> caps(new BYTE[size]);
  ScopedMediaType media_type;

  // Get the windows capability from the capture device.
  // GetStreamCaps can return S_FALSE which we consider an error. Therefore the
  // FAILED macro can't be used.
  hr = stream_config->GetStreamCaps(found_capability.stream_index,
                                    media_type.Receive(), caps.get());
  if (hr != S_OK) {
    SetErrorState(FROM_HERE, "Failed to get capture device capabilities");
    return;
  }
  if (media_type->formattype == FORMAT_VideoInfo) {
    VIDEOINFOHEADER* h =
        reinterpret_cast<VIDEOINFOHEADER*>(media_type->pbFormat);
    // AvgTimePerFrame is in 100ns reference-time units; only set it for a
    // positive frame rate to avoid a divide-by-zero.
    if (frame_rate > 0)
      h->AvgTimePerFrame = kSecondsToReferenceTime / frame_rate;
  }
  // Set the sink filter to request this format.
  sink_filter_->SetRequestedMediaFormat(
      found_capability.supported_format.pixel_format, frame_rate,
      found_capability.info_header);
  // Order the capture device to use this format.
  hr = stream_config->SetFormat(media_type.get());
  if (FAILED(hr)) {
    // TODO(grunell): Log the error. http://crbug.com/405016.
    SetErrorState(FROM_HERE, "Failed to set capture device output format");
    return;
  }

  SetAntiFlickerInCaptureFilter(params);

  if (media_type->subtype == kMediaSubTypeHDYC) {
    // HDYC pixel format, used by the DeckLink capture card, needs an AVI
    // decompressor filter after source, let |graph_builder_| add it.
    hr = graph_builder_->Connect(output_capture_pin_.get(),
                                 input_sink_pin_.get());
  } else {
    hr = graph_builder_->ConnectDirect(output_capture_pin_.get(),
                                       input_sink_pin_.get(), NULL);
  }

  if (FAILED(hr)) {
    SetErrorState(FROM_HERE, "Failed to connect the Capture graph.");
    return;
  }

  hr = media_control_->Pause();
  if (FAILED(hr)) {
    SetErrorState(
        FROM_HERE,
        "Failed to pause the Capture device, is it already occupied?");
    return;
  }

  // Get the format back from the sink filter after the filter have been
  // connected.
  capture_format_ = sink_filter_->ResultingFormat();

  // Start capturing.
  hr = media_control_->Run();
  if (FAILED(hr)) {
    SetErrorState(FROM_HERE, "Failed to start the Capture device.");
    return;
  }

  state_ = kCapturing;
}
| 433 | |
// Stops the running capture graph, disconnects the capture/sink pins and
// returns the device to the idle state. No-op unless currently capturing.
void VideoCaptureDeviceWin::StopAndDeAllocate() {
  DCHECK(thread_checker_.CalledOnValidThread());
  if (state_ != kCapturing)
    return;

  HRESULT hr = media_control_->Stop();
  if (FAILED(hr)) {
    // On failure the state becomes kError (via SetErrorState), not kIdle.
    SetErrorState(FROM_HERE, "Failed to stop the capture graph.");
    return;
  }

  graph_builder_->Disconnect(output_capture_pin_.get());
  graph_builder_->Disconnect(input_sink_pin_.get());

  // Drop the client last; after this no more frames can be delivered.
  client_.reset();
  state_ = kIdle;
}
| 451 | |
// Implements SinkFilterObserver::SinkFilterObserver.
// Forwards a captured frame to |client_|.
// NOTE(review): unlike the other methods, there is no thread-checker here;
// this appears to be invoked from the DirectShow streaming path — confirm
// against SinkFilter before relying on thread affinity.
void VideoCaptureDeviceWin::FrameReceived(const uint8_t* buffer,
                                          int length,
                                          base::TimeDelta timestamp) {
  // Record the arrival time of the very first frame so relative timestamps
  // can be synthesized below when the platform provides none.
  if (first_ref_time_.is_null())
    first_ref_time_ = base::TimeTicks::Now();

  // There is a chance that the platform does not provide us with the timestamp,
  // in which case, we use reference time to calculate a timestamp.
  if (timestamp == media::kNoTimestamp())
    timestamp = base::TimeTicks::Now() - first_ref_time_;

  client_->OnIncomingCapturedData(buffer, length, capture_format_, 0,
                                  base::TimeTicks::Now(), timestamp);
}
| 467 | |
| 468 bool VideoCaptureDeviceWin::CreateCapabilityMap() { | |
| 469 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 470 ScopedComPtr<IAMStreamConfig> stream_config; | |
| 471 HRESULT hr = output_capture_pin_.QueryInterface(stream_config.Receive()); | |
| 472 if (FAILED(hr)) { | |
| 473 DPLOG(ERROR) << "Failed to get IAMStreamConfig interface from " | |
| 474 "capture device: " << logging::SystemErrorCodeToString(hr); | |
| 475 return false; | |
| 476 } | |
| 477 | |
| 478 // Get interface used for getting the frame rate. | |
| 479 ScopedComPtr<IAMVideoControl> video_control; | |
| 480 hr = capture_filter_.QueryInterface(video_control.Receive()); | |
| 481 DLOG_IF(WARNING, FAILED(hr)) << "IAMVideoControl Interface NOT SUPPORTED: " | |
| 482 << logging::SystemErrorCodeToString(hr); | |
| 483 | |
| 484 int count = 0, size = 0; | |
| 485 hr = stream_config->GetNumberOfCapabilities(&count, &size); | |
| 486 if (FAILED(hr)) { | |
| 487 DLOG(ERROR) << "Failed to GetNumberOfCapabilities: " | |
| 488 << logging::SystemErrorCodeToString(hr); | |
| 489 return false; | |
| 490 } | |
| 491 | |
| 492 std::unique_ptr<BYTE[]> caps(new BYTE[size]); | |
| 493 for (int stream_index = 0; stream_index < count; ++stream_index) { | |
| 494 ScopedMediaType media_type; | |
| 495 hr = stream_config->GetStreamCaps(stream_index, media_type.Receive(), | |
| 496 caps.get()); | |
| 497 // GetStreamCaps() may return S_FALSE, so don't use FAILED() or SUCCEED() | |
| 498 // macros here since they'll trigger incorrectly. | |
| 499 if (hr != S_OK) { | |
| 500 DLOG(ERROR) << "Failed to GetStreamCaps: " | |
| 501 << logging::SystemErrorCodeToString(hr); | |
| 502 return false; | |
| 503 } | |
| 504 | |
| 505 if (media_type->majortype == MEDIATYPE_Video && | |
| 506 media_type->formattype == FORMAT_VideoInfo) { | |
| 507 VideoCaptureFormat format; | |
| 508 format.pixel_format = | |
| 509 TranslateMediaSubtypeToPixelFormat(media_type->subtype); | |
| 510 if (format.pixel_format == PIXEL_FORMAT_UNKNOWN) | |
| 511 continue; | |
| 512 | |
| 513 VIDEOINFOHEADER* h = | |
| 514 reinterpret_cast<VIDEOINFOHEADER*>(media_type->pbFormat); | |
| 515 format.frame_size.SetSize(h->bmiHeader.biWidth, h->bmiHeader.biHeight); | |
| 516 | |
| 517 // Try to get a better |time_per_frame| from IAMVideoControl. If not, use | |
| 518 // the value from VIDEOINFOHEADER. | |
| 519 REFERENCE_TIME time_per_frame = h->AvgTimePerFrame; | |
| 520 if (video_control.get()) { | |
| 521 ScopedCoMem<LONGLONG> max_fps; | |
| 522 LONG list_size = 0; | |
| 523 const SIZE size = {format.frame_size.width(), | |
| 524 format.frame_size.height()}; | |
| 525 hr = video_control->GetFrameRateList(output_capture_pin_.get(), | |
| 526 stream_index, size, &list_size, | |
| 527 &max_fps); | |
| 528 // Can't assume the first value will return the max fps. | |
| 529 // Sometimes |list_size| will be > 0, but max_fps will be NULL. Some | |
| 530 // drivers may return an HRESULT of S_FALSE which SUCCEEDED() translates | |
| 531 // into success, so explicitly check S_OK. See http://crbug.com/306237. | |
| 532 if (hr == S_OK && list_size > 0 && max_fps) { | |
| 533 time_per_frame = | |
| 534 *std::min_element(max_fps.get(), max_fps.get() + list_size); | |
| 535 } | |
| 536 } | |
| 537 | |
| 538 format.frame_rate = | |
| 539 (time_per_frame > 0) | |
| 540 ? (kSecondsToReferenceTime / static_cast<float>(time_per_frame)) | |
| 541 : 0.0; | |
| 542 | |
| 543 capabilities_.emplace_back(stream_index, format, h->bmiHeader); | |
| 544 } | |
| 545 } | |
| 546 | |
| 547 return !capabilities_.empty(); | |
| 548 } | |
| 549 | |
// Set the power line frequency removal in |capture_filter_| if available.
// Only 50 Hz and 60 Hz are configurable; any other requested frequency is
// ignored. Failures are logged but not fatal — anti-flicker is best-effort.
void VideoCaptureDeviceWin::SetAntiFlickerInCaptureFilter(
    const VideoCaptureParams& params) {
  const PowerLineFrequency power_line_frequency = GetPowerLineFrequency(params);
  if (power_line_frequency != media::PowerLineFrequency::FREQUENCY_50HZ &&
      power_line_frequency != media::PowerLineFrequency::FREQUENCY_60HZ) {
    return;
  }
  ScopedComPtr<IKsPropertySet> ks_propset;
  DWORD type_support = 0;
  HRESULT hr;
  // Proceed only if the filter exposes the VIDEOPROCAMP property set AND
  // advertises SET support for the power-line-frequency property.
  if (SUCCEEDED(hr = ks_propset.QueryFrom(capture_filter_.get())) &&
      SUCCEEDED(hr = ks_propset->QuerySupported(
                    PROPSETID_VIDCAP_VIDEOPROCAMP,
                    KSPROPERTY_VIDEOPROCAMP_POWERLINE_FREQUENCY,
                    &type_support)) &&
      (type_support & KSPROPERTY_SUPPORT_SET)) {
    KSPROPERTY_VIDEOPROCAMP_S data = {};
    data.Property.Set = PROPSETID_VIDCAP_VIDEOPROCAMP;
    data.Property.Id = KSPROPERTY_VIDEOPROCAMP_POWERLINE_FREQUENCY;
    data.Property.Flags = KSPROPERTY_TYPE_SET;
    // Property value: 1 selects 50 Hz, 2 selects 60 Hz.
    data.Value =
        (power_line_frequency == media::PowerLineFrequency::FREQUENCY_50HZ) ? 1
                                                                            : 2;
    data.Flags = KSPROPERTY_VIDEOPROCAMP_FLAGS_MANUAL;
    hr = ks_propset->Set(PROPSETID_VIDCAP_VIDEOPROCAMP,
                         KSPROPERTY_VIDEOPROCAMP_POWERLINE_FREQUENCY, &data,
                         sizeof(data), &data, sizeof(data));
    DLOG_IF(ERROR, FAILED(hr)) << "Anti-flicker setting failed: "
                               << logging::SystemErrorCodeToString(hr);
    DVLOG_IF(2, SUCCEEDED(hr)) << "Anti-flicker set correctly.";
  } else {
    DVLOG(2) << "Anti-flicker setting not supported.";
  }
}
| 585 | |
// Transitions the device to the error state and notifies the client with the
// human-readable |reason|. The device cannot capture again afterwards.
void VideoCaptureDeviceWin::SetErrorState(
    const tracked_objects::Location& from_here,
    const std::string& reason) {
  DCHECK(thread_checker_.CalledOnValidThread());
  state_ = kError;
  client_->OnError(from_here, reason);
}
| 593 } // namespace media | |
| OLD | NEW |