| OLD | NEW |
| 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "media/capture/video/win/video_capture_device_win.h" | 5 #include "media/capture/video/win/video_capture_device_win.h" |
| 6 | 6 |
| 7 #include <ks.h> | 7 #include <ks.h> |
| 8 #include <ksmedia.h> | 8 #include <ksmedia.h> |
| 9 #include <objbase.h> | 9 #include <objbase.h> |
| 10 #include <vidcap.h> | |
| 11 | 10 |
| 12 #include <algorithm> | 11 #include <algorithm> |
| 13 #include <list> | 12 #include <list> |
| 14 #include <utility> | 13 #include <utility> |
| 15 | 14 |
| 16 #include "base/macros.h" | 15 #include "base/macros.h" |
| 17 #include "base/strings/sys_string_conversions.h" | 16 #include "base/strings/sys_string_conversions.h" |
| 18 #include "base/win/scoped_co_mem.h" | 17 #include "base/win/scoped_co_mem.h" |
| 19 #include "base/win/scoped_variant.h" | 18 #include "base/win/scoped_variant.h" |
| 20 #include "media/base/timestamp_constants.h" | 19 #include "media/base/timestamp_constants.h" |
| (...skipping 238 matching lines...) |
| 259 void VideoCaptureDeviceWin::ScopedMediaType::DeleteMediaType( | 258 void VideoCaptureDeviceWin::ScopedMediaType::DeleteMediaType( |
| 260 AM_MEDIA_TYPE* mt) { | 259 AM_MEDIA_TYPE* mt) { |
| 261 if (mt != NULL) { | 260 if (mt != NULL) { |
| 262 FreeMediaType(mt); | 261 FreeMediaType(mt); |
| 263 CoTaskMemFree(mt); | 262 CoTaskMemFree(mt); |
| 264 } | 263 } |
| 265 } | 264 } |
| 266 | 265 |
| 267 VideoCaptureDeviceWin::VideoCaptureDeviceWin( | 266 VideoCaptureDeviceWin::VideoCaptureDeviceWin( |
| 268 const VideoCaptureDeviceDescriptor& device_descriptor) | 267 const VideoCaptureDeviceDescriptor& device_descriptor) |
| 269 : device_descriptor_(device_descriptor), state_(kIdle) { | 268 : device_descriptor_(device_descriptor), |
| 269 state_(kIdle), |
| 270 white_balance_mode_manual_(false), |
| 271 exposure_mode_manual_(false) { |
| 270 // TODO(mcasas): Check that CoInitializeEx() has been called with the | 272 // TODO(mcasas): Check that CoInitializeEx() has been called with the |
| 271 // appropriate Apartment model, i.e., Single Threaded. | 273 // appropriate Apartment model, i.e., Single Threaded. |
| 272 } | 274 } |
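
Note: the new initializers (together with the removal of the `<vidcap.h>` include above) imply that the COM control pointers and the two mode flags are now members declared in video_capture_device_win.h. The header side of the change is not part of this excerpt; a presumed shape of those declarations, inferred only from the .cc usage, would be:

```cpp
// Presumed additions to the VideoCaptureDeviceWin declaration in
// video_capture_device_win.h (not shown in this diff); names follow the
// .cc usage, and <vidcap.h> would need to be included there instead.
base::win::ScopedComPtr<ICameraControl> camera_control_;
base::win::ScopedComPtr<IVideoProcAmp> video_control_;
bool white_balance_mode_manual_;
bool exposure_mode_manual_;
```
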
| 273 | 275 |
| 274 VideoCaptureDeviceWin::~VideoCaptureDeviceWin() { | 276 VideoCaptureDeviceWin::~VideoCaptureDeviceWin() { |
| 275 DCHECK(thread_checker_.CalledOnValidThread()); | 277 DCHECK(thread_checker_.CalledOnValidThread()); |
| 276 if (media_control_.Get()) | 278 if (media_control_.Get()) |
| 277 media_control_->Stop(); | 279 media_control_->Stop(); |
| 278 | 280 |
| 279 if (graph_builder_.Get()) { | 281 if (graph_builder_.Get()) { |
| (...skipping 144 matching lines...) |
| 424 // Set the sink filter to request this format. | 426 // Set the sink filter to request this format. |
| 425 sink_filter_->SetRequestedMediaFormat( | 427 sink_filter_->SetRequestedMediaFormat( |
| 426 found_capability.supported_format.pixel_format, frame_rate, | 428 found_capability.supported_format.pixel_format, frame_rate, |
| 427 found_capability.info_header); | 429 found_capability.info_header); |
| 428 // Order the capture device to use this format. | 430 // Order the capture device to use this format. |
| 429 hr = stream_config->SetFormat(media_type.get()); | 431 hr = stream_config->SetFormat(media_type.get()); |
| 430 if (FAILED(hr)) { | 432 if (FAILED(hr)) { |
| 431 SetErrorState(FROM_HERE, "Failed to set capture device output format", hr); | 433 SetErrorState(FROM_HERE, "Failed to set capture device output format", hr); |
| 432 return; | 434 return; |
| 433 } | 435 } |
| 436 capture_format_ = found_capability.supported_format; |
| 434 | 437 |
| 435 SetAntiFlickerInCaptureFilter(params); | 438 SetAntiFlickerInCaptureFilter(params); |
| 436 | 439 |
| 437 if (media_type->subtype == kMediaSubTypeHDYC) { | 440 if (media_type->subtype == kMediaSubTypeHDYC) { |
| 438 // HDYC pixel format, used by the DeckLink capture card, needs an AVI | 441 // HDYC pixel format, used by the DeckLink capture card, needs an AVI |
| 439 // decompressor filter after source, let |graph_builder_| add it. | 442 // decompressor filter after source, let |graph_builder_| add it. |
| 440 hr = graph_builder_->Connect(output_capture_pin_.Get(), | 443 hr = graph_builder_->Connect(output_capture_pin_.Get(), |
| 441 input_sink_pin_.Get()); | 444 input_sink_pin_.Get()); |
| 442 } else { | 445 } else { |
| 443 hr = graph_builder_->ConnectDirect(output_capture_pin_.Get(), | 446 hr = graph_builder_->ConnectDirect(output_capture_pin_.Get(), |
| (...skipping 13 matching lines...) |
| 457 | 460 |
| 458 // Start capturing. | 461 // Start capturing. |
| 459 hr = media_control_->Run(); | 462 hr = media_control_->Run(); |
| 460 if (FAILED(hr)) { | 463 if (FAILED(hr)) { |
| 461 SetErrorState(FROM_HERE, "Failed to start the Capture device.", hr); | 464 SetErrorState(FROM_HERE, "Failed to start the Capture device.", hr); |
| 462 return; | 465 return; |
| 463 } | 466 } |
| 464 | 467 |
| 465 client_->OnStarted(); | 468 client_->OnStarted(); |
| 466 state_ = kCapturing; | 469 state_ = kCapturing; |
| 467 } | |
| 468 | |
| 469 void VideoCaptureDeviceWin::StopAndDeAllocate() { | |
| 470 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 471 if (state_ != kCapturing) | |
| 472 return; | |
| 473 | |
| 474 HRESULT hr = media_control_->Stop(); | |
| 475 if (FAILED(hr)) { | |
| 476 SetErrorState(FROM_HERE, "Failed to stop the capture graph.", hr); | |
| 477 return; | |
| 478 } | |
| 479 | |
| 480 graph_builder_->Disconnect(output_capture_pin_.Get()); | |
| 481 graph_builder_->Disconnect(input_sink_pin_.Get()); | |
| 482 | |
| 483 client_.reset(); | |
| 484 state_ = kIdle; | |
| 485 } | |
| 486 | |
| 487 void VideoCaptureDeviceWin::TakePhoto(TakePhotoCallback callback) { | |
| 488 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 489 // DirectShow has other means of capturing still pictures, e.g. connecting a | |
| 490 // SampleGrabber filter to a PIN_CATEGORY_STILL of |capture_filter_|. This | |
| 491 // way, however, is not widespread and proves too cumbersome, so we just grab | |
| 492 // the next captured frame instead. | |
| 493 take_photo_callbacks_.push(std::move(callback)); | |
| 494 } | |
| 495 | |
| 496 void VideoCaptureDeviceWin::GetPhotoCapabilities( | |
| 497 GetPhotoCapabilitiesCallback callback) { | |
| 498 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 499 | 470 |
| 500 base::win::ScopedComPtr<IKsTopologyInfo> info; | 471 base::win::ScopedComPtr<IKsTopologyInfo> info; |
| 501 HRESULT hr = capture_filter_.CopyTo(info.GetAddressOf()); | 472 hr = capture_filter_.CopyTo(info.GetAddressOf()); |
| 502 if (FAILED(hr)) { | 473 if (FAILED(hr)) { |
| 503 SetErrorState(FROM_HERE, "Failed to obtain the topology info.", hr); | 474 SetErrorState(FROM_HERE, "Failed to obtain the topology info.", hr); |
| 504 return; | 475 return; |
| 505 } | 476 } |
| 506 | 477 |
| 507 DWORD num_nodes = 0; | 478 DWORD num_nodes = 0; |
| 508 hr = info->get_NumNodes(&num_nodes); | 479 hr = info->get_NumNodes(&num_nodes); |
| 509 if (FAILED(hr)) { | 480 if (FAILED(hr)) { |
| 510 SetErrorState(FROM_HERE, "Failed to obtain the number of nodes.", hr); | 481 SetErrorState(FROM_HERE, "Failed to obtain the number of nodes.", hr); |
| 511 return; | 482 return; |
| 512 } | 483 } |
| 513 | 484 |
| 514 // Every UVC camera is expected to have a single ICameraControl and a single | 485 // Every UVC camera is expected to have a single ICameraControl and a single |
| 515 // IVideoProcAmp nodes, and both are needed; ignore any unlikely later ones. | 486 // IVideoProcAmp nodes, and both are needed; ignore any unlikely later ones. |
| 516 GUID node_type; | 487 GUID node_type; |
| 517 base::win::ScopedComPtr<ICameraControl> camera_control; | |
| 518 for (size_t i = 0; i < num_nodes; i++) { | 488 for (size_t i = 0; i < num_nodes; i++) { |
| 519 info->get_NodeType(i, &node_type); | 489 info->get_NodeType(i, &node_type); |
| 520 if (IsEqualGUID(node_type, KSNODETYPE_VIDEO_CAMERA_TERMINAL)) { | 490 if (IsEqualGUID(node_type, KSNODETYPE_VIDEO_CAMERA_TERMINAL)) { |
| 521 hr = info->CreateNodeInstance(i, IID_PPV_ARGS(&camera_control)); | 491 hr = info->CreateNodeInstance(i, IID_PPV_ARGS(&camera_control_)); |
| 522 if (SUCCEEDED(hr)) | 492 if (SUCCEEDED(hr)) |
| 523 break; | 493 break; |
| 524 SetErrorState(FROM_HERE, "Failed to retrieve the ICameraControl.", hr); | 494 SetErrorState(FROM_HERE, "Failed to retrieve the ICameraControl.", hr); |
| 525 return; | 495 return; |
| 526 } | 496 } |
| 527 } | 497 } |
| 528 if (!camera_control) | |
| 529 return; | |
| 530 base::win::ScopedComPtr<IVideoProcAmp> video_control; | |
| 531 for (size_t i = 0; i < num_nodes; i++) { | 498 for (size_t i = 0; i < num_nodes; i++) { |
| 532 info->get_NodeType(i, &node_type); | 499 info->get_NodeType(i, &node_type); |
| 533 if (IsEqualGUID(node_type, KSNODETYPE_VIDEO_PROCESSING)) { | 500 if (IsEqualGUID(node_type, KSNODETYPE_VIDEO_PROCESSING)) { |
| 534 hr = info->CreateNodeInstance(i, IID_PPV_ARGS(&video_control)); | 501 hr = info->CreateNodeInstance(i, IID_PPV_ARGS(&video_control_)); |
| 535 if (SUCCEEDED(hr)) | 502 if (SUCCEEDED(hr)) |
| 536 break; | 503 break; |
| 537 SetErrorState(FROM_HERE, "Failed to retrieve the IVideoProcAmp.", hr); | 504 SetErrorState(FROM_HERE, "Failed to retrieve the IVideoProcAmp.", hr); |
| 538 return; | 505 return; |
| 539 } | 506 } |
| 540 } | 507 } |
| 541 if (!video_control) | 508 } |
| 509 |
| 510 void VideoCaptureDeviceWin::StopAndDeAllocate() { |
| 511 DCHECK(thread_checker_.CalledOnValidThread()); |
| 512 if (state_ != kCapturing) |
| 513 return; |
| 514 |
| 515 HRESULT hr = media_control_->Stop(); |
| 516 if (FAILED(hr)) { |
| 517 SetErrorState(FROM_HERE, "Failed to stop the capture graph.", hr); |
| 518 return; |
| 519 } |
| 520 |
| 521 graph_builder_->Disconnect(output_capture_pin_.Get()); |
| 522 graph_builder_->Disconnect(input_sink_pin_.Get()); |
| 523 |
| 524 client_.reset(); |
| 525 state_ = kIdle; |
| 526 } |
| 527 |
| 528 void VideoCaptureDeviceWin::TakePhoto(TakePhotoCallback callback) { |
| 529 DCHECK(thread_checker_.CalledOnValidThread()); |
| 530 // DirectShow has other means of capturing still pictures, e.g. connecting a |
| 531 // SampleGrabber filter to a PIN_CATEGORY_STILL of |capture_filter_|. This |
| 532 // way, however, is not widespread and proves too cumbersome, so we just grab |
| 533 // the next captured frame instead. |
| 534 take_photo_callbacks_.push(std::move(callback)); |
| 535 } |
| 536 |
| 537 void VideoCaptureDeviceWin::GetPhotoCapabilities( |
| 538 GetPhotoCapabilitiesCallback callback) { |
| 539 DCHECK(thread_checker_.CalledOnValidThread()); |
| 540 |
| 541 if (!camera_control_ || !video_control_) |
| 542 return; | 542 return; |
| 543 | 543 |
| 544 auto photo_capabilities = mojom::PhotoCapabilities::New(); | 544 auto photo_capabilities = mojom::PhotoCapabilities::New(); |
| 545 | 545 |
| 546 photo_capabilities->exposure_compensation = RetrieveControlRangeAndCurrent( | 546 photo_capabilities->exposure_compensation = RetrieveControlRangeAndCurrent( |
| 547 [camera_control](auto... args) { | 547 [this](auto... args) { |
| 548 return camera_control->getRange_Exposure(args...); | 548 return this->camera_control_->getRange_Exposure(args...); |
| 549 }, | 549 }, |
| 550 [camera_control](auto... args) { | 550 [this](auto... args) { |
| 551 return camera_control->get_Exposure(args...); | 551 return this->camera_control_->get_Exposure(args...); |
| 552 }, | 552 }, |
| 553 &photo_capabilities->supported_exposure_modes, | 553 &photo_capabilities->supported_exposure_modes, |
| 554 &photo_capabilities->current_exposure_mode); | 554 &photo_capabilities->current_exposure_mode); |
| 555 | 555 |
| 556 photo_capabilities->color_temperature = RetrieveControlRangeAndCurrent( | 556 photo_capabilities->color_temperature = RetrieveControlRangeAndCurrent( |
| 557 [video_control](auto... args) { | 557 [this](auto... args) { |
| 558 return video_control->getRange_WhiteBalance(args...); | 558 return this->video_control_->getRange_WhiteBalance(args...); |
| 559 }, | 559 }, |
| 560 [video_control](auto... args) { | 560 [this](auto... args) { |
| 561 return video_control->get_WhiteBalance(args...); | 561 return this->video_control_->get_WhiteBalance(args...); |
| 562 }, | 562 }, |
| 563 &photo_capabilities->supported_white_balance_modes, | 563 &photo_capabilities->supported_white_balance_modes, |
| 564 &photo_capabilities->current_white_balance_mode); | 564 &photo_capabilities->current_white_balance_mode); |
| 565 | 565 |
| 566 // Ignore the returned Focus control range and status. | 566 // Ignore the returned Focus control range and status. |
| 567 RetrieveControlRangeAndCurrent( | 567 RetrieveControlRangeAndCurrent( |
| 568 [camera_control](auto... args) { | 568 [this](auto... args) { |
| 569 return camera_control->getRange_Focus(args...); | 569 return this->camera_control_->getRange_Focus(args...); |
| 570 }, | 570 }, |
| 571 [camera_control](auto... args) { | 571 [this](auto... args) { |
| 572 return camera_control->get_Focus(args...); | 572 return this->camera_control_->get_Focus(args...); |
| 573 }, | 573 }, |
| 574 &photo_capabilities->supported_focus_modes, | 574 &photo_capabilities->supported_focus_modes, |
| 575 &photo_capabilities->current_focus_mode); | 575 &photo_capabilities->current_focus_mode); |
| 576 | 576 |
| 577 photo_capabilities->iso = mojom::Range::New(); | 577 photo_capabilities->iso = mojom::Range::New(); |
| 578 | 578 |
| 579 photo_capabilities->brightness = RetrieveControlRangeAndCurrent( | 579 photo_capabilities->brightness = RetrieveControlRangeAndCurrent( |
| 580 [video_control](auto... args) { | 580 [this](auto... args) { |
| 581 return video_control->getRange_Brightness(args...); | 581 return this->video_control_->getRange_Brightness(args...); |
| 582 }, | 582 }, |
| 583 [video_control](auto... args) { | 583 [this](auto... args) { |
| 584 return video_control->get_Brightness(args...); | 584 return this->video_control_->get_Brightness(args...); |
| 585 }); | 585 }); |
| 586 photo_capabilities->contrast = RetrieveControlRangeAndCurrent( | 586 photo_capabilities->contrast = RetrieveControlRangeAndCurrent( |
| 587 [video_control](auto... args) { | 587 [this](auto... args) { |
| 588 return video_control->getRange_Contrast(args...); | 588 return this->video_control_->getRange_Contrast(args...); |
| 589 }, | 589 }, |
| 590 [video_control](auto... args) { | 590 [this](auto... args) { |
| 591 return video_control->get_Contrast(args...); | 591 return this->video_control_->get_Contrast(args...); |
| 592 }); | 592 }); |
| 593 photo_capabilities->saturation = RetrieveControlRangeAndCurrent( | 593 photo_capabilities->saturation = RetrieveControlRangeAndCurrent( |
| 594 [video_control](auto... args) { | 594 [this](auto... args) { |
| 595 return video_control->getRange_Saturation(args...); | 595 return this->video_control_->getRange_Saturation(args...); |
| 596 }, | 596 }, |
| 597 [video_control](auto... args) { | 597 [this](auto... args) { |
| 598 return video_control->get_Saturation(args...); | 598 return this->video_control_->get_Saturation(args...); |
| 599 }); | 599 }); |
| 600 photo_capabilities->sharpness = RetrieveControlRangeAndCurrent( | 600 photo_capabilities->sharpness = RetrieveControlRangeAndCurrent( |
| 601 [video_control](auto... args) { | 601 [this](auto... args) { |
| 602 return video_control->getRange_Sharpness(args...); | 602 return this->video_control_->getRange_Sharpness(args...); |
| 603 }, | 603 }, |
| 604 [video_control](auto... args) { | 604 [this](auto... args) { |
| 605 return video_control->get_Sharpness(args...); | 605 return this->video_control_->get_Sharpness(args...); |
| 606 }); | 606 }); |
| 607 | 607 |
| 608 photo_capabilities->zoom = RetrieveControlRangeAndCurrent( | 608 photo_capabilities->zoom = RetrieveControlRangeAndCurrent( |
| 609 [camera_control](auto... args) { | 609 [this](auto... args) { |
| 610 return camera_control->getRange_Zoom(args...); | 610 return this->camera_control_->getRange_Zoom(args...); |
| 611 }, | 611 }, |
| 612 [camera_control](auto... args) { | 612 [this](auto... args) { |
| 613 return camera_control->get_Zoom(args...); | 613 return this->camera_control_->get_Zoom(args...); |
| 614 }); | 614 }); |
| 615 | 615 |
| 616 photo_capabilities->red_eye_reduction = mojom::RedEyeReduction::NEVER; | 616 photo_capabilities->red_eye_reduction = mojom::RedEyeReduction::NEVER; |
| 617 photo_capabilities->height = mojom::Range::New(); | 617 photo_capabilities->height = mojom::Range::New( |
| 618 photo_capabilities->width = mojom::Range::New(); | 618 capture_format_.frame_size.height(), capture_format_.frame_size.height(), |
| 619 capture_format_.frame_size.height(), 0 /* step */); |
| 620 photo_capabilities->width = mojom::Range::New( |
| 621 capture_format_.frame_size.width(), capture_format_.frame_size.width(), |
| 622 capture_format_.frame_size.width(), 0 /* step */); |
| 619 photo_capabilities->torch = false; | 623 photo_capabilities->torch = false; |
| 620 | 624 |
| 621 callback.Run(std::move(photo_capabilities)); | 625 callback.Run(std::move(photo_capabilities)); |
| 622 } | 626 } |
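
For readers without the skipped portion of the file: RetrieveControlRangeAndCurrent() is a file-local helper that pairs a getRange_* call with a get_* call and packs the result into a mojom::Range. A rough, illustrative sketch of the two-argument form used above (the real helper is defined earlier in this file and additionally fills the supported/current mode out-parameters seen in the four-argument calls):

```cpp
// Illustrative sketch only; the actual helper lives in the skipped region of
// this file. Shapes of the getter arguments follow the vidcap.h interfaces:
// getRange_Xxx(&min, &max, &step, &default, &caps) and get_Xxx(&value, &flags).
template <typename RangeGetter, typename ValueGetter>
mojom::RangePtr RetrieveControlRangeAndCurrent(RangeGetter get_range,
                                               ValueGetter get_value) {
  auto range = mojom::Range::New();
  long min = 0, max = 0, step = 0, default_value = 0, flags = 0;
  if (SUCCEEDED(get_range(&min, &max, &step, &default_value, &flags))) {
    range->min = min;
    range->max = max;
    range->step = step;
  }
  long current = 0;
  if (SUCCEEDED(get_value(&current, &flags)))
    range->current = current;
  return range;
}
```

With |camera_control_| and |video_control_| promoted to members, the lambdas above capture `this` instead of the local ComPtrs the old code captured, which is what lets GetPhotoCapabilities() reuse the nodes created during AllocateAndStart().
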
| 623 | 627 |
| 628 void VideoCaptureDeviceWin::SetPhotoOptions( |
| 629 mojom::PhotoSettingsPtr settings, |
| 630 VideoCaptureDevice::SetPhotoOptionsCallback callback) { |
| 631 DCHECK(thread_checker_.CalledOnValidThread()); |
| 632 |
| 633 if (!camera_control_ || !video_control_) |
| 634 return; |
| 635 |
| 636 if (settings->has_zoom) { |
| 637 HRESULT hr = |
| 638 camera_control_->put_Zoom(settings->zoom, CameraControl_Flags_Manual); |
| 639 DLOG_IF_FAILED_WITH_HRESULT("Zoom config failed", hr); |
| 640 } |
| 641 |
| 642 if (settings->has_white_balance_mode) { |
| 643 if (settings->white_balance_mode == mojom::MeteringMode::CONTINUOUS) { |
| 644 HRESULT hr = |
| 645 video_control_->put_WhiteBalance(0L, VideoProcAmp_Flags_Auto); |
| 646 DLOG_IF_FAILED_WITH_HRESULT("Auto white balance config failed", hr); |
| 647 |
| 648 white_balance_mode_manual_ = false; |
| 649 } else { |
| 650 white_balance_mode_manual_ = true; |
| 651 } |
| 652 } |
| 653 if (white_balance_mode_manual_ && settings->has_color_temperature) { |
| 654 HRESULT hr = video_control_->put_WhiteBalance(settings->color_temperature, |
| 655 CameraControl_Flags_Manual); |
| 656 DLOG_IF_FAILED_WITH_HRESULT("Color temperature config failed", hr); |
| 657 } |
| 658 |
| 659 if (settings->has_exposure_mode) { |
| 660 if (settings->exposure_mode == mojom::MeteringMode::CONTINUOUS) { |
| 661 HRESULT hr = camera_control_->put_Exposure(0L, VideoProcAmp_Flags_Auto); |
| 662 DLOG_IF_FAILED_WITH_HRESULT("Auto exposure config failed", hr); |
| 663 |
| 664 exposure_mode_manual_ = false; |
| 665 } else { |
| 666 exposure_mode_manual_ = true; |
| 667 } |
| 668 } |
| 669 if (exposure_mode_manual_ && settings->has_exposure_compensation) { |
| 670 HRESULT hr = camera_control_->put_Exposure(settings->exposure_compensation, |
| 671 CameraControl_Flags_Manual); |
| 672 DLOG_IF_FAILED_WITH_HRESULT("Exposure Compensation config failed", hr); |
| 673 } |
| 674 |
| 675 if (settings->has_brightness) { |
| 676 HRESULT hr = video_control_->put_Brightness(settings->brightness, |
| 677 CameraControl_Flags_Manual); |
| 678 DLOG_IF_FAILED_WITH_HRESULT("Brightness config failed", hr); |
| 679 } |
| 680 if (settings->has_contrast) { |
| 681 HRESULT hr = video_control_->put_Contrast(settings->contrast, |
| 682 CameraControl_Flags_Manual); |
| 683 DLOG_IF_FAILED_WITH_HRESULT("Contrast config failed", hr); |
| 684 } |
| 685 if (settings->has_saturation) { |
| 686 HRESULT hr = video_control_->put_Saturation(settings->saturation, |
| 687 CameraControl_Flags_Manual); |
| 688 DLOG_IF_FAILED_WITH_HRESULT("Saturation config failed", hr); |
| 689 } |
| 690 if (settings->has_sharpness) { |
| 691 HRESULT hr = video_control_->put_Sharpness(settings->sharpness, |
| 692 CameraControl_Flags_Manual); |
| 693 DLOG_IF_FAILED_WITH_HRESULT("Sharpness config failed", hr); |
| 694 } |
| 695 |
| 696 callback.Run(true); |
| 697 } |
| 624 // Implements SinkFilterObserver::SinkFilterObserver. | 698 // Implements SinkFilterObserver::SinkFilterObserver. |
| 625 void VideoCaptureDeviceWin::FrameReceived(const uint8_t* buffer, | 699 void VideoCaptureDeviceWin::FrameReceived(const uint8_t* buffer, |
| 626 int length, | 700 int length, |
| 627 const VideoCaptureFormat& format, | 701 const VideoCaptureFormat& format, |
| 628 base::TimeDelta timestamp) { | 702 base::TimeDelta timestamp) { |
| 629 if (first_ref_time_.is_null()) | 703 if (first_ref_time_.is_null()) |
| 630 first_ref_time_ = base::TimeTicks::Now(); | 704 first_ref_time_ = base::TimeTicks::Now(); |
| 631 | 705 |
| 632 // There is a chance that the platform does not provide us with the timestamp, | 706 // There is a chance that the platform does not provide us with the timestamp, |
| 633 // in which case, we use reference time to calculate a timestamp. | 707 // in which case, we use reference time to calculate a timestamp. |
| (...skipping 124 matching lines...) |
| 758 void VideoCaptureDeviceWin::SetErrorState( | 832 void VideoCaptureDeviceWin::SetErrorState( |
| 759 const tracked_objects::Location& from_here, | 833 const tracked_objects::Location& from_here, |
| 760 const std::string& reason, | 834 const std::string& reason, |
| 761 HRESULT hr) { | 835 HRESULT hr) { |
| 762 DCHECK(thread_checker_.CalledOnValidThread()); | 836 DCHECK(thread_checker_.CalledOnValidThread()); |
| 763 DLOG_IF_FAILED_WITH_HRESULT(reason, hr); | 837 DLOG_IF_FAILED_WITH_HRESULT(reason, hr); |
| 764 state_ = kError; | 838 state_ = kError; |
| 765 client_->OnError(from_here, reason); | 839 client_->OnError(from_here, reason); |
| 766 } | 840 } |
| 767 } // namespace media | 841 } // namespace media |