| OLD | NEW |
| 1 // Copyright 2016 The Chromium Authors. All rights reserved. | 1 // Copyright 2016 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "media/gpu/media_foundation_video_encode_accelerator_win.h" | 5 #include "media/gpu/media_foundation_video_encode_accelerator_win.h" |
| 6 | 6 |
| 7 #pragma warning(push) | 7 #pragma warning(push) |
| 8 #pragma warning(disable : 4800) // Disable "forcing value to bool" warning. | 8 #pragma warning(disable : 4800) // Disable "forcing value to bool" warning. |
| 9 | 9 |
| 10 #include <codecapi.h> | 10 #include <codecapi.h> |
| (...skipping 339 matching lines...) |
| 350 | 350 |
| 351 base::win::ScopedCoMem<CLSID> CLSIDs; | 351 base::win::ScopedCoMem<CLSID> CLSIDs; |
| 352 uint32_t count = 0; | 352 uint32_t count = 0; |
| 353 HRESULT hr = MFTEnum(MFT_CATEGORY_VIDEO_ENCODER, flags, &input_info, | 353 HRESULT hr = MFTEnum(MFT_CATEGORY_VIDEO_ENCODER, flags, &input_info, |
| 354 &output_info, NULL, &CLSIDs, &count); | 354 &output_info, NULL, &CLSIDs, &count); |
| 355 RETURN_ON_HR_FAILURE(hr, "Couldn't enumerate hardware encoder", false); | 355 RETURN_ON_HR_FAILURE(hr, "Couldn't enumerate hardware encoder", false); |
| 356 RETURN_ON_FAILURE((count > 0), "No HW encoder found", false); | 356 RETURN_ON_FAILURE((count > 0), "No HW encoder found", false); |
| 357 DVLOG(3) << "HW encoder(s) found: " << count; | 357 DVLOG(3) << "HW encoder(s) found: " << count; |
| 358 hr = encoder_.CreateInstance(CLSIDs[0]); | 358 hr = encoder_.CreateInstance(CLSIDs[0]); |
| 359 RETURN_ON_HR_FAILURE(hr, "Couldn't activate hardware encoder", false); | 359 RETURN_ON_HR_FAILURE(hr, "Couldn't activate hardware encoder", false); |
| 360 RETURN_ON_FAILURE((encoder_.get() != nullptr), | 360 RETURN_ON_FAILURE((encoder_.Get() != nullptr), |
| 361 "No HW encoder instance created", false); | 361 "No HW encoder instance created", false); |
| 362 return true; | 362 return true; |
| 363 } | 363 } |
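Reviewer note on the enumeration step above: for readers less familiar with it, here is a minimal standalone sketch of the same idea using MFTEnumEx, which takes explicit MFT_ENUM_FLAG_* flags and returns IMFActivate objects. The WRL ComPtr type, the NV12 input subtype, and the helper name are illustrative assumptions, not what the Chromium code does (it uses MFTEnum with base::win::ScopedComPtr).

```cpp
// Sketch only: enumerate hardware H.264 encoder MFTs and activate the first
// match. Assumes Media Foundation was already started with MFStartup().
#include <mfapi.h>
#include <mfidl.h>
#include <mftransform.h>
#include <objbase.h>
#include <wrl/client.h>

Microsoft::WRL::ComPtr<IMFTransform> CreateFirstHardwareH264Encoder() {
  MFT_REGISTER_TYPE_INFO input_info = {MFMediaType_Video, MFVideoFormat_NV12};
  MFT_REGISTER_TYPE_INFO output_info = {MFMediaType_Video, MFVideoFormat_H264};

  IMFActivate** activates = nullptr;
  UINT32 count = 0;
  HRESULT hr = MFTEnumEx(MFT_CATEGORY_VIDEO_ENCODER,
                         MFT_ENUM_FLAG_HARDWARE | MFT_ENUM_FLAG_SORTANDFILTER,
                         &input_info, &output_info, &activates, &count);
  Microsoft::WRL::ComPtr<IMFTransform> encoder;
  if (SUCCEEDED(hr) && count > 0) {
    // Take the first matching activation object and instantiate the MFT.
    activates[0]->ActivateObject(IID_PPV_ARGS(&encoder));
  }
  for (UINT32 i = 0; i < count; ++i)
    activates[i]->Release();
  CoTaskMemFree(activates);
  return encoder;
}
```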
| 364 | 364 |
| 365 bool MediaFoundationVideoEncodeAccelerator::InitializeInputOutputSamples() { | 365 bool MediaFoundationVideoEncodeAccelerator::InitializeInputOutputSamples() { |
| 366 DCHECK(main_client_task_runner_->BelongsToCurrentThread()); | 366 DCHECK(main_client_task_runner_->BelongsToCurrentThread()); |
| 367 | 367 |
| 368 DWORD input_count = 0; | 368 DWORD input_count = 0; |
| 369 DWORD output_count = 0; | 369 DWORD output_count = 0; |
| 370 HRESULT hr = encoder_->GetStreamCount(&input_count, &output_count); | 370 HRESULT hr = encoder_->GetStreamCount(&input_count, &output_count); |
| (...skipping 21 matching lines...) |
| 392 | 392 |
| 393 // Initialize output parameters. | 393 // Initialize output parameters. |
| 394 hr = MFCreateMediaType(imf_output_media_type_.Receive()); | 394 hr = MFCreateMediaType(imf_output_media_type_.Receive()); |
| 395 RETURN_ON_HR_FAILURE(hr, "Couldn't create media type", false); | 395 RETURN_ON_HR_FAILURE(hr, "Couldn't create media type", false); |
| 396 hr = imf_output_media_type_->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video); | 396 hr = imf_output_media_type_->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video); |
| 397 RETURN_ON_HR_FAILURE(hr, "Couldn't set media type", false); | 397 RETURN_ON_HR_FAILURE(hr, "Couldn't set media type", false); |
| 398 hr = imf_output_media_type_->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264); | 398 hr = imf_output_media_type_->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264); |
| 399 RETURN_ON_HR_FAILURE(hr, "Couldn't set video format", false); | 399 RETURN_ON_HR_FAILURE(hr, "Couldn't set video format", false); |
| 400 hr = imf_output_media_type_->SetUINT32(MF_MT_AVG_BITRATE, target_bitrate_); | 400 hr = imf_output_media_type_->SetUINT32(MF_MT_AVG_BITRATE, target_bitrate_); |
| 401 RETURN_ON_HR_FAILURE(hr, "Couldn't set bitrate", false); | 401 RETURN_ON_HR_FAILURE(hr, "Couldn't set bitrate", false); |
| 402 hr = MFSetAttributeRatio(imf_output_media_type_.get(), MF_MT_FRAME_RATE, | 402 hr = MFSetAttributeRatio(imf_output_media_type_.Get(), MF_MT_FRAME_RATE, |
| 403 frame_rate_, 1); | 403 frame_rate_, 1); |
| 404 RETURN_ON_HR_FAILURE(hr, "Couldn't set frame rate", false); | 404 RETURN_ON_HR_FAILURE(hr, "Couldn't set frame rate", false); |
| 405 hr = MFSetAttributeSize(imf_output_media_type_.get(), MF_MT_FRAME_SIZE, | 405 hr = MFSetAttributeSize(imf_output_media_type_.Get(), MF_MT_FRAME_SIZE, |
| 406 input_visible_size_.width(), | 406 input_visible_size_.width(), |
| 407 input_visible_size_.height()); | 407 input_visible_size_.height()); |
| 408 RETURN_ON_HR_FAILURE(hr, "Couldn't set frame size", false); | 408 RETURN_ON_HR_FAILURE(hr, "Couldn't set frame size", false); |
| 409 hr = imf_output_media_type_->SetUINT32(MF_MT_INTERLACE_MODE, | 409 hr = imf_output_media_type_->SetUINT32(MF_MT_INTERLACE_MODE, |
| 410 MFVideoInterlace_Progressive); | 410 MFVideoInterlace_Progressive); |
| 411 RETURN_ON_HR_FAILURE(hr, "Couldn't set interlace mode", false); | 411 RETURN_ON_HR_FAILURE(hr, "Couldn't set interlace mode", false); |
| 412 hr = imf_output_media_type_->SetUINT32(MF_MT_MPEG2_PROFILE, | 412 hr = imf_output_media_type_->SetUINT32(MF_MT_MPEG2_PROFILE, |
| 413 eAVEncH264VProfile_Base); | 413 eAVEncH264VProfile_Base); |
| 414 RETURN_ON_HR_FAILURE(hr, "Couldn't set codec profile", false); | 414 RETURN_ON_HR_FAILURE(hr, "Couldn't set codec profile", false); |
| 415 hr = encoder_->SetOutputType(output_stream_id_, imf_output_media_type_.get(), | 415 hr = encoder_->SetOutputType(output_stream_id_, imf_output_media_type_.Get(), |
| 416 0); | 416 0); |
| 417 RETURN_ON_HR_FAILURE(hr, "Couldn't set output media type", false); | 417 RETURN_ON_HR_FAILURE(hr, "Couldn't set output media type", false); |
| 418 | 418 |
| 419 // Initialize input parameters. | 419 // Initialize input parameters. |
| 420 hr = MFCreateMediaType(imf_input_media_type_.Receive()); | 420 hr = MFCreateMediaType(imf_input_media_type_.Receive()); |
| 421 RETURN_ON_HR_FAILURE(hr, "Couldn't create media type", false); | 421 RETURN_ON_HR_FAILURE(hr, "Couldn't create media type", false); |
| 422 hr = imf_input_media_type_->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video); | 422 hr = imf_input_media_type_->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video); |
| 423 RETURN_ON_HR_FAILURE(hr, "Couldn't set media type", false); | 423 RETURN_ON_HR_FAILURE(hr, "Couldn't set media type", false); |
| 424 hr = imf_input_media_type_->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_YV12); | 424 hr = imf_input_media_type_->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_YV12); |
| 425 RETURN_ON_HR_FAILURE(hr, "Couldn't set video format", false); | 425 RETURN_ON_HR_FAILURE(hr, "Couldn't set video format", false); |
| 426 hr = MFSetAttributeRatio(imf_input_media_type_.get(), MF_MT_FRAME_RATE, | 426 hr = MFSetAttributeRatio(imf_input_media_type_.Get(), MF_MT_FRAME_RATE, |
| 427 frame_rate_, 1); | 427 frame_rate_, 1); |
| 428 RETURN_ON_HR_FAILURE(hr, "Couldn't set frame rate", false); | 428 RETURN_ON_HR_FAILURE(hr, "Couldn't set frame rate", false); |
| 429 hr = MFSetAttributeSize(imf_input_media_type_.get(), MF_MT_FRAME_SIZE, | 429 hr = MFSetAttributeSize(imf_input_media_type_.Get(), MF_MT_FRAME_SIZE, |
| 430 input_visible_size_.width(), | 430 input_visible_size_.width(), |
| 431 input_visible_size_.height()); | 431 input_visible_size_.height()); |
| 432 RETURN_ON_HR_FAILURE(hr, "Couldn't set frame size", false); | 432 RETURN_ON_HR_FAILURE(hr, "Couldn't set frame size", false); |
| 433 hr = imf_input_media_type_->SetUINT32(MF_MT_INTERLACE_MODE, | 433 hr = imf_input_media_type_->SetUINT32(MF_MT_INTERLACE_MODE, |
| 434 MFVideoInterlace_Progressive); | 434 MFVideoInterlace_Progressive); |
| 435 RETURN_ON_HR_FAILURE(hr, "Couldn't set interlace mode", false); | 435 RETURN_ON_HR_FAILURE(hr, "Couldn't set interlace mode", false); |
| 436 hr = encoder_->SetInputType(input_stream_id_, imf_input_media_type_.get(), 0); | 436 hr = encoder_->SetInputType(input_stream_id_, imf_input_media_type_.Get(), 0); |
| 437 RETURN_ON_HR_FAILURE(hr, "Couldn't set input media type", false); | 437 RETURN_ON_HR_FAILURE(hr, "Couldn't set input media type", false); |
| 438 | 438 |
| 439 return SUCCEEDED(hr); | 439 return SUCCEEDED(hr); |
| 440 } | 440 } |
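A note on the MFSetAttributeSize / MFSetAttributeRatio calls above: both are thin inline helpers from mfapi.h that pack two 32-bit values into a single UINT64 attribute, first argument in the high 32 bits, second in the low 32 bits. A sketch of that packing, with a hypothetical helper name of my own:

```cpp
#include <cstdint>

// Equivalent of mfapi.h's Pack2UINT32AsUINT64: first value goes in the high
// 32 bits, second value in the low 32 bits.
inline uint64_t PackTwoUint32(uint32_t high, uint32_t low) {
  return (static_cast<uint64_t>(high) << 32) | low;
}

// MFSetAttributeSize(type, MF_MT_FRAME_SIZE, width, height) boils down to
//   type->SetUINT64(MF_MT_FRAME_SIZE, PackTwoUint32(width, height));
// and MFSetAttributeRatio(type, MF_MT_FRAME_RATE, num, den) to
//   type->SetUINT64(MF_MT_FRAME_RATE, PackTwoUint32(num, den));
```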
| 441 | 441 |
| 442 bool MediaFoundationVideoEncodeAccelerator::SetEncoderModes() { | 442 bool MediaFoundationVideoEncodeAccelerator::SetEncoderModes() { |
| 443 DCHECK(main_client_task_runner_->BelongsToCurrentThread()); | 443 DCHECK(main_client_task_runner_->BelongsToCurrentThread()); |
| 444 RETURN_ON_FAILURE((encoder_.get() != nullptr), | 444 RETURN_ON_FAILURE((encoder_.Get() != nullptr), |
| 445 "No HW encoder instance created", false); | 445 "No HW encoder instance created", false); |
| 446 | 446 |
| 447 HRESULT hr = encoder_.QueryInterface(codec_api_.Receive()); | 447 HRESULT hr = encoder_.QueryInterface(codec_api_.Receive()); |
| 448 RETURN_ON_HR_FAILURE(hr, "Couldn't get ICodecAPI", false); | 448 RETURN_ON_HR_FAILURE(hr, "Couldn't get ICodecAPI", false); |
| 449 VARIANT var; | 449 VARIANT var; |
| 450 var.vt = VT_UI4; | 450 var.vt = VT_UI4; |
| 451 var.ulVal = eAVEncCommonRateControlMode_CBR; | 451 var.ulVal = eAVEncCommonRateControlMode_CBR; |
| 452 hr = codec_api_->SetValue(&CODECAPI_AVEncCommonRateControlMode, &var); | 452 hr = codec_api_->SetValue(&CODECAPI_AVEncCommonRateControlMode, &var); |
| 453 RETURN_ON_HR_FAILURE(hr, "Couldn't set CommonRateControlMode", false); | 453 RETURN_ON_HR_FAILURE(hr, "Couldn't set CommonRateControlMode", false); |
| 454 var.ulVal = target_bitrate_; | 454 var.ulVal = target_bitrate_; |
| 455 hr = codec_api_->SetValue(&CODECAPI_AVEncCommonMeanBitRate, &var); | 455 hr = codec_api_->SetValue(&CODECAPI_AVEncCommonMeanBitRate, &var); |
| 456 RETURN_ON_HR_FAILURE(hr, "Couldn't set bitrate", false); | 456 RETURN_ON_HR_FAILURE(hr, "Couldn't set bitrate", false); |
| 457 var.ulVal = eAVEncAdaptiveMode_Resolution; | 457 var.ulVal = eAVEncAdaptiveMode_Resolution; |
| 458 hr = codec_api_->SetValue(&CODECAPI_AVEncAdaptiveMode, &var); | 458 hr = codec_api_->SetValue(&CODECAPI_AVEncAdaptiveMode, &var); |
| 459 RETURN_ON_HR_FAILURE(hr, "Couldn't set FrameRate", false); | 459 RETURN_ON_HR_FAILURE(hr, "Couldn't set FrameRate", false); |
| 460 var.vt = VT_BOOL; | 460 var.vt = VT_BOOL; |
| 461 var.boolVal = VARIANT_TRUE; | 461 var.boolVal = VARIANT_TRUE; |
| 462 hr = codec_api_->SetValue(&CODECAPI_AVLowLatencyMode, &var); | 462 hr = codec_api_->SetValue(&CODECAPI_AVLowLatencyMode, &var); |
| 463 RETURN_ON_HR_FAILURE(hr, "Couldn't set LowLatencyMode", false); | 463 RETURN_ON_HR_FAILURE(hr, "Couldn't set LowLatencyMode", false); |
| 464 return SUCCEEDED(hr); | 464 return SUCCEEDED(hr); |
| 465 } | 465 } |
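SetEncoderModes() above drives the encoder through ICodecAPI: UINT32 properties use a VARIANT with vt = VT_UI4 and ulVal, boolean ones VT_BOOL and VARIANT_TRUE. Not every property is mandatory for a hardware encoder, and ICodecAPI::IsSupported() lets callers probe before setting. A minimal sketch under those assumptions (the function name is mine, error handling is collapsed to a bool):

```cpp
#include <codecapi.h>
#include <oleauto.h>
#include <strmif.h>  // ICodecAPI
#include <wrl/client.h>

// Sketch: set the mean bitrate only if the encoder advertises the property.
bool TrySetMeanBitrate(const Microsoft::WRL::ComPtr<ICodecAPI>& codec_api,
                       UINT32 bits_per_second) {
  if (codec_api->IsSupported(&CODECAPI_AVEncCommonMeanBitRate) != S_OK)
    return false;
  VARIANT var;
  VariantInit(&var);  // VARIANTs should be initialized before use.
  var.vt = VT_UI4;
  var.ulVal = bits_per_second;
  return SUCCEEDED(
      codec_api->SetValue(&CODECAPI_AVEncCommonMeanBitRate, &var));
}
```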
| 466 | 466 |
| 467 bool MediaFoundationVideoEncodeAccelerator::IsResolutionSupported( | 467 bool MediaFoundationVideoEncodeAccelerator::IsResolutionSupported( |
| 468 const gfx::Size& resolution) { | 468 const gfx::Size& resolution) { |
| 469 DCHECK(main_client_task_runner_->BelongsToCurrentThread()); | 469 DCHECK(main_client_task_runner_->BelongsToCurrentThread()); |
| 470 DCHECK(encoder_); | 470 DCHECK(encoder_); |
| 471 | 471 |
| 472 HRESULT hr = | 472 HRESULT hr = |
| 473 MFSetAttributeSize(imf_output_media_type_.get(), MF_MT_FRAME_SIZE, | 473 MFSetAttributeSize(imf_output_media_type_.Get(), MF_MT_FRAME_SIZE, |
| 474 resolution.width(), resolution.height()); | 474 resolution.width(), resolution.height()); |
| 475 RETURN_ON_HR_FAILURE(hr, "Couldn't set frame size", false); | 475 RETURN_ON_HR_FAILURE(hr, "Couldn't set frame size", false); |
| 476 hr = encoder_->SetOutputType(output_stream_id_, imf_output_media_type_.get(), | 476 hr = encoder_->SetOutputType(output_stream_id_, imf_output_media_type_.Get(), |
| 477 0); | 477 0); |
| 478 RETURN_ON_HR_FAILURE(hr, "Couldn't set output media type", false); | 478 RETURN_ON_HR_FAILURE(hr, "Couldn't set output media type", false); |
| 479 | 479 |
| 480 hr = MFSetAttributeSize(imf_input_media_type_.get(), MF_MT_FRAME_SIZE, | 480 hr = MFSetAttributeSize(imf_input_media_type_.Get(), MF_MT_FRAME_SIZE, |
| 481 resolution.width(), resolution.height()); | 481 resolution.width(), resolution.height()); |
| 482 RETURN_ON_HR_FAILURE(hr, "Couldn't set frame size", false); | 482 RETURN_ON_HR_FAILURE(hr, "Couldn't set frame size", false); |
| 483 hr = encoder_->SetInputType(input_stream_id_, imf_input_media_type_.get(), 0); | 483 hr = encoder_->SetInputType(input_stream_id_, imf_input_media_type_.Get(), 0); |
| 484 RETURN_ON_HR_FAILURE(hr, "Couldn't set input media type", false); | 484 RETURN_ON_HR_FAILURE(hr, "Couldn't set input media type", false); |
| 485 | 485 |
| 486 return true; | 486 return true; |
| 487 } | 487 } |
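IsResolutionSupported() above probes a resolution by rewriting the already-negotiated imf_output_media_type_ / imf_input_media_type_ and re-setting them on the encoder, so a successful probe also changes the configured size as a side effect. A hedged alternative sketch (names are mine): probe with a throwaway copy of the media type and the MFT_SET_TYPE_TEST_ONLY flag, which asks the transform to validate the type without committing it.

```cpp
#include <mfapi.h>
#include <mftransform.h>
#include <wrl/client.h>

// Sketch: ask the encoder whether it would accept |width| x |height| on its
// output stream, without disturbing the currently negotiated media type.
bool ProbeOutputResolution(IMFTransform* encoder,
                           DWORD output_stream_id,
                           IMFMediaType* negotiated_output_type,
                           UINT32 width,
                           UINT32 height) {
  Microsoft::WRL::ComPtr<IMFMediaType> probe;
  if (FAILED(MFCreateMediaType(&probe)) ||
      FAILED(negotiated_output_type->CopyAllItems(probe.Get())) ||
      FAILED(MFSetAttributeSize(probe.Get(), MF_MT_FRAME_SIZE, width, height)))
    return false;
  // MFT_SET_TYPE_TEST_ONLY: validate the type but do not actually set it.
  return SUCCEEDED(encoder->SetOutputType(output_stream_id, probe.Get(),
                                          MFT_SET_TYPE_TEST_ONLY));
}
```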
| 488 | 488 |
| 489 void MediaFoundationVideoEncodeAccelerator::NotifyError( | 489 void MediaFoundationVideoEncodeAccelerator::NotifyError( |
| 490 VideoEncodeAccelerator::Error error) { | 490 VideoEncodeAccelerator::Error error) { |
| 491 DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread()); | 491 DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread()); |
| 492 main_client_task_runner_->PostTask( | 492 main_client_task_runner_->PostTask( |
| 493 FROM_HERE, base::Bind(&Client::NotifyError, main_client_, error)); | 493 FROM_HERE, base::Bind(&Client::NotifyError, main_client_, error)); |
| 494 } | 494 } |
| 495 | 495 |
| 496 void MediaFoundationVideoEncodeAccelerator::EncodeTask( | 496 void MediaFoundationVideoEncodeAccelerator::EncodeTask( |
| 497 scoped_refptr<VideoFrame> frame, | 497 scoped_refptr<VideoFrame> frame, |
| 498 bool force_keyframe) { | 498 bool force_keyframe) { |
| 499 DVLOG(3) << __func__; | 499 DVLOG(3) << __func__; |
| 500 DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread()); | 500 DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread()); |
| 501 | 501 |
| 502 base::win::ScopedComPtr<IMFMediaBuffer> input_buffer; | 502 base::win::ScopedComPtr<IMFMediaBuffer> input_buffer; |
| 503 input_sample_->GetBufferByIndex(0, input_buffer.Receive()); | 503 input_sample_->GetBufferByIndex(0, input_buffer.Receive()); |
| 504 | 504 |
| 505 { | 505 { |
| 506 MediaBufferScopedPointer scoped_buffer(input_buffer.get()); | 506 MediaBufferScopedPointer scoped_buffer(input_buffer.Get()); |
| 507 DCHECK(scoped_buffer.get()); | 507 DCHECK(scoped_buffer.get()); |
| 508 libyuv::I420Copy(frame->visible_data(VideoFrame::kYPlane), | 508 libyuv::I420Copy(frame->visible_data(VideoFrame::kYPlane), |
| 509 frame->stride(VideoFrame::kYPlane), | 509 frame->stride(VideoFrame::kYPlane), |
| 510 frame->visible_data(VideoFrame::kVPlane), | 510 frame->visible_data(VideoFrame::kVPlane), |
| 511 frame->stride(VideoFrame::kVPlane), | 511 frame->stride(VideoFrame::kVPlane), |
| 512 frame->visible_data(VideoFrame::kUPlane), | 512 frame->visible_data(VideoFrame::kUPlane), |
| 513 frame->stride(VideoFrame::kUPlane), scoped_buffer.get(), | 513 frame->stride(VideoFrame::kUPlane), scoped_buffer.get(), |
| 514 frame->stride(VideoFrame::kYPlane), | 514 frame->stride(VideoFrame::kYPlane), |
| 515 scoped_buffer.get() + u_plane_offset_, | 515 scoped_buffer.get() + u_plane_offset_, |
| 516 frame->stride(VideoFrame::kUPlane), | 516 frame->stride(VideoFrame::kUPlane), |
| 517 scoped_buffer.get() + v_plane_offset_, | 517 scoped_buffer.get() + v_plane_offset_, |
| 518 frame->stride(VideoFrame::kVPlane), | 518 frame->stride(VideoFrame::kVPlane), |
| 519 input_visible_size_.width(), input_visible_size_.height()); | 519 input_visible_size_.width(), input_visible_size_.height()); |
| 520 } | 520 } |
| 521 | 521 |
| 522 input_sample_->SetSampleTime(frame->timestamp().InMicroseconds() * | 522 input_sample_->SetSampleTime(frame->timestamp().InMicroseconds() * |
| 523 kOneMicrosecondInMFSampleTimeUnits); | 523 kOneMicrosecondInMFSampleTimeUnits); |
| 524 UINT64 sample_duration = 1; | 524 UINT64 sample_duration = 1; |
| 525 HRESULT hr = | 525 HRESULT hr = |
| 526 MFFrameRateToAverageTimePerFrame(frame_rate_, 1, &sample_duration); | 526 MFFrameRateToAverageTimePerFrame(frame_rate_, 1, &sample_duration); |
| 527 RETURN_ON_HR_FAILURE(hr, "Couldn't calculate sample duration", ); | 527 RETURN_ON_HR_FAILURE(hr, "Couldn't calculate sample duration", ); |
| 528 input_sample_->SetSampleDuration(sample_duration); | 528 input_sample_->SetSampleDuration(sample_duration); |
| 529 | 529 |
| 530 // Release frame after input is copied. | 530 // Release frame after input is copied. |
| 531 frame = nullptr; | 531 frame = nullptr; |
| 532 | 532 |
| 533 hr = encoder_->ProcessInput(input_stream_id_, input_sample_.get(), 0); | 533 hr = encoder_->ProcessInput(input_stream_id_, input_sample_.Get(), 0); |
| 534 // According to MSDN, if the encoder returns MF_E_NOTACCEPTING, we need to | 534 // According to MSDN, if the encoder returns MF_E_NOTACCEPTING, we need to |
| 535 // try processing the output. This error indicates that the encoder does not | 535 // try processing the output. This error indicates that the encoder does not |
| 536 // accept any more input data. | 536 // accept any more input data. |
| 537 if (hr == MF_E_NOTACCEPTING) { | 537 if (hr == MF_E_NOTACCEPTING) { |
| 538 DVLOG(3) << "MF_E_NOTACCEPTING"; | 538 DVLOG(3) << "MF_E_NOTACCEPTING"; |
| 539 ProcessOutput(); | 539 ProcessOutput(); |
| 540 hr = encoder_->ProcessInput(input_stream_id_, input_sample_.get(), 0); | 540 hr = encoder_->ProcessInput(input_stream_id_, input_sample_.Get(), 0); |
| 541 if (!SUCCEEDED(hr)) { | 541 if (!SUCCEEDED(hr)) { |
| 542 NotifyError(kPlatformFailureError); | 542 NotifyError(kPlatformFailureError); |
| 543 RETURN_ON_HR_FAILURE(hr, "Couldn't encode", ); | 543 RETURN_ON_HR_FAILURE(hr, "Couldn't encode", ); |
| 544 } | 544 } |
| 545 } else if (!SUCCEEDED(hr)) { | 545 } else if (!SUCCEEDED(hr)) { |
| 546 NotifyError(kPlatformFailureError); | 546 NotifyError(kPlatformFailureError); |
| 547 RETURN_ON_HR_FAILURE(hr, "Couldn't encode", ); | 547 RETURN_ON_HR_FAILURE(hr, "Couldn't encode", ); |
| 548 } | 548 } |
| 549 DVLOG(3) << "Sent for encode " << hr; | 549 DVLOG(3) << "Sent for encode " << hr; |
| 550 | 550 |
| 551 ProcessOutput(); | 551 ProcessOutput(); |
| 552 } | 552 } |
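Two notes on EncodeTask() above. First, the I420Copy call passes the source V plane where the destination's u_plane_offset_ chroma plane is written and the source U plane for v_plane_offset_, i.e. it swaps the chroma planes, presumably to produce the YV12 plane order negotiated as the encoder's input type. Second, the MF_E_NOTACCEPTING handling is the standard MFT feed/drain pattern; a hedged standalone sketch of it (function names are illustrative, not Chromium's):

```cpp
#include <mferror.h>      // MF_E_NOTACCEPTING
#include <mftransform.h>  // IMFTransform, IMFSample

// Sketch of the feed pattern: offer the sample; if the transform is full
// (MF_E_NOTACCEPTING), drain pending output once and offer the sample again.
// DrainOutput stands in for ProcessOutput() in the code above.
HRESULT FeedSample(IMFTransform* encoder,
                   DWORD input_stream_id,
                   IMFSample* sample,
                   void (*DrainOutput)(IMFTransform*)) {
  HRESULT hr = encoder->ProcessInput(input_stream_id, sample, 0);
  if (hr == MF_E_NOTACCEPTING) {
    DrainOutput(encoder);  // Pulling encoded output frees internal buffers.
    hr = encoder->ProcessInput(input_stream_id, sample, 0);
  }
  return hr;
}
```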
| 553 | 553 |
| 554 void MediaFoundationVideoEncodeAccelerator::ProcessOutput() { | 554 void MediaFoundationVideoEncodeAccelerator::ProcessOutput() { |
| 555 DVLOG(3) << __func__; | 555 DVLOG(3) << __func__; |
| 556 DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread()); | 556 DCHECK(encoder_thread_task_runner_->BelongsToCurrentThread()); |
| 557 | 557 |
| 558 DWORD output_status = 0; | 558 DWORD output_status = 0; |
| 559 HRESULT hr = encoder_->GetOutputStatus(&output_status); | 559 HRESULT hr = encoder_->GetOutputStatus(&output_status); |
| 560 RETURN_ON_HR_FAILURE(hr, "Couldn't get output status", ); | 560 RETURN_ON_HR_FAILURE(hr, "Couldn't get output status", ); |
| 561 if (output_status != MFT_OUTPUT_STATUS_SAMPLE_READY) { | 561 if (output_status != MFT_OUTPUT_STATUS_SAMPLE_READY) { |
| 562 DVLOG(3) << "Output isnt ready"; | 562 DVLOG(3) << "Output isnt ready"; |
| 563 return; | 563 return; |
| 564 } | 564 } |
| 565 | 565 |
| 566 MFT_OUTPUT_DATA_BUFFER output_data_buffer = {0}; | 566 MFT_OUTPUT_DATA_BUFFER output_data_buffer = {0}; |
| 567 output_data_buffer.dwStreamID = 0; | 567 output_data_buffer.dwStreamID = 0; |
| 568 output_data_buffer.dwStatus = 0; | 568 output_data_buffer.dwStatus = 0; |
| 569 output_data_buffer.pEvents = NULL; | 569 output_data_buffer.pEvents = NULL; |
| 570 output_data_buffer.pSample = output_sample_.get(); | 570 output_data_buffer.pSample = output_sample_.Get(); |
| 571 DWORD status = 0; | 571 DWORD status = 0; |
| 572 hr = encoder_->ProcessOutput(output_stream_id_, 1, &output_data_buffer, | 572 hr = encoder_->ProcessOutput(output_stream_id_, 1, &output_data_buffer, |
| 573 &status); | 573 &status); |
| 574 if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) { | 574 if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) { |
| 575 DVLOG(3) << "MF_E_TRANSFORM_NEED_MORE_INPUT" << status; | 575 DVLOG(3) << "MF_E_TRANSFORM_NEED_MORE_INPUT" << status; |
| 576 return; | 576 return; |
| 577 } | 577 } |
| 578 RETURN_ON_HR_FAILURE(hr, "Couldn't get encoded data", ); | 578 RETURN_ON_HR_FAILURE(hr, "Couldn't get encoded data", ); |
| 579 DVLOG(3) << "Got encoded data " << hr; | 579 DVLOG(3) << "Got encoded data " << hr; |
| 580 | 580 |
| 581 base::win::ScopedComPtr<IMFMediaBuffer> output_buffer; | 581 base::win::ScopedComPtr<IMFMediaBuffer> output_buffer; |
| 582 hr = output_sample_->GetBufferByIndex(0, output_buffer.Receive()); | 582 hr = output_sample_->GetBufferByIndex(0, output_buffer.Receive()); |
| 583 RETURN_ON_HR_FAILURE(hr, "Couldn't get buffer by index", ); | 583 RETURN_ON_HR_FAILURE(hr, "Couldn't get buffer by index", ); |
| 584 DWORD size = 0; | 584 DWORD size = 0; |
| 585 hr = output_buffer->GetCurrentLength(&size); | 585 hr = output_buffer->GetCurrentLength(&size); |
| 586 RETURN_ON_HR_FAILURE(hr, "Couldn't get buffer length", ); | 586 RETURN_ON_HR_FAILURE(hr, "Couldn't get buffer length", ); |
| 587 | 587 |
| 588 base::TimeDelta timestamp; | 588 base::TimeDelta timestamp; |
| 589 LONGLONG sample_time; | 589 LONGLONG sample_time; |
| 590 hr = output_sample_->GetSampleTime(&sample_time); | 590 hr = output_sample_->GetSampleTime(&sample_time); |
| 591 if (SUCCEEDED(hr)) { | 591 if (SUCCEEDED(hr)) { |
| 592 timestamp = base::TimeDelta::FromMicroseconds( | 592 timestamp = base::TimeDelta::FromMicroseconds( |
| 593 sample_time / kOneMicrosecondInMFSampleTimeUnits); | 593 sample_time / kOneMicrosecondInMFSampleTimeUnits); |
| 594 } | 594 } |
| 595 | 595 |
| 596 const bool keyframe = MFGetAttributeUINT32( | 596 const bool keyframe = MFGetAttributeUINT32( |
| 597 output_sample_.get(), MFSampleExtension_CleanPoint, false); | 597 output_sample_.Get(), MFSampleExtension_CleanPoint, false); |
| 598 DVLOG(3) << "We HAVE encoded data with size:" << size << " keyframe " | 598 DVLOG(3) << "We HAVE encoded data with size:" << size << " keyframe " |
| 599 << keyframe; | 599 << keyframe; |
| 600 | 600 |
| 601 if (bitstream_buffer_queue_.empty()) { | 601 if (bitstream_buffer_queue_.empty()) { |
| 602 DVLOG(3) << "No bitstream buffers."; | 602 DVLOG(3) << "No bitstream buffers."; |
| 603 // We need to copy the output so that encoding can continue. | 603 // We need to copy the output so that encoding can continue. |
| 604 std::unique_ptr<EncodeOutput> encode_output( | 604 std::unique_ptr<EncodeOutput> encode_output( |
| 605 new EncodeOutput(size, keyframe, timestamp)); | 605 new EncodeOutput(size, keyframe, timestamp)); |
| 606 { | 606 { |
| 607 MediaBufferScopedPointer scoped_buffer(output_buffer.get()); | 607 MediaBufferScopedPointer scoped_buffer(output_buffer.Get()); |
| 608 memcpy(encode_output->memory(), scoped_buffer.get(), size); | 608 memcpy(encode_output->memory(), scoped_buffer.get(), size); |
| 609 } | 609 } |
| 610 encoder_output_queue_.push_back(std::move(encode_output)); | 610 encoder_output_queue_.push_back(std::move(encode_output)); |
| 611 return; | 611 return; |
| 612 } | 612 } |
| 613 | 613 |
| 614 std::unique_ptr<MediaFoundationVideoEncodeAccelerator::BitstreamBufferRef> | 614 std::unique_ptr<MediaFoundationVideoEncodeAccelerator::BitstreamBufferRef> |
| 615 buffer_ref = std::move(bitstream_buffer_queue_.front()); | 615 buffer_ref = std::move(bitstream_buffer_queue_.front()); |
| 616 bitstream_buffer_queue_.pop_front(); | 616 bitstream_buffer_queue_.pop_front(); |
| 617 | 617 |
| 618 { | 618 { |
| 619 MediaBufferScopedPointer scoped_buffer(output_buffer.get()); | 619 MediaBufferScopedPointer scoped_buffer(output_buffer.Get()); |
| 620 memcpy(buffer_ref->shm->memory(), scoped_buffer.get(), size); | 620 memcpy(buffer_ref->shm->memory(), scoped_buffer.get(), size); |
| 621 } | 621 } |
| 622 | 622 |
| 623 encode_client_task_runner_->PostTask( | 623 encode_client_task_runner_->PostTask( |
| 624 FROM_HERE, base::Bind(&Client::BitstreamBufferReady, encode_client_, | 624 FROM_HERE, base::Bind(&Client::BitstreamBufferReady, encode_client_, |
| 625 buffer_ref->id, size, keyframe, timestamp)); | 625 buffer_ref->id, size, keyframe, timestamp)); |
| 626 | 626 |
| 627 // Keep calling ProcessOutput recursively until MF_E_TRANSFORM_NEED_MORE_INPUT | 627 // Keep calling ProcessOutput recursively until MF_E_TRANSFORM_NEED_MORE_INPUT |
| 628 // is returned to flush out all the output. | 628 // is returned to flush out all the output. |
| 629 ProcessOutput(); | 629 ProcessOutput(); |
| (...skipping 66 matching lines...) |
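On the drain loop that ends ProcessOutput() above: IMFTransform::ProcessOutput's first parameter is a flags value (dwFlags), not a stream id; the output stream is selected through dwStreamID in the MFT_OUTPUT_DATA_BUFFER, which the code sets to 0. Passing output_stream_id_ as the first argument happens to be harmless when that id is 0, but it is worth double-checking. For reference, a hedged iterative sketch of the same drain (names are mine; the real code also copies each sample into a bitstream buffer or an internal queue before continuing):

```cpp
#include <mferror.h>
#include <mftransform.h>

// Sketch: pull encoded samples until the transform asks for more input.
void DrainAllOutput(IMFTransform* encoder,
                    DWORD output_stream_id,
                    IMFSample* reusable_sample) {
  for (;;) {
    MFT_OUTPUT_DATA_BUFFER output = {};
    output.dwStreamID = output_stream_id;
    output.pSample = reusable_sample;  // Client-allocated sample, reused.
    DWORD status = 0;
    HRESULT hr = encoder->ProcessOutput(0 /* dwFlags */, 1, &output, &status);
    if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT)
      break;  // Nothing more to emit until new input arrives.
    if (FAILED(hr))
      break;  // The real code surfaces this via RETURN_ON_HR_FAILURE.
    if (output.pEvents)
      output.pEvents->Release();  // MFTs may attach an event collection.
    // ... copy output.pSample's buffer out to the client here ...
  }
}
```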
| 696 void MediaFoundationVideoEncodeAccelerator::ReleaseEncoderResources() { | 696 void MediaFoundationVideoEncodeAccelerator::ReleaseEncoderResources() { |
| 697 encoder_.Reset(); | 697 encoder_.Reset(); |
| 698 codec_api_.Reset(); | 698 codec_api_.Reset(); |
| 699 imf_input_media_type_.Reset(); | 699 imf_input_media_type_.Reset(); |
| 700 imf_output_media_type_.Reset(); | 700 imf_output_media_type_.Reset(); |
| 701 input_sample_.Reset(); | 701 input_sample_.Reset(); |
| 702 output_sample_.Reset(); | 702 output_sample_.Reset(); |
| 703 } | 703 } |
| 704 | 704 |
| 705 } // namespace content | 705 } // namespace content |