Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 // Copyright 2013 The Chromium Authors. All rights reserved. | 1 // Copyright 2013 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "content/renderer/media/gpu/rtc_video_encoder.h" | 5 #include "content/renderer/media/gpu/rtc_video_encoder.h" |
| 6 | 6 |
| 7 #include <string.h> | 7 #include <string.h> |
| 8 | 8 |
| 9 #include <deque> | |
| 10 | |
| 9 #include "base/bind.h" | 11 #include "base/bind.h" |
| 10 #include "base/location.h" | 12 #include "base/location.h" |
| 11 #include "base/logging.h" | 13 #include "base/logging.h" |
| 12 #include "base/macros.h" | 14 #include "base/macros.h" |
| 13 #include "base/memory/scoped_vector.h" | 15 #include "base/memory/scoped_vector.h" |
| 14 #include "base/metrics/histogram_macros.h" | 16 #include "base/metrics/histogram_macros.h" |
| 15 #include "base/numerics/safe_conversions.h" | 17 #include "base/numerics/safe_conversions.h" |
| 16 #include "base/rand_util.h" | 18 #include "base/rand_util.h" |
| 17 #include "base/single_thread_task_runner.h" | 19 #include "base/single_thread_task_runner.h" |
| 18 #include "base/synchronization/lock.h" | 20 #include "base/synchronization/lock.h" |
| 19 #include "base/synchronization/waitable_event.h" | 21 #include "base/synchronization/waitable_event.h" |
| 20 #include "base/threading/thread_task_runner_handle.h" | 22 #include "base/threading/thread_task_runner_handle.h" |
| 23 #include "base/time/time.h" | |
| 21 #include "media/base/bind_to_current_loop.h" | 24 #include "media/base/bind_to_current_loop.h" |
| 22 #include "media/base/bitstream_buffer.h" | 25 #include "media/base/bitstream_buffer.h" |
| 23 #include "media/base/video_frame.h" | 26 #include "media/base/video_frame.h" |
| 24 #include "media/base/video_util.h" | 27 #include "media/base/video_util.h" |
| 25 #include "media/filters/h264_parser.h" | 28 #include "media/filters/h264_parser.h" |
| 26 #include "media/renderers/gpu_video_accelerator_factories.h" | 29 #include "media/renderers/gpu_video_accelerator_factories.h" |
| 27 #include "media/video/video_encode_accelerator.h" | 30 #include "media/video/video_encode_accelerator.h" |
| 28 #include "third_party/libyuv/include/libyuv.h" | 31 #include "third_party/libyuv/include/libyuv.h" |
| 29 #include "third_party/webrtc/base/timeutils.h" | 32 #include "third_party/webrtc/base/timeutils.h" |
| 30 | 33 |
| 31 namespace content { | 34 namespace content { |
| 32 | 35 |
| 33 namespace { | 36 namespace { |
| 34 | 37 |
| 38 struct RTCTimestamps { | |
| 39 RTCTimestamps(const base::TimeDelta& media_timestamp, int32_t rtp_timestamp) | |
| 40 : media_timestamp_in_microseconds(media_timestamp.InMicroseconds()), | |
| 41 rtp_timestamp(rtp_timestamp) {} | |
| 42 const int64_t media_timestamp_in_microseconds; | |
| 43 const int32_t rtp_timestamp; | |
| 44 | |
| 45 private: | |
| 46 DISALLOW_IMPLICIT_CONSTRUCTORS(RTCTimestamps); | |
| 47 }; | |
| 48 | |
| 35 // Translate from webrtc::VideoCodecType and webrtc::VideoCodec to | 49 // Translate from webrtc::VideoCodecType and webrtc::VideoCodec to |
| 36 // media::VideoCodecProfile. | 50 // media::VideoCodecProfile. |
| 37 media::VideoCodecProfile WebRTCVideoCodecToVideoCodecProfile( | 51 media::VideoCodecProfile WebRTCVideoCodecToVideoCodecProfile( |
| 38 webrtc::VideoCodecType type, | 52 webrtc::VideoCodecType type, |
| 39 const webrtc::VideoCodec* codec_settings) { | 53 const webrtc::VideoCodec* codec_settings) { |
| 40 DCHECK_EQ(type, codec_settings->codecType); | 54 DCHECK_EQ(type, codec_settings->codecType); |
| 41 switch (type) { | 55 switch (type) { |
| 42 case webrtc::kVideoCodecVP8: | 56 case webrtc::kVideoCodecVP8: |
| 43 return media::VP8PROFILE_ANY; | 57 return media::VP8PROFILE_ANY; |
| 44 case webrtc::kVideoCodecH264: | 58 case webrtc::kVideoCodecH264: |
| (...skipping 156 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 201 // webrtc::VideoEncoder expects InitEncode() and Encode() to be synchronous. | 215 // webrtc::VideoEncoder expects InitEncode() and Encode() to be synchronous. |
| 202 // Do this by waiting on the |async_waiter_| and returning the return value in | 216 // Do this by waiting on the |async_waiter_| and returning the return value in |
| 203 // |async_retval_| when initialization completes, encoding completes, or | 217 // |async_retval_| when initialization completes, encoding completes, or |
| 204 // an error occurs. | 218 // an error occurs. |
| 205 base::WaitableEvent* async_waiter_; | 219 base::WaitableEvent* async_waiter_; |
| 206 int32_t* async_retval_; | 220 int32_t* async_retval_; |
| 207 | 221 |
| 208 // The underlying VEA to perform encoding on. | 222 // The underlying VEA to perform encoding on. |
| 209 std::unique_ptr<media::VideoEncodeAccelerator> video_encoder_; | 223 std::unique_ptr<media::VideoEncodeAccelerator> video_encoder_; |
| 210 | 224 |
| 225 // Used to match the encoded frame timestamp with WebRTC's given RTP | |
| 226 // timestamp. | |
| 227 std::deque<RTCTimestamps> pending_timestamps_; | |
| 228 | |
| 211 // Next input frame. Since there is at most one next frame, a single-element | 229 // Next input frame. Since there is at most one next frame, a single-element |
| 212 // queue is sufficient. | 230 // queue is sufficient. |
| 213 const webrtc::VideoFrame* input_next_frame_; | 231 const webrtc::VideoFrame* input_next_frame_; |
| 214 | 232 |
| 215 // Whether to encode a keyframe next. | 233 // Whether to encode a keyframe next. |
| 216 bool input_next_frame_keyframe_; | 234 bool input_next_frame_keyframe_; |
| 217 | 235 |
| 218 // Frame sizes. | 236 // Frame sizes. |
| 219 gfx::Size input_frame_coded_size_; | 237 gfx::Size input_frame_coded_size_; |
| 220 gfx::Size input_visible_size_; | 238 gfx::Size input_visible_size_; |
| (...skipping 218 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 439 video_encoder_->UseOutputBitstreamBuffer(media::BitstreamBuffer( | 457 video_encoder_->UseOutputBitstreamBuffer(media::BitstreamBuffer( |
| 440 i, output_buffers_[i]->handle(), output_buffers_[i]->mapped_size())); | 458 i, output_buffers_[i]->handle(), output_buffers_[i]->mapped_size())); |
| 441 output_buffers_free_count_++; | 459 output_buffers_free_count_++; |
| 442 } | 460 } |
| 443 DCHECK_EQ(GetStatus(), WEBRTC_VIDEO_CODEC_UNINITIALIZED); | 461 DCHECK_EQ(GetStatus(), WEBRTC_VIDEO_CODEC_UNINITIALIZED); |
| 444 SetStatus(WEBRTC_VIDEO_CODEC_OK); | 462 SetStatus(WEBRTC_VIDEO_CODEC_OK); |
| 445 SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK); | 463 SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK); |
| 446 } | 464 } |
| 447 | 465 |
| 448 void RTCVideoEncoder::Impl::BitstreamBufferReady(int32_t bitstream_buffer_id, | 466 void RTCVideoEncoder::Impl::BitstreamBufferReady(int32_t bitstream_buffer_id, |
| 449 size_t payload_size, | 467 size_t payload_size, |
| 450 bool key_frame, | 468 bool key_frame, |
| 451 base::TimeDelta timestamp) { | 469 base::TimeDelta timestamp) { |
| 452 DVLOG(3) << "Impl::BitstreamBufferReady(): bitstream_buffer_id=" | 470 DVLOG(3) << "Impl::BitstreamBufferReady(): bitstream_buffer_id=" |
| 453 << bitstream_buffer_id << ", payload_size=" << payload_size | 471 << bitstream_buffer_id << ", payload_size=" << payload_size |
| 454 << ", key_frame=" << key_frame | 472 << ", key_frame=" << key_frame |
| 455 << ", timestamp ms=" << timestamp.InMilliseconds(); | 473 << ", timestamp ms=" << timestamp.InMilliseconds(); |
| 456 DCHECK(thread_checker_.CalledOnValidThread()); | 474 DCHECK(thread_checker_.CalledOnValidThread()); |
| 457 | 475 |
| 458 if (bitstream_buffer_id < 0 || | 476 if (bitstream_buffer_id < 0 || |
| 459 bitstream_buffer_id >= static_cast<int>(output_buffers_.size())) { | 477 bitstream_buffer_id >= static_cast<int>(output_buffers_.size())) { |
| 460 LogAndNotifyError(FROM_HERE, "invalid bitstream_buffer_id", | 478 LogAndNotifyError(FROM_HERE, "invalid bitstream_buffer_id", |
| 461 media::VideoEncodeAccelerator::kPlatformFailureError); | 479 media::VideoEncodeAccelerator::kPlatformFailureError); |
| 462 return; | 480 return; |
| 463 } | 481 } |
| 464 base::SharedMemory* output_buffer = output_buffers_[bitstream_buffer_id]; | 482 base::SharedMemory* output_buffer = output_buffers_[bitstream_buffer_id]; |
| 465 if (payload_size > output_buffer->mapped_size()) { | 483 if (payload_size > output_buffer->mapped_size()) { |
| 466 LogAndNotifyError(FROM_HERE, "invalid payload_size", | 484 LogAndNotifyError(FROM_HERE, "invalid payload_size", |
| 467 media::VideoEncodeAccelerator::kPlatformFailureError); | 485 media::VideoEncodeAccelerator::kPlatformFailureError); |
| 468 return; | 486 return; |
| 469 } | 487 } |
| 470 output_buffers_free_count_--; | 488 output_buffers_free_count_--; |
| 471 | 489 |
| 472 // Derive the capture time (in ms) and RTP timestamp (in 90KHz ticks). | 490 // Derive the capture time (in ms) and RTP timestamp (in 90KHz ticks). |
| 473 int64_t capture_time_us, capture_time_ms; | 491 int64_t capture_time_us, capture_time_ms; |
| 474 uint32_t rtp_timestamp; | 492 base::Optional<uint32_t> rtp_timestamp; |
| 475 | 493 |
| 476 if (!timestamp.is_zero()) { | 494 if (!timestamp.is_zero()) { |
| 477 capture_time_us = timestamp.InMicroseconds();; | 495 capture_time_us = timestamp.InMicroseconds(); |
| 478 capture_time_ms = timestamp.InMilliseconds(); | 496 capture_time_ms = timestamp.InMilliseconds(); |
| 497 // Pop timestamps until we have a match. | |
| 498 while (!pending_timestamps_.empty()) { | |
| 499 const auto& front_timestamps = pending_timestamps_.front(); | |
| 500 if (front_timestamps.media_timestamp_in_microseconds == | |
| 501 timestamp.InMicroseconds()) { | |
| 502 rtp_timestamp = front_timestamps.rtp_timestamp; | |
| 503 pending_timestamps_.pop_front(); | |
| 504 break; | |
| 505 } | |
| 506 pending_timestamps_.pop_front(); | |
| 507 } | |
|
pbos-webrtc
2017/02/15 20:44:50
Can we say that if pending_timestamps_ ever does not contain a matching entry, that's a bug (and DCHECK on it)?
emircan
2017/02/15 21:03:16
Done.
nisse-chromium (ooo August 14)
2017/02/16 07:49:50
That sounds like a failure event we might want some logging or metrics for.
| |
| 508 DCHECK(rtp_timestamp.has_value()); | |
| 479 } else { | 509 } else { |
| 480 // Fallback to the current time if encoder does not provide timestamp. | 510 // Fallback to the current time if encoder does not provide timestamp. |
| 481 capture_time_us = rtc::TimeMicros(); | 511 capture_time_us = rtc::TimeMicros(); |
| 482 capture_time_ms = capture_time_us / base::Time::kMicrosecondsPerMillisecond; | 512 capture_time_ms = capture_time_us / base::Time::kMicrosecondsPerMillisecond; |
| 513 pending_timestamps_.clear(); | |
| 483 } | 514 } |
| 484 // RTP timestamp can wrap around. Get the lower 32 bits. | 515 |
| 485 rtp_timestamp = static_cast<uint32_t>( | 516 if (!rtp_timestamp.has_value()) { |
| 486 capture_time_us * 90 / base::Time::kMicrosecondsPerMillisecond); | 517 // RTP timestamp can wrap around. Get the lower 32 bits. |
| 518 rtp_timestamp = static_cast<uint32_t>( | |
| 519 capture_time_us * 90 / base::Time::kMicrosecondsPerMillisecond); | |
| 520 } | |
| 487 | 521 |
| 488 webrtc::EncodedImage image( | 522 webrtc::EncodedImage image( |
| 489 reinterpret_cast<uint8_t*>(output_buffer->memory()), payload_size, | 523 reinterpret_cast<uint8_t*>(output_buffer->memory()), payload_size, |
| 490 output_buffer->mapped_size()); | 524 output_buffer->mapped_size()); |
| 491 image._encodedWidth = input_visible_size_.width(); | 525 image._encodedWidth = input_visible_size_.width(); |
| 492 image._encodedHeight = input_visible_size_.height(); | 526 image._encodedHeight = input_visible_size_.height(); |
| 493 image._timeStamp = rtp_timestamp; | 527 image._timeStamp = rtp_timestamp.value(); |
| 494 image.capture_time_ms_ = capture_time_ms; | 528 image.capture_time_ms_ = capture_time_ms; |
| 495 image._frameType = | 529 image._frameType = |
| 496 (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta); | 530 (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta); |
| 497 image._completeFrame = true; | 531 image._completeFrame = true; |
| 498 | 532 |
| 499 ReturnEncodedImage(image, bitstream_buffer_id, picture_id_); | 533 ReturnEncodedImage(image, bitstream_buffer_id, picture_id_); |
| 500 // Picture ID must wrap after reaching the maximum. | 534 // Picture ID must wrap after reaching the maximum. |
| 501 picture_id_ = (picture_id_ + 1) & 0x7FFF; | 535 picture_id_ = (picture_id_ + 1) & 0x7FFF; |
| 502 } | 536 } |
| 503 | 537 |
| (...skipping 99 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 603 frame->visible_rect().width(), | 637 frame->visible_rect().width(), |
| 604 frame->visible_rect().height(), | 638 frame->visible_rect().height(), |
| 605 libyuv::kFilterBox)) { | 639 libyuv::kFilterBox)) { |
| 606 LogAndNotifyError(FROM_HERE, "Failed to copy buffer", | 640 LogAndNotifyError(FROM_HERE, "Failed to copy buffer", |
| 607 media::VideoEncodeAccelerator::kPlatformFailureError); | 641 media::VideoEncodeAccelerator::kPlatformFailureError); |
| 608 return; | 642 return; |
| 609 } | 643 } |
| 610 } | 644 } |
| 611 frame->AddDestructionObserver(media::BindToCurrentLoop( | 645 frame->AddDestructionObserver(media::BindToCurrentLoop( |
| 612 base::Bind(&RTCVideoEncoder::Impl::EncodeFrameFinished, this, index))); | 646 base::Bind(&RTCVideoEncoder::Impl::EncodeFrameFinished, this, index))); |
| 647 DCHECK(std::find_if(pending_timestamps_.begin(), pending_timestamps_.end(), | |
| 648 [&frame](const RTCTimestamps& entry) { | |
| 649 return entry.media_timestamp_in_microseconds == | |
| 650 frame->timestamp().InMicroseconds(); | |
| 651 }) == pending_timestamps_.end()); | |
| 652 pending_timestamps_.emplace_back(frame->timestamp(), next_frame->timestamp()); | |
| 653 | |
| 613 video_encoder_->Encode(frame, next_frame_keyframe); | 654 video_encoder_->Encode(frame, next_frame_keyframe); |
| 614 input_buffers_free_.pop_back(); | 655 input_buffers_free_.pop_back(); |
| 615 SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK); | 656 SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK); |
| 616 } | 657 } |
| 617 | 658 |
| 618 void RTCVideoEncoder::Impl::EncodeFrameFinished(int index) { | 659 void RTCVideoEncoder::Impl::EncodeFrameFinished(int index) { |
| 619 DVLOG(3) << "Impl::EncodeFrameFinished(): index=" << index; | 660 DVLOG(3) << "Impl::EncodeFrameFinished(): index=" << index; |
| 620 DCHECK(thread_checker_.CalledOnValidThread()); | 661 DCHECK(thread_checker_.CalledOnValidThread()); |
| 621 DCHECK_GE(index, 0); | 662 DCHECK_GE(index, 0); |
| 622 DCHECK_LT(index, static_cast<int>(input_buffers_.size())); | 663 DCHECK_LT(index, static_cast<int>(input_buffers_.size())); |
| (...skipping 257 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 880 UMA_HISTOGRAM_BOOLEAN("Media.RTCVideoEncoderInitEncodeSuccess", | 921 UMA_HISTOGRAM_BOOLEAN("Media.RTCVideoEncoderInitEncodeSuccess", |
| 881 init_retval == WEBRTC_VIDEO_CODEC_OK); | 922 init_retval == WEBRTC_VIDEO_CODEC_OK); |
| 882 if (init_retval == WEBRTC_VIDEO_CODEC_OK) { | 923 if (init_retval == WEBRTC_VIDEO_CODEC_OK) { |
| 883 UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoEncoderProfile", | 924 UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoEncoderProfile", |
| 884 profile, | 925 profile, |
| 885 media::VIDEO_CODEC_PROFILE_MAX + 1); | 926 media::VIDEO_CODEC_PROFILE_MAX + 1); |
| 886 } | 927 } |
| 887 } | 928 } |
| 888 | 929 |
| 889 } // namespace content | 930 } // namespace content |
| OLD | NEW |