Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2013 The Chromium Authors. All rights reserved. | 1 // Copyright 2013 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "content/renderer/media/gpu/rtc_video_encoder.h" | 5 #include "content/renderer/media/gpu/rtc_video_encoder.h" |
| 6 | 6 |
| 7 #include <string.h> | 7 #include <string.h> |
| 8 | 8 |
| 9 #include <deque> | |
| 10 | |
| 9 #include "base/bind.h" | 11 #include "base/bind.h" |
| 10 #include "base/location.h" | 12 #include "base/location.h" |
| 11 #include "base/logging.h" | 13 #include "base/logging.h" |
| 12 #include "base/macros.h" | 14 #include "base/macros.h" |
| 13 #include "base/memory/scoped_vector.h" | 15 #include "base/memory/scoped_vector.h" |
| 14 #include "base/metrics/histogram_macros.h" | 16 #include "base/metrics/histogram_macros.h" |
| 15 #include "base/numerics/safe_conversions.h" | 17 #include "base/numerics/safe_conversions.h" |
| 16 #include "base/rand_util.h" | 18 #include "base/rand_util.h" |
| 17 #include "base/single_thread_task_runner.h" | 19 #include "base/single_thread_task_runner.h" |
| 18 #include "base/synchronization/lock.h" | 20 #include "base/synchronization/lock.h" |
| 19 #include "base/synchronization/waitable_event.h" | 21 #include "base/synchronization/waitable_event.h" |
| 20 #include "base/threading/thread_task_runner_handle.h" | 22 #include "base/threading/thread_task_runner_handle.h" |
| 23 #include "base/time/time.h" | |
| 21 #include "media/base/bind_to_current_loop.h" | 24 #include "media/base/bind_to_current_loop.h" |
| 22 #include "media/base/bitstream_buffer.h" | 25 #include "media/base/bitstream_buffer.h" |
| 23 #include "media/base/video_frame.h" | 26 #include "media/base/video_frame.h" |
| 24 #include "media/base/video_util.h" | 27 #include "media/base/video_util.h" |
| 25 #include "media/filters/h264_parser.h" | 28 #include "media/filters/h264_parser.h" |
| 26 #include "media/renderers/gpu_video_accelerator_factories.h" | 29 #include "media/renderers/gpu_video_accelerator_factories.h" |
| 27 #include "media/video/video_encode_accelerator.h" | 30 #include "media/video/video_encode_accelerator.h" |
| 28 #include "third_party/libyuv/include/libyuv.h" | 31 #include "third_party/libyuv/include/libyuv.h" |
| 29 #include "third_party/webrtc/base/timeutils.h" | 32 #include "third_party/webrtc/base/timeutils.h" |
| 30 | 33 |
| 31 namespace content { | 34 namespace content { |
| 32 | 35 |
| 33 namespace { | 36 namespace { |
| 34 | 37 |
| 38 struct RTCTimestamps { | |
| 39 RTCTimestamps(base::TimeDelta media_timestamp, int32_t rtp_timestamp) | |
| 40 : media_timestamp_in_microseconds(media_timestamp.InMicroseconds()), | |
| 41 rtp_timestamp(rtp_timestamp) {} | |
| 42 const int64_t media_timestamp_in_microseconds; | |
| 43 const int32_t rtp_timestamp; | |
| 44 | |
| 45 private: | |
| 46 DISALLOW_IMPLICIT_CONSTRUCTORS(RTCTimestamps); | |
| 47 }; | |
| 48 | |
| 35 // Translate from webrtc::VideoCodecType and webrtc::VideoCodec to | 49 // Translate from webrtc::VideoCodecType and webrtc::VideoCodec to |
| 36 // media::VideoCodecProfile. | 50 // media::VideoCodecProfile. |
| 37 media::VideoCodecProfile WebRTCVideoCodecToVideoCodecProfile( | 51 media::VideoCodecProfile WebRTCVideoCodecToVideoCodecProfile( |
| 38 webrtc::VideoCodecType type, | 52 webrtc::VideoCodecType type, |
| 39 const webrtc::VideoCodec* codec_settings) { | 53 const webrtc::VideoCodec* codec_settings) { |
| 40 DCHECK_EQ(type, codec_settings->codecType); | 54 DCHECK_EQ(type, codec_settings->codecType); |
| 41 switch (type) { | 55 switch (type) { |
| 42 case webrtc::kVideoCodecVP8: | 56 case webrtc::kVideoCodecVP8: |
| 43 return media::VP8PROFILE_ANY; | 57 return media::VP8PROFILE_ANY; |
| 44 case webrtc::kVideoCodecH264: | 58 case webrtc::kVideoCodecH264: |
| (...skipping 156 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 201 // webrtc::VideoEncoder expects InitEncode() and Encode() to be synchronous. | 215 // webrtc::VideoEncoder expects InitEncode() and Encode() to be synchronous. |
| 202 // Do this by waiting on the |async_waiter_| and returning the return value in | 216 // Do this by waiting on the |async_waiter_| and returning the return value in |
| 203 // |async_retval_| when initialization completes, encoding completes, or | 217 // |async_retval_| when initialization completes, encoding completes, or |
| 204 // an error occurs. | 218 // an error occurs. |
| 205 base::WaitableEvent* async_waiter_; | 219 base::WaitableEvent* async_waiter_; |
| 206 int32_t* async_retval_; | 220 int32_t* async_retval_; |
| 207 | 221 |
| 208 // The underlying VEA to perform encoding on. | 222 // The underlying VEA to perform encoding on. |
| 209 std::unique_ptr<media::VideoEncodeAccelerator> video_encoder_; | 223 std::unique_ptr<media::VideoEncodeAccelerator> video_encoder_; |
| 210 | 224 |
| 225 // Used to match the encoded frame timestamp with WebRTC's given RTP | |
| 226 // timestamp. | |
| 227 std::deque<RTCTimestamps> pending_timestamps_; | |
| 228 | |
| 211 // Next input frame. Since there is at most one next frame, a single-element | 229 // Next input frame. Since there is at most one next frame, a single-element |
| 212 // queue is sufficient. | 230 // queue is sufficient. |
| 213 const webrtc::VideoFrame* input_next_frame_; | 231 const webrtc::VideoFrame* input_next_frame_; |
| 214 | 232 |
| 215 // Whether to encode a keyframe next. | 233 // Whether to encode a keyframe next. |
| 216 bool input_next_frame_keyframe_; | 234 bool input_next_frame_keyframe_; |
| 217 | 235 |
| 218 // Frame sizes. | 236 // Frame sizes. |
| 219 gfx::Size input_frame_coded_size_; | 237 gfx::Size input_frame_coded_size_; |
| 220 gfx::Size input_visible_size_; | 238 gfx::Size input_visible_size_; |
| (...skipping 243 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 464 base::SharedMemory* output_buffer = output_buffers_[bitstream_buffer_id]; | 482 base::SharedMemory* output_buffer = output_buffers_[bitstream_buffer_id]; |
| 465 if (payload_size > output_buffer->mapped_size()) { | 483 if (payload_size > output_buffer->mapped_size()) { |
| 466 LogAndNotifyError(FROM_HERE, "invalid payload_size", | 484 LogAndNotifyError(FROM_HERE, "invalid payload_size", |
| 467 media::VideoEncodeAccelerator::kPlatformFailureError); | 485 media::VideoEncodeAccelerator::kPlatformFailureError); |
| 468 return; | 486 return; |
| 469 } | 487 } |
| 470 output_buffers_free_count_--; | 488 output_buffers_free_count_--; |
| 471 | 489 |
| 472 // Derive the capture time (in ms) and RTP timestamp (in 90KHz ticks). | 490 // Derive the capture time (in ms) and RTP timestamp (in 90KHz ticks). |
| 473 int64_t capture_time_us, capture_time_ms; | 491 int64_t capture_time_us, capture_time_ms; |
| 474 uint32_t rtp_timestamp; | 492 uint32_t rtp_timestamp = 0; |
|
pbos
2017/02/10 00:05:08
Can you make this optional?
emircan
2017/02/10 17:14:55
Done.
| |
| 475 | 493 |
| 476 if (!timestamp.is_zero()) { | 494 if (!timestamp.is_zero()) { |
| 477 capture_time_us = timestamp.InMicroseconds();; | 495 capture_time_us = timestamp.InMicroseconds(); |
| 478 capture_time_ms = timestamp.InMilliseconds(); | 496 capture_time_ms = timestamp.InMilliseconds(); |
| 497 // Pop timestamps until we have a match. | |
| 498 while (!pending_timestamps_.empty()) { | |
|
pbos
2017/02/10 00:05:08
This should never be empty, right? If so there's a
emircan
2017/02/10 17:14:55
Yes. However I want to loop until all the dropped
| |
| 499 const auto& front_timestamps = pending_timestamps_.front(); | |
| 500 if (front_timestamps.media_timestamp_in_microseconds == | |
| 501 timestamp.InMicroseconds()) { | |
| 502 rtp_timestamp = front_timestamps.rtp_timestamp; | |
| 503 pending_timestamps_.pop_front(); | |
| 504 break; | |
| 505 } | |
| 506 pending_timestamps_.pop_front(); | |
| 507 } | |
|
pbos
2017/02/10 00:05:07
Can you DCHECK after this brace that rtp_timestamp
emircan
2017/02/10 17:14:55
Again, suppose HW VEA is somewhat broken and retur
| |
| 479 } else { | 508 } else { |
| 480 // Fallback to the current time if encoder does not provide timestamp. | 509 // Fallback to the current time if encoder does not provide timestamp. |
| 481 capture_time_us = rtc::TimeMicros(); | 510 capture_time_us = rtc::TimeMicros(); |
| 482 capture_time_ms = capture_time_us / base::Time::kMicrosecondsPerMillisecond; | 511 capture_time_ms = capture_time_us / base::Time::kMicrosecondsPerMillisecond; |
| 512 pending_timestamps_.clear(); | |
| 483 } | 513 } |
| 484 // RTP timestamp can wrap around. Get the lower 32 bits. | 514 |
| 485 rtp_timestamp = static_cast<uint32_t>( | 515 if (rtp_timestamp == 0) { |
|
pbos
2017/02/10 00:05:08
0 is a valid RTP timestamp. Can you gate this on w
emircan
2017/02/10 17:14:55
Done.
| |
| 486 capture_time_us * 90 / base::Time::kMicrosecondsPerMillisecond); | 516 // RTP timestamp can wrap around. Get the lower 32 bits. |
| 517 rtp_timestamp = static_cast<uint32_t>( | |
| 518 capture_time_us * 90 / base::Time::kMicrosecondsPerMillisecond); | |
| 519 } | |
| 487 | 520 |
| 488 webrtc::EncodedImage image( | 521 webrtc::EncodedImage image( |
| 489 reinterpret_cast<uint8_t*>(output_buffer->memory()), payload_size, | 522 reinterpret_cast<uint8_t*>(output_buffer->memory()), payload_size, |
| 490 output_buffer->mapped_size()); | 523 output_buffer->mapped_size()); |
| 491 image._encodedWidth = input_visible_size_.width(); | 524 image._encodedWidth = input_visible_size_.width(); |
| 492 image._encodedHeight = input_visible_size_.height(); | 525 image._encodedHeight = input_visible_size_.height(); |
| 493 image._timeStamp = rtp_timestamp; | 526 image._timeStamp = rtp_timestamp; |
| 494 image.capture_time_ms_ = capture_time_ms; | 527 image.capture_time_ms_ = capture_time_ms; |
| 495 image._frameType = | 528 image._frameType = |
| 496 (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta); | 529 (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta); |
| (...skipping 106 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 603 frame->visible_rect().width(), | 636 frame->visible_rect().width(), |
| 604 frame->visible_rect().height(), | 637 frame->visible_rect().height(), |
| 605 libyuv::kFilterBox)) { | 638 libyuv::kFilterBox)) { |
| 606 LogAndNotifyError(FROM_HERE, "Failed to copy buffer", | 639 LogAndNotifyError(FROM_HERE, "Failed to copy buffer", |
| 607 media::VideoEncodeAccelerator::kPlatformFailureError); | 640 media::VideoEncodeAccelerator::kPlatformFailureError); |
| 608 return; | 641 return; |
| 609 } | 642 } |
| 610 } | 643 } |
| 611 frame->AddDestructionObserver(media::BindToCurrentLoop( | 644 frame->AddDestructionObserver(media::BindToCurrentLoop( |
| 612 base::Bind(&RTCVideoEncoder::Impl::EncodeFrameFinished, this, index))); | 645 base::Bind(&RTCVideoEncoder::Impl::EncodeFrameFinished, this, index))); |
| 646 DCHECK(std::find_if(pending_timestamps_.begin(), pending_timestamps_.end(), | |
| 647 [&frame](const RTCTimestamps& entry) { | |
| 648 return entry.media_timestamp_in_microseconds == | |
| 649 frame->timestamp().InMicroseconds(); | |
| 650 }) == pending_timestamps_.end()); | |
| 651 pending_timestamps_.emplace_back(frame->timestamp(), next_frame->timestamp()); | |
| 652 | |
| 613 video_encoder_->Encode(frame, next_frame_keyframe); | 653 video_encoder_->Encode(frame, next_frame_keyframe); |
| 614 input_buffers_free_.pop_back(); | 654 input_buffers_free_.pop_back(); |
| 615 SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK); | 655 SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK); |
| 616 } | 656 } |
| 617 | 657 |
| 618 void RTCVideoEncoder::Impl::EncodeFrameFinished(int index) { | 658 void RTCVideoEncoder::Impl::EncodeFrameFinished(int index) { |
| 619 DVLOG(3) << "Impl::EncodeFrameFinished(): index=" << index; | 659 DVLOG(3) << "Impl::EncodeFrameFinished(): index=" << index; |
| 620 DCHECK(thread_checker_.CalledOnValidThread()); | 660 DCHECK(thread_checker_.CalledOnValidThread()); |
| 621 DCHECK_GE(index, 0); | 661 DCHECK_GE(index, 0); |
| 622 DCHECK_LT(index, static_cast<int>(input_buffers_.size())); | 662 DCHECK_LT(index, static_cast<int>(input_buffers_.size())); |
| (...skipping 257 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 880 UMA_HISTOGRAM_BOOLEAN("Media.RTCVideoEncoderInitEncodeSuccess", | 920 UMA_HISTOGRAM_BOOLEAN("Media.RTCVideoEncoderInitEncodeSuccess", |
| 881 init_retval == WEBRTC_VIDEO_CODEC_OK); | 921 init_retval == WEBRTC_VIDEO_CODEC_OK); |
| 882 if (init_retval == WEBRTC_VIDEO_CODEC_OK) { | 922 if (init_retval == WEBRTC_VIDEO_CODEC_OK) { |
| 883 UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoEncoderProfile", | 923 UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoEncoderProfile", |
| 884 profile, | 924 profile, |
| 885 media::VIDEO_CODEC_PROFILE_MAX + 1); | 925 media::VIDEO_CODEC_PROFILE_MAX + 1); |
| 886 } | 926 } |
| 887 } | 927 } |
| 888 | 928 |
| 889 } // namespace content | 929 } // namespace content |
| OLD | NEW |