| OLD | NEW |
| 1 // Copyright 2013 The Chromium Authors. All rights reserved. | 1 // Copyright 2013 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "content/renderer/media/gpu/rtc_video_encoder.h" | 5 #include "content/renderer/media/gpu/rtc_video_encoder.h" |
| 6 | 6 |
| 7 #include <string.h> | 7 #include <string.h> |
| 8 | 8 |
| 9 #include <deque> |
| 9 #include <memory> | 10 #include <memory> |
| 10 #include <vector> | 11 #include <vector> |
| 11 | 12 |
| 12 #include "base/bind.h" | 13 #include "base/bind.h" |
| 13 #include "base/location.h" | 14 #include "base/location.h" |
| 14 #include "base/logging.h" | 15 #include "base/logging.h" |
| 15 #include "base/macros.h" | 16 #include "base/macros.h" |
| 16 #include "base/metrics/histogram_macros.h" | 17 #include "base/metrics/histogram_macros.h" |
| 17 #include "base/numerics/safe_conversions.h" | 18 #include "base/numerics/safe_conversions.h" |
| 18 #include "base/rand_util.h" | 19 #include "base/rand_util.h" |
| 19 #include "base/single_thread_task_runner.h" | 20 #include "base/single_thread_task_runner.h" |
| 20 #include "base/synchronization/lock.h" | 21 #include "base/synchronization/lock.h" |
| 21 #include "base/synchronization/waitable_event.h" | 22 #include "base/synchronization/waitable_event.h" |
| 22 #include "base/threading/thread_task_runner_handle.h" | 23 #include "base/threading/thread_task_runner_handle.h" |
| 24 #include "base/time/time.h" |
| 23 #include "media/base/bind_to_current_loop.h" | 25 #include "media/base/bind_to_current_loop.h" |
| 24 #include "media/base/bitstream_buffer.h" | 26 #include "media/base/bitstream_buffer.h" |
| 25 #include "media/base/video_frame.h" | 27 #include "media/base/video_frame.h" |
| 26 #include "media/base/video_util.h" | 28 #include "media/base/video_util.h" |
| 27 #include "media/filters/h264_parser.h" | 29 #include "media/filters/h264_parser.h" |
| 28 #include "media/renderers/gpu_video_accelerator_factories.h" | 30 #include "media/renderers/gpu_video_accelerator_factories.h" |
| 29 #include "media/video/video_encode_accelerator.h" | 31 #include "media/video/video_encode_accelerator.h" |
| 30 #include "third_party/libyuv/include/libyuv.h" | 32 #include "third_party/libyuv/include/libyuv.h" |
| 31 #include "third_party/webrtc/base/timeutils.h" | 33 #include "third_party/webrtc/base/timeutils.h" |
| 32 | 34 |
| 33 namespace content { | 35 namespace content { |
| 34 | 36 |
| 35 namespace { | 37 namespace { |
| 36 | 38 |
// Associates the media::VideoFrame timestamp handed to the encoder with the
// RTP timestamp WebRTC assigned to the same frame. Entries are queued on
// |pending_timestamps_| in submission order so that BitstreamBufferReady()
// can recover the RTP timestamp for each encoded output.
// NOTE(review): member naming is inconsistent (|media_timestamp_| has a
// trailing underscore, |rtp_timestamp| does not) — both are public struct
// members and are referenced elsewhere, so renaming would be a wider change.
struct RTCTimestamps {
  RTCTimestamps(const base::TimeDelta& media_timestamp, int32_t rtp_timestamp)
      : media_timestamp_(media_timestamp), rtp_timestamp(rtp_timestamp) {}
  // Timestamp of the media::VideoFrame submitted for encoding.
  const base::TimeDelta media_timestamp_;
  // RTP timestamp WebRTC supplied for that frame (presumably a 90 kHz
  // clock, matching the fallback derivation in BitstreamBufferReady()).
  const int32_t rtp_timestamp;

 private:
  DISALLOW_IMPLICIT_CONSTRUCTORS(RTCTimestamps);
};
| 48 |
| 37 // Translate from webrtc::VideoCodecType and webrtc::VideoCodec to | 49 // Translate from webrtc::VideoCodecType and webrtc::VideoCodec to |
| 38 // media::VideoCodecProfile. | 50 // media::VideoCodecProfile. |
| 39 media::VideoCodecProfile WebRTCVideoCodecToVideoCodecProfile( | 51 media::VideoCodecProfile WebRTCVideoCodecToVideoCodecProfile( |
| 40 webrtc::VideoCodecType type, | 52 webrtc::VideoCodecType type, |
| 41 const webrtc::VideoCodec* codec_settings) { | 53 const webrtc::VideoCodec* codec_settings) { |
| 42 DCHECK_EQ(type, codec_settings->codecType); | 54 DCHECK_EQ(type, codec_settings->codecType); |
| 43 switch (type) { | 55 switch (type) { |
| 44 case webrtc::kVideoCodecVP8: | 56 case webrtc::kVideoCodecVP8: |
| 45 return media::VP8PROFILE_ANY; | 57 return media::VP8PROFILE_ANY; |
| 46 case webrtc::kVideoCodecVP9: | 58 case webrtc::kVideoCodecVP9: |
| (...skipping 158 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 205 // webrtc::VideoEncoder expects InitEncode() and Encode() to be synchronous. | 217 // webrtc::VideoEncoder expects InitEncode() and Encode() to be synchronous. |
| 206 // Do this by waiting on the |async_waiter_| and returning the return value in | 218 // Do this by waiting on the |async_waiter_| and returning the return value in |
| 207 // |async_retval_| when initialization completes, encoding completes, or | 219 // |async_retval_| when initialization completes, encoding completes, or |
| 208 // an error occurs. | 220 // an error occurs. |
| 209 base::WaitableEvent* async_waiter_; | 221 base::WaitableEvent* async_waiter_; |
| 210 int32_t* async_retval_; | 222 int32_t* async_retval_; |
| 211 | 223 |
| 212 // The underlying VEA to perform encoding on. | 224 // The underlying VEA to perform encoding on. |
| 213 std::unique_ptr<media::VideoEncodeAccelerator> video_encoder_; | 225 std::unique_ptr<media::VideoEncodeAccelerator> video_encoder_; |
| 214 | 226 |
| 227 // Used to match the encoded frame timestamp with WebRTC's given RTP |
| 228 // timestamp. |
| 229 std::deque<RTCTimestamps> pending_timestamps_; |
| 230 |
| 231 // Indicates that timestamp match failed and we should no longer attempt |
| 232 // matching. |
| 233 bool failed_timestamp_match_; |
| 234 |
| 215 // Next input frame. Since there is at most one next frame, a single-element | 235 // Next input frame. Since there is at most one next frame, a single-element |
| 216 // queue is sufficient. | 236 // queue is sufficient. |
| 217 const webrtc::VideoFrame* input_next_frame_; | 237 const webrtc::VideoFrame* input_next_frame_; |
| 218 | 238 |
| 219 // Whether to encode a keyframe next. | 239 // Whether to encode a keyframe next. |
| 220 bool input_next_frame_keyframe_; | 240 bool input_next_frame_keyframe_; |
| 221 | 241 |
| 222 // Frame sizes. | 242 // Frame sizes. |
| 223 gfx::Size input_frame_coded_size_; | 243 gfx::Size input_frame_coded_size_; |
| 224 gfx::Size input_visible_size_; | 244 gfx::Size input_visible_size_; |
| (...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 257 // Instead, we cache an error status here and return it the next time an | 277 // Instead, we cache an error status here and return it the next time an |
| 258 // interface entry point is called. This is protected by |status_lock_|. | 278 // interface entry point is called. This is protected by |status_lock_|. |
| 259 int32_t status_; | 279 int32_t status_; |
| 260 | 280 |
| 261 DISALLOW_COPY_AND_ASSIGN(Impl); | 281 DISALLOW_COPY_AND_ASSIGN(Impl); |
| 262 }; | 282 }; |
| 263 | 283 |
// Constructs an Impl in the uninitialized state. |gpu_factories| provides
// access to the hardware VideoEncodeAccelerator; |video_codec_type| selects
// the codec this instance will encode.
RTCVideoEncoder::Impl::Impl(media::GpuVideoAcceleratorFactories* gpu_factories,
                            webrtc::VideoCodecType video_codec_type)
    : gpu_factories_(gpu_factories),
      // No synchronous operation is pending at construction; these are set
      // when a caller blocks on InitEncode()/Encode() completion.
      async_waiter_(nullptr),
      async_retval_(nullptr),
      failed_timestamp_match_(false),
      input_next_frame_(nullptr),
      input_next_frame_keyframe_(false),
      output_buffers_free_count_(0),
      // -1 so the first capture time derived in BitstreamBufferReady() can
      // legitimately be 0 (it is clamped to last_capture_time_ms_ + 1).
      last_capture_time_ms_(-1),
      encoded_image_callback_(nullptr),
      video_codec_type_(video_codec_type),
      status_(WEBRTC_VIDEO_CODEC_UNINITIALIZED) {
  // Construction happens on a different thread than the one the object is
  // used on; detach so the checker re-binds to the first calling thread.
  thread_checker_.DetachFromThread();
  // Picture ID should start on a random number (15-bit; it wraps at 0x7FFF
  // in BitstreamBufferReady()).
  picture_id_ = static_cast<uint16_t>(base::RandInt(0, 0x7FFF));
}
| (...skipping 168 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 448 video_encoder_->UseOutputBitstreamBuffer(media::BitstreamBuffer( | 469 video_encoder_->UseOutputBitstreamBuffer(media::BitstreamBuffer( |
| 449 i, output_buffers_[i]->handle(), output_buffers_[i]->mapped_size())); | 470 i, output_buffers_[i]->handle(), output_buffers_[i]->mapped_size())); |
| 450 output_buffers_free_count_++; | 471 output_buffers_free_count_++; |
| 451 } | 472 } |
| 452 DCHECK_EQ(GetStatus(), WEBRTC_VIDEO_CODEC_UNINITIALIZED); | 473 DCHECK_EQ(GetStatus(), WEBRTC_VIDEO_CODEC_UNINITIALIZED); |
| 453 SetStatus(WEBRTC_VIDEO_CODEC_OK); | 474 SetStatus(WEBRTC_VIDEO_CODEC_OK); |
| 454 SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK); | 475 SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK); |
| 455 } | 476 } |
| 456 | 477 |
| 457 void RTCVideoEncoder::Impl::BitstreamBufferReady(int32_t bitstream_buffer_id, | 478 void RTCVideoEncoder::Impl::BitstreamBufferReady(int32_t bitstream_buffer_id, |
| 458 size_t payload_size, | 479 size_t payload_size, |
| 459 bool key_frame, | 480 bool key_frame, |
| 460 base::TimeDelta timestamp) { | 481 base::TimeDelta timestamp) { |
| 461 DVLOG(3) << "Impl::BitstreamBufferReady(): bitstream_buffer_id=" | 482 DVLOG(3) << "Impl::BitstreamBufferReady(): bitstream_buffer_id=" |
| 462 << bitstream_buffer_id << ", payload_size=" << payload_size | 483 << bitstream_buffer_id << ", payload_size=" << payload_size |
| 463 << ", key_frame=" << key_frame | 484 << ", key_frame=" << key_frame |
| 464 << ", timestamp ms=" << timestamp.InMilliseconds(); | 485 << ", timestamp ms=" << timestamp.InMilliseconds(); |
| 465 DCHECK(thread_checker_.CalledOnValidThread()); | 486 DCHECK(thread_checker_.CalledOnValidThread()); |
| 466 | 487 |
| 467 if (bitstream_buffer_id < 0 || | 488 if (bitstream_buffer_id < 0 || |
| 468 bitstream_buffer_id >= static_cast<int>(output_buffers_.size())) { | 489 bitstream_buffer_id >= static_cast<int>(output_buffers_.size())) { |
| 469 LogAndNotifyError(FROM_HERE, "invalid bitstream_buffer_id", | 490 LogAndNotifyError(FROM_HERE, "invalid bitstream_buffer_id", |
| 470 media::VideoEncodeAccelerator::kPlatformFailureError); | 491 media::VideoEncodeAccelerator::kPlatformFailureError); |
| 471 return; | 492 return; |
| 472 } | 493 } |
| 473 base::SharedMemory* output_buffer = | 494 base::SharedMemory* output_buffer = |
| 474 output_buffers_[bitstream_buffer_id].get(); | 495 output_buffers_[bitstream_buffer_id].get(); |
| 475 if (payload_size > output_buffer->mapped_size()) { | 496 if (payload_size > output_buffer->mapped_size()) { |
| 476 LogAndNotifyError(FROM_HERE, "invalid payload_size", | 497 LogAndNotifyError(FROM_HERE, "invalid payload_size", |
| 477 media::VideoEncodeAccelerator::kPlatformFailureError); | 498 media::VideoEncodeAccelerator::kPlatformFailureError); |
| 478 return; | 499 return; |
| 479 } | 500 } |
| 480 output_buffers_free_count_--; | 501 output_buffers_free_count_--; |
| 481 | 502 |
| 482 // Derive the capture time in ms from system clock. Make sure that it is | 503 // Derive the capture time in ms from system clock. Make sure that it is |
| 483 // greater than the last. | 504 // greater than the last. |
| 484 const int64_t capture_time_us = rtc::TimeMicros(); | 505 const int64_t capture_time_us = rtc::TimeMicros(); |
| 485 int64_t capture_time_ms = | 506 int64_t capture_time_ms = |
| 486 capture_time_us / base::Time::kMicrosecondsPerMillisecond; | 507 capture_time_us / base::Time::kMicrosecondsPerMillisecond; |
| 487 capture_time_ms = std::max(capture_time_ms, last_capture_time_ms_ + 1); | 508 capture_time_ms = std::max(capture_time_ms, last_capture_time_ms_ + 1); |
| 488 last_capture_time_ms_ = capture_time_ms; | 509 last_capture_time_ms_ = capture_time_ms; |
| 489 | 510 |
| 490 // Fallback to the current time if encoder does not provide timestamp. | 511 // Find RTP timestamp by going through |pending_timestamps_|. Derive it from |
| 491 const int64_t encoder_time_us = | 512 // capture time otherwise. |
| 492 timestamp.is_zero() ? capture_time_us : timestamp.InMicroseconds(); | 513 base::Optional<uint32_t> rtp_timestamp; |
| 493 | 514 if (!timestamp.is_zero() && !failed_timestamp_match_) { |
| 494 // Derive the RTP timestamp (in 90KHz ticks). It can wrap around, get the | 515 // Pop timestamps until we have a match. |
| 495 // lower 32 bits. | 516 while (!pending_timestamps_.empty()) { |
| 496 const uint32_t rtp_timestamp = static_cast<uint32_t>( | 517 const auto& front_timestamps = pending_timestamps_.front(); |
| 497 encoder_time_us * 90 / base::Time::kMicrosecondsPerMillisecond); | 518 if (front_timestamps.media_timestamp_ == timestamp) { |
| 519 rtp_timestamp = front_timestamps.rtp_timestamp; |
| 520 pending_timestamps_.pop_front(); |
| 521 break; |
| 522 } |
| 523 pending_timestamps_.pop_front(); |
| 524 } |
| 525 DCHECK(rtp_timestamp.has_value()); |
| 526 } |
| 527 if (!rtp_timestamp.has_value()) { |
| 528 failed_timestamp_match_ = true; |
| 529 pending_timestamps_.clear(); |
| 530 // RTP timestamp can wrap around. Get the lower 32 bits. |
| 531 rtp_timestamp = static_cast<uint32_t>( |
| 532 capture_time_us * 90 / base::Time::kMicrosecondsPerMillisecond); |
| 533 } |
| 498 | 534 |
| 499 webrtc::EncodedImage image( | 535 webrtc::EncodedImage image( |
| 500 reinterpret_cast<uint8_t*>(output_buffer->memory()), payload_size, | 536 reinterpret_cast<uint8_t*>(output_buffer->memory()), payload_size, |
| 501 output_buffer->mapped_size()); | 537 output_buffer->mapped_size()); |
| 502 image._encodedWidth = input_visible_size_.width(); | 538 image._encodedWidth = input_visible_size_.width(); |
| 503 image._encodedHeight = input_visible_size_.height(); | 539 image._encodedHeight = input_visible_size_.height(); |
| 504 image._timeStamp = rtp_timestamp; | 540 image._timeStamp = rtp_timestamp.value(); |
| 505 image.capture_time_ms_ = capture_time_ms; | 541 image.capture_time_ms_ = capture_time_ms; |
| 506 image._frameType = | 542 image._frameType = |
| 507 (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta); | 543 (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta); |
| 508 image._completeFrame = true; | 544 image._completeFrame = true; |
| 509 | 545 |
| 510 ReturnEncodedImage(image, bitstream_buffer_id, picture_id_); | 546 ReturnEncodedImage(image, bitstream_buffer_id, picture_id_); |
| 511 // Picture ID must wrap after reaching the maximum. | 547 // Picture ID must wrap after reaching the maximum. |
| 512 picture_id_ = (picture_id_ + 1) & 0x7FFF; | 548 picture_id_ = (picture_id_ + 1) & 0x7FFF; |
| 513 } | 549 } |
| 514 | 550 |
| (...skipping 99 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 614 frame->visible_rect().width(), | 650 frame->visible_rect().width(), |
| 615 frame->visible_rect().height(), | 651 frame->visible_rect().height(), |
| 616 libyuv::kFilterBox)) { | 652 libyuv::kFilterBox)) { |
| 617 LogAndNotifyError(FROM_HERE, "Failed to copy buffer", | 653 LogAndNotifyError(FROM_HERE, "Failed to copy buffer", |
| 618 media::VideoEncodeAccelerator::kPlatformFailureError); | 654 media::VideoEncodeAccelerator::kPlatformFailureError); |
| 619 return; | 655 return; |
| 620 } | 656 } |
| 621 } | 657 } |
| 622 frame->AddDestructionObserver(media::BindToCurrentLoop( | 658 frame->AddDestructionObserver(media::BindToCurrentLoop( |
| 623 base::Bind(&RTCVideoEncoder::Impl::EncodeFrameFinished, this, index))); | 659 base::Bind(&RTCVideoEncoder::Impl::EncodeFrameFinished, this, index))); |
| 660 if (!failed_timestamp_match_) { |
| 661 DCHECK(std::find_if(pending_timestamps_.begin(), pending_timestamps_.end(), |
| 662 [&frame](const RTCTimestamps& entry) { |
| 663 return entry.media_timestamp_ == frame->timestamp(); |
| 664 }) == pending_timestamps_.end()); |
| 665 pending_timestamps_.emplace_back(frame->timestamp(), |
| 666 next_frame->timestamp()); |
| 667 } |
| 624 video_encoder_->Encode(frame, next_frame_keyframe); | 668 video_encoder_->Encode(frame, next_frame_keyframe); |
| 625 input_buffers_free_.pop_back(); | 669 input_buffers_free_.pop_back(); |
| 626 SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK); | 670 SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK); |
| 627 } | 671 } |
| 628 | 672 |
| 629 void RTCVideoEncoder::Impl::EncodeFrameFinished(int index) { | 673 void RTCVideoEncoder::Impl::EncodeFrameFinished(int index) { |
| 630 DVLOG(3) << "Impl::EncodeFrameFinished(): index=" << index; | 674 DVLOG(3) << "Impl::EncodeFrameFinished(): index=" << index; |
| 631 DCHECK(thread_checker_.CalledOnValidThread()); | 675 DCHECK(thread_checker_.CalledOnValidThread()); |
| 632 DCHECK_GE(index, 0); | 676 DCHECK_GE(index, 0); |
| 633 DCHECK_LT(index, static_cast<int>(input_buffers_.size())); | 677 DCHECK_LT(index, static_cast<int>(input_buffers_.size())); |
| (...skipping 257 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 891 UMA_HISTOGRAM_BOOLEAN("Media.RTCVideoEncoderInitEncodeSuccess", | 935 UMA_HISTOGRAM_BOOLEAN("Media.RTCVideoEncoderInitEncodeSuccess", |
| 892 init_retval == WEBRTC_VIDEO_CODEC_OK); | 936 init_retval == WEBRTC_VIDEO_CODEC_OK); |
| 893 if (init_retval == WEBRTC_VIDEO_CODEC_OK) { | 937 if (init_retval == WEBRTC_VIDEO_CODEC_OK) { |
| 894 UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoEncoderProfile", | 938 UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoEncoderProfile", |
| 895 profile, | 939 profile, |
| 896 media::VIDEO_CODEC_PROFILE_MAX + 1); | 940 media::VIDEO_CODEC_PROFILE_MAX + 1); |
| 897 } | 941 } |
| 898 } | 942 } |
| 899 | 943 |
| 900 } // namespace content | 944 } // namespace content |
| OLD | NEW |