| Index: content/renderer/media/gpu/rtc_video_encoder.cc
|
| diff --git a/content/renderer/media/gpu/rtc_video_encoder.cc b/content/renderer/media/gpu/rtc_video_encoder.cc
|
| index 988185b489dcb5097fe334683473301432f41c1b..13c2414bac56c15ff6863d765e77db23c6853a00 100644
|
| --- a/content/renderer/media/gpu/rtc_video_encoder.cc
|
| +++ b/content/renderer/media/gpu/rtc_video_encoder.cc
|
| @@ -236,6 +236,10 @@ class RTCVideoEncoder::Impl
|
| // 15 bits running index of the VP8 frames. See VP8 RTP spec for details.
|
| uint16_t picture_id_;
|
|
|
| + // The |capture_time_ms_| field of the last webrtc::EncodedImage returned

| + // from BitstreamBufferReady().
|
| + int64_t last_capture_time_ms_;
|
| +
|
| // webrtc::VideoEncoder encode complete callback.
|
| webrtc::EncodedImageCallback* encoded_image_callback_;
|
|
|
| @@ -263,6 +267,7 @@ RTCVideoEncoder::Impl::Impl(media::GpuVideoAcceleratorFactories* gpu_factories,
|
| input_next_frame_(NULL),
|
| input_next_frame_keyframe_(false),
|
| output_buffers_free_count_(0),
|
| + last_capture_time_ms_(-1),
|
| encoded_image_callback_(nullptr),
|
| video_codec_type_(video_codec_type),
|
| status_(WEBRTC_VIDEO_CODEC_UNINITIALIZED) {
|
| @@ -471,21 +476,22 @@ void RTCVideoEncoder::Impl::BitstreamBufferReady(int32_t bitstream_buffer_id,
|
| }
|
| output_buffers_free_count_--;
|
|
|
| - // Derive the capture time (in ms) and RTP timestamp (in 90KHz ticks).
|
| - int64_t capture_time_us, capture_time_ms;
|
| - uint32_t rtp_timestamp;
|
| -
|
| - if (!timestamp.is_zero()) {
|
| - capture_time_us = timestamp.InMicroseconds();;
|
| - capture_time_ms = timestamp.InMilliseconds();
|
| - } else {
|
| - // Fallback to the current time if encoder does not provide timestamp.
|
| - capture_time_us = rtc::TimeMicros();
|
| - capture_time_ms = capture_time_us / base::Time::kMicrosecondsPerMillisecond;
|
| - }
|
| - // RTP timestamp can wrap around. Get the lower 32 bits.
|
| - rtp_timestamp = static_cast<uint32_t>(
|
| - capture_time_us * 90 / base::Time::kMicrosecondsPerMillisecond);
|
| + // Derive the capture time (in ms) from the system clock, forcing it to be

| + // strictly greater than the last returned value so capture times stay monotonic.
|
| + const int64_t capture_time_us = rtc::TimeMicros();
|
| + int64_t capture_time_ms =
|
| + capture_time_us / base::Time::kMicrosecondsPerMillisecond;
|
| + capture_time_ms = std::max(capture_time_ms, last_capture_time_ms_ + 1);
|
| + last_capture_time_ms_ = capture_time_ms;
|
| +
|
| + // Fallback to the current time if encoder does not provide timestamp.
|
| + const int64_t encoder_time_us =
|
| + timestamp.is_zero() ? capture_time_us : timestamp.InMicroseconds();
|
| +
|
| + // Derive the RTP timestamp (in 90KHz ticks). It can wrap around; take the

| + // lower 32 bits.
|
| + const uint32_t rtp_timestamp = static_cast<uint32_t>(
|
| + encoder_time_us * 90 / base::Time::kMicrosecondsPerMillisecond);
|
|
|
| webrtc::EncodedImage image(
|
| reinterpret_cast<uint8_t*>(output_buffer->memory()), payload_size,
|
|
|