| Index: content/renderer/media/rtc_video_encoder.cc
|
| diff --git a/content/renderer/media/rtc_video_encoder.cc b/content/renderer/media/rtc_video_encoder.cc
|
| index 4ce16d53834017eb35d2ea0a9922398047144ef6..df1a458dda1e904390783b23786dd0734f9fe8e7 100644
|
| --- a/content/renderer/media/rtc_video_encoder.cc
|
| +++ b/content/renderer/media/rtc_video_encoder.cc
|
| @@ -473,19 +473,24 @@ void RTCVideoEncoder::Impl::BitstreamBufferReady(int32_t bitstream_buffer_id,
|
| }
|
| output_buffers_free_count_--;
|
|
|
| - // Derive the capture time (in ms) and RTP timestamp (in 90KHz ticks).
|
| - // This is based on how input timestamps are calculated in
|
| - // webrtc/video/video_capture_input.cc.
|
| - const uint32_t rtp_timestamp =
|
| - static_cast<uint32_t>(timestamp.InMilliseconds()) * 90;
|
| + uint32_t capture_time_ms = 0;

| + if (timestamp.is_zero()) {

| + // Some platforms do not pass the captured timestamp through. Use the

| + // current time instead.

| + capture_time_ms = rtc::TimeMicros() / 1000;

| + } else {

| + capture_time_ms = static_cast<uint32_t>(timestamp.InMilliseconds());

| + }
|
|
|
| webrtc::EncodedImage image(
|
| reinterpret_cast<uint8_t*>(output_buffer->memory()), payload_size,
|
| output_buffer->mapped_size());
|
| image._encodedWidth = input_visible_size_.width();
|
| image._encodedHeight = input_visible_size_.height();
|
| - image._timeStamp = rtp_timestamp;
|
| - image.capture_time_ms_ = timestamp.InMilliseconds();
|
| + // This is based on how input timestamps are calculated in
|
| + // webrtc/video/video_capture_input.cc.
|
| + image._timeStamp = capture_time_ms * 90;
|
| + image.capture_time_ms_ = capture_time_ms;
|
| image._frameType =
|
| (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta);
|
| image._completeFrame = true;
|
|
|