Index: content/renderer/media/rtc_video_encoder.cc |
diff --git a/content/renderer/media/rtc_video_encoder.cc b/content/renderer/media/rtc_video_encoder.cc |
index cb7752ba848c66bb1f0957f2e07e7267341824c0..3e93017540e44b269ea049a584352b8e6d74346d 100644 |
--- a/content/renderer/media/rtc_video_encoder.cc |
+++ b/content/renderer/media/rtc_video_encoder.cc |
@@ -473,17 +473,21 @@ void RTCVideoEncoder::Impl::BitstreamBufferReady(int32_t bitstream_buffer_id, |
} |
output_buffers_free_count_--; |
- // CrOS Nyan provides invalid timestamp. Use the current time for now. |
- // TODO(wuchengli): use the timestamp in BitstreamBufferReady after Nyan is |
- // fixed. http://crbug.com/620565. |
- const int64_t capture_time_us = rtc::TimeMicros(); |
- |
// Derive the capture time (in ms) and RTP timestamp (in 90KHz ticks). |
- const int64_t capture_time_ms = |
- capture_time_us / base::Time::kMicrosecondsPerMillisecond; |
+ int64_t capture_time_ms; |
+ uint32_t rtp_timestamp; |
- const uint32_t rtp_timestamp = static_cast<uint32_t>( |
+ if (!timestamp.is_zero()) { |
+ capture_time_ms = timestamp.InMilliseconds(); |
+ rtp_timestamp = static_cast<uint32_t>(timestamp.InMilliseconds()) * 90; |
+ } else { |
+ // Fallback to the current time if encoder does not provide timestamp. |
+ const int64_t capture_time_us = rtc::TimeMicros(); |
+ capture_time_ms = |
+ capture_time_us / base::Time::kMicrosecondsPerMillisecond; |
+ rtp_timestamp = static_cast<uint32_t>( |
capture_time_us * 90 / base::Time::kMicrosecondsPerMillisecond); |
+ } |
webrtc::EncodedImage image( |
reinterpret_cast<uint8_t*>(output_buffer->memory()), payload_size, |
@@ -567,13 +571,18 @@ void RTCVideoEncoder::Impl::EncodeOneFrame() { |
} |
if (requires_copy) { |
+ base::TimeDelta timestamp; |
[Review comment] shenghao1 (2016/06/29 08:39:17):
  Consider changing to:
    base::TimeDelta timestamp = ...
  (suggested initializer truncated in this extract; the diff below applies the
  equivalent change via an if/else.)
[Reply] wuchengli (2016/06/29 08:48:42):
  Done. I also ran `git cl format`.
 |
+ if (frame) |
+ timestamp = frame->timestamp(); |
+ else |
+ timestamp = base::TimeDelta::FromMilliseconds(next_frame->ntp_time_ms()); |
base::SharedMemory* input_buffer = input_buffers_[index]; |
frame = media::VideoFrame::WrapExternalSharedMemory( |
media::PIXEL_FORMAT_I420, input_frame_coded_size_, |
gfx::Rect(input_visible_size_), input_visible_size_, |
reinterpret_cast<uint8_t*>(input_buffer->memory()), |
input_buffer->mapped_size(), input_buffer->handle(), 0, |
- base::TimeDelta::FromMilliseconds(next_frame->ntp_time_ms())); |
+ timestamp); |
if (!frame.get()) { |
LogAndNotifyError(FROM_HERE, "failed to create frame", |
media::VideoEncodeAccelerator::kPlatformFailureError); |