Chromium Code Reviews| Index: content/renderer/media/rtc_video_encoder.cc |
| diff --git a/content/renderer/media/rtc_video_encoder.cc b/content/renderer/media/rtc_video_encoder.cc |
| index cb7752ba848c66bb1f0957f2e07e7267341824c0..ddbe9c695969546b238ad57b4bfb41cd7443e291 100644 |
| --- a/content/renderer/media/rtc_video_encoder.cc |
| +++ b/content/renderer/media/rtc_video_encoder.cc |
| @@ -473,17 +473,20 @@ void RTCVideoEncoder::Impl::BitstreamBufferReady(int32_t bitstream_buffer_id, |
| } |
| output_buffers_free_count_--; |
| - // CrOS Nyan provides invalid timestamp. Use the current time for now. |
| - // TODO(wuchengli): use the timestamp in BitstreamBufferReady after Nyan is |
| - // fixed. http://crbug.com/620565. |
| - const int64_t capture_time_us = rtc::TimeMicros(); |
| - |
| // Derive the capture time (in ms) and RTP timestamp (in 90KHz ticks). |
| - const int64_t capture_time_ms = |
| - capture_time_us / base::Time::kMicrosecondsPerMillisecond; |
| + int64_t capture_time_ms; |
| + uint32_t rtp_timestamp; |
| - const uint32_t rtp_timestamp = static_cast<uint32_t>( |
| - capture_time_us * 90 / base::Time::kMicrosecondsPerMillisecond); |
| + if (!timestamp.is_zero()) { |
| + capture_time_ms = timestamp.InMilliseconds(); |
| + rtp_timestamp = static_cast<uint32_t>(timestamp.InMilliseconds()) * 90; |
|
mcasas
2016/06/29 14:55:50
nit: s/timestamp.InMilliseconds()/capture_time_ms/
wuchengli
2016/07/01 06:19:09
Now I used the same calculation for both cases because… [comment truncated in extraction]
|
| + } else { |
| + // Fallback to the current time if encoder does not provide timestamp. |
| + const int64_t capture_time_us = rtc::TimeMicros(); |
| + capture_time_ms = capture_time_us / base::Time::kMicrosecondsPerMillisecond; |
| + rtp_timestamp = static_cast<uint32_t>( |
|
mcasas
2016/06/29 14:55:50
nit: We'd probably want |rtp_timestamp| to wrap around.
wuchengli
2016/07/01 06:19:09
Yes. We want the RTP timestamp to wrap around. I added… [comment truncated in extraction]
|
| + capture_time_us * 90 / base::Time::kMicrosecondsPerMillisecond); |
| + } |
| webrtc::EncodedImage image( |
| reinterpret_cast<uint8_t*>(output_buffer->memory()), payload_size, |
| @@ -567,13 +570,15 @@ void RTCVideoEncoder::Impl::EncodeOneFrame() { |
| } |
| if (requires_copy) { |
| + base::TimeDelta timestamp = |
|
mcasas
2016/06/29 14:55:50
nit: const
wuchengli
2016/07/01 06:19:09
Done.
|
| + frame ? frame->timestamp() |
| + : base::TimeDelta::FromMilliseconds(next_frame->ntp_time_ms()); |
| base::SharedMemory* input_buffer = input_buffers_[index]; |
| frame = media::VideoFrame::WrapExternalSharedMemory( |
| media::PIXEL_FORMAT_I420, input_frame_coded_size_, |
| gfx::Rect(input_visible_size_), input_visible_size_, |
| reinterpret_cast<uint8_t*>(input_buffer->memory()), |
| - input_buffer->mapped_size(), input_buffer->handle(), 0, |
| - base::TimeDelta::FromMilliseconds(next_frame->ntp_time_ms())); |
| + input_buffer->mapped_size(), input_buffer->handle(), 0, timestamp); |
| if (!frame.get()) { |
| LogAndNotifyError(FROM_HERE, "failed to create frame", |
| media::VideoEncodeAccelerator::kPlatformFailureError); |