Index: content/renderer/media/gpu/rtc_video_encoder.cc |
diff --git a/content/renderer/media/gpu/rtc_video_encoder.cc b/content/renderer/media/gpu/rtc_video_encoder.cc |
index 7ec22b2f3d0a9996ed3ea0f422d9432e8c303567..ccc0b2a13225545fab9e488bc7a9482daa41346f 100644 |
--- a/content/renderer/media/gpu/rtc_video_encoder.cc |
+++ b/content/renderer/media/gpu/rtc_video_encoder.cc |
@@ -32,6 +32,9 @@ namespace content { |
namespace { |
+// Used for timestamp conversions. |
+static const int64_t kMsToRtpTimestamp = 90; |
+ |
// Translate from webrtc::VideoCodecType and webrtc::VideoCodec to |
// media::VideoCodecProfile. |
media::VideoCodecProfile WebRTCVideoCodecToVideoCodecProfile( |
@@ -474,27 +477,24 @@ void RTCVideoEncoder::Impl::BitstreamBufferReady(int32_t bitstream_buffer_id, |
output_buffers_free_count_--; |
// Derive the capture time (in ms) and RTP timestamp (in 90KHz ticks). |
- int64_t capture_time_us, capture_time_ms; |
- uint32_t rtp_timestamp; |
- |
+ int64_t capture_time_ms, rtp_timestamp; |
if (!timestamp.is_zero()) { |
- capture_time_us = timestamp.InMicroseconds();; |
- capture_time_ms = timestamp.InMilliseconds(); |
+ // Get RTP timestamp value. |
+ rtp_timestamp = timestamp.ToInternalValue(); |
+ capture_time_ms = rtp_timestamp / kMsToRtpTimestamp; |
} else { |
// Fallback to the current time if encoder does not provide timestamp. |
- capture_time_us = rtc::TimeMicros(); |
- capture_time_ms = capture_time_us / base::Time::kMicrosecondsPerMillisecond; |
+ capture_time_ms = |
+ rtc::TimeMicros() / base::Time::kMicrosecondsPerMillisecond; |
+ rtp_timestamp = capture_time_ms * kMsToRtpTimestamp; |
[Review note — wuchengli, 2016/08/11 06:06:03]: We should multiply by 90 first to have the higher precision. [comment truncated in export]
[Reply — emircan, 2016/08/11 16:49:15]: Done.
|
} |
- // RTP timestamp can wrap around. Get the lower 32 bits. |
- rtp_timestamp = static_cast<uint32_t>( |
- capture_time_us * 90 / base::Time::kMicrosecondsPerMillisecond); |
webrtc::EncodedImage image( |
reinterpret_cast<uint8_t*>(output_buffer->memory()), payload_size, |
output_buffer->mapped_size()); |
image._encodedWidth = input_visible_size_.width(); |
image._encodedHeight = input_visible_size_.height(); |
- image._timeStamp = rtp_timestamp; |
+ image._timeStamp = static_cast<int32_t>(rtp_timestamp); |
[Review note — wuchengli, 2016/08/11 06:06:03]: The type of _timeStamp is uint32_t. https://cs.chromium.org/… [link truncated in export]
[Reply — emircan, 2016/08/11 16:49:15]: Done.
|
image.capture_time_ms_ = capture_time_ms; |
image._frameType = |
(key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta); |
@@ -571,15 +571,13 @@ void RTCVideoEncoder::Impl::EncodeOneFrame() { |
} |
if (requires_copy) { |
- const base::TimeDelta timestamp = |
- frame ? frame->timestamp() |
- : base::TimeDelta::FromMilliseconds(next_frame->ntp_time_ms()); |
base::SharedMemory* input_buffer = input_buffers_[index]; |
frame = media::VideoFrame::WrapExternalSharedMemory( |
media::PIXEL_FORMAT_I420, input_frame_coded_size_, |
gfx::Rect(input_visible_size_), input_visible_size_, |
reinterpret_cast<uint8_t*>(input_buffer->memory()), |
- input_buffer->mapped_size(), input_buffer->handle(), 0, timestamp); |
+ input_buffer->mapped_size(), input_buffer->handle(), 0, |
+ base::TimeDelta()); |
if (!frame.get()) { |
LogAndNotifyError(FROM_HERE, "failed to create frame", |
media::VideoEncodeAccelerator::kPlatformFailureError); |
@@ -606,6 +604,9 @@ void RTCVideoEncoder::Impl::EncodeOneFrame() { |
return; |
} |
} |
+ // Use the timestamp set from WebRTC and set it in 90 kHz. |
+ frame->set_timestamp( |
+ base::TimeDelta::FromInternalValue(next_frame->timestamp())); |
frame->AddDestructionObserver(media::BindToCurrentLoop( |
base::Bind(&RTCVideoEncoder::Impl::EncodeFrameFinished, this, index))); |
video_encoder_->Encode(frame, next_frame_keyframe); |