| Index: content/renderer/media/gpu/rtc_video_encoder.cc
| diff --git a/content/renderer/media/gpu/rtc_video_encoder.cc b/content/renderer/media/gpu/rtc_video_encoder.cc
| index 1718e9704e56e7a7618433fc21967e3441bbb186..da955a90f75a0f5250da7110d88d16e69c436d76 100644
| --- a/content/renderer/media/gpu/rtc_video_encoder.cc
| +++ b/content/renderer/media/gpu/rtc_video_encoder.cc
| @@ -32,9 +32,6 @@
|
|  namespace {
|
| -// Used for timestamp conversions.
| -static const int64_t kMsToRtpTimestamp = 90;
| -
|  // Translate from webrtc::VideoCodecType and webrtc::VideoCodec to
|  // media::VideoCodecProfile.
|  media::VideoCodecProfile WebRTCVideoCodecToVideoCodecProfile(
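
The hunk above removes kMsToRtpTimestamp, the constant that encoded the 90 kHz
RTP video clock (90 ticks per millisecond); the hunks below inline that
conversion at the point where the timestamp is actually derived. As a
standalone sketch of the unit math only (the values are illustrative, not from
the patch):

    #include <cstdint>
    #include <cstdio>

    int main() {
      const int64_t capture_time_us = 1500000;  // 1.5 s of capture time
      // 90 ticks per ms and 1000 us per ms, so scale by 90/1000.
      const int64_t rtp_ticks = capture_time_us * 90 / 1000;
      std::printf("%lld us -> %lld RTP ticks\n",
                  static_cast<long long>(capture_time_us),
                  static_cast<long long>(rtp_ticks));  // 135000 ticks
      return 0;
    }
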
| @@ -477,25 +474,27 @@
|    output_buffers_free_count_--;
|
|    // Derive the capture time (in ms) and RTP timestamp (in 90KHz ticks).
| -  int64_t rtp_timestamp, capture_time_ms;
| +  int64_t capture_time_us, capture_time_ms;
| +  uint32_t rtp_timestamp;
| +
|    if (!timestamp.is_zero()) {
| -    // Get RTP timestamp value.
| -    rtp_timestamp = timestamp.ToInternalValue();
| -    capture_time_ms = rtp_timestamp / kMsToRtpTimestamp;
| +    capture_time_us = timestamp.InMicroseconds();
| +    capture_time_ms = timestamp.InMilliseconds();
|    } else {
|      // Fallback to the current time if encoder does not provide timestamp.
| -    rtp_timestamp = rtc::TimeMicros() * kMsToRtpTimestamp /
| -                    base::Time::kMicrosecondsPerMillisecond;
| -    capture_time_ms =
| -        rtc::TimeMicros() / base::Time::kMicrosecondsPerMillisecond;
| -  }
| +    capture_time_us = rtc::TimeMicros();
| +    capture_time_ms = capture_time_us / base::Time::kMicrosecondsPerMillisecond;
| +  }
| +  // RTP timestamp can wrap around. Get the lower 32 bits.
| +  rtp_timestamp = static_cast<uint32_t>(
| +      capture_time_us * 90 / base::Time::kMicrosecondsPerMillisecond);
|
|    webrtc::EncodedImage image(
|        reinterpret_cast<uint8_t*>(output_buffer->memory()), payload_size,
|        output_buffer->mapped_size());
|    image._encodedWidth = input_visible_size_.width();
|    image._encodedHeight = input_visible_size_.height();
| -  image._timeStamp = static_cast<uint32_t>(rtp_timestamp);
| +  image._timeStamp = rtp_timestamp;
|    image.capture_time_ms_ = capture_time_ms;
|    image._frameType =
|        (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta);
|
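This hunk keeps capture time in real units (microseconds and milliseconds) and
derives the 90 kHz RTP timestamp once, at output time, instead of trusting
timestamp.ToInternalValue() to already hold a tick count (which only worked
because of the set_timestamp() hack removed in the last hunk below). Per the
patch's own comment, the tick count is truncated to 32 bits because the RTP
timestamp field wraps around. A standalone sketch of that wraparound, using
nothing beyond standard C++:

    #include <cstdint>
    #include <cstdio>

    int main() {
      // 2^32 + 42 ticks: a bit over 13 hours on the 90 kHz clock, enough to
      // overflow a 32-bit field.
      const int64_t ticks = 4294967296LL + 42;
      const uint32_t rtp_timestamp = static_cast<uint32_t>(ticks);
      std::printf("%u\n", rtp_timestamp);  // prints 42: the low 32 bits survive
      return 0;
    }
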
| @@ -572,13 +571,15 @@
|    }
|
|    if (requires_copy) {
| +    const base::TimeDelta timestamp =
| +        frame ? frame->timestamp()
| +              : base::TimeDelta::FromMilliseconds(next_frame->ntp_time_ms());
|      base::SharedMemory* input_buffer = input_buffers_[index];
|      frame = media::VideoFrame::WrapExternalSharedMemory(
|          media::PIXEL_FORMAT_I420, input_frame_coded_size_,
|          gfx::Rect(input_visible_size_), input_visible_size_,
|          reinterpret_cast<uint8_t*>(input_buffer->memory()),
| -        input_buffer->mapped_size(), input_buffer->handle(), 0,
| -        base::TimeDelta());
| +        input_buffer->mapped_size(), input_buffer->handle(), 0, timestamp);
|      if (!frame.get()) {
|        LogAndNotifyError(FROM_HERE, "failed to create frame",
|                          media::VideoEncodeAccelerator::kPlatformFailureError);
|
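With the set_timestamp() override gone (last hunk), a frame that has to be
copied into shared memory must carry a correct timestamp from construction
rather than the zero TimeDelta it was previously created with. The new code
prefers the source frame's own timestamp and falls back to the WebRTC frame's
NTP capture time. A minimal sketch of that fallback, with hypothetical structs
standing in for base::TimeDelta and the frame types (only the selection logic
mirrors the patch):

    #include <cstdint>
    #include <cstdio>

    struct TimeDelta { int64_t us; };             // stand-in for base::TimeDelta
    struct MediaFrame { TimeDelta timestamp; };   // stand-in for media::VideoFrame
    struct WebRtcFrame { int64_t ntp_time_ms; };  // stand-in for webrtc::VideoFrame

    TimeDelta ChooseTimestamp(const MediaFrame* frame, const WebRtcFrame& next) {
      if (frame)
        return frame->timestamp;                  // prefer the frame's own time
      return TimeDelta{next.ntp_time_ms * 1000};  // else NTP time, ms -> us
    }

    int main() {
      WebRtcFrame next{1234};
      std::printf("%lld us\n",
                  static_cast<long long>(ChooseTimestamp(nullptr, next).us));
      return 0;
    }
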
| @@ -610,9 +611,6 @@
|        return;
|      }
|    }
| -  // Use the timestamp set from WebRTC and set it in 90 kHz.
| -  frame->set_timestamp(
| -      base::TimeDelta::FromInternalValue(next_frame->timestamp()));
|    frame->AddDestructionObserver(media::BindToCurrentLoop(
|        base::Bind(&RTCVideoEncoder::Impl::EncodeFrameFinished, this, index)));
|    video_encoder_->Encode(frame, next_frame_keyframe);
|
|
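These removed lines are the counterpart of the rework in the second hunk: the
old code smuggled WebRTC's 90 kHz RTP timestamp through media::VideoFrame via
TimeDelta::FromInternalValue(), so the frame's "time" was really a tick count.
Any consumer reading it back in real units got a bogus value, as this
standalone sketch shows (assuming, as in Chromium, that base::TimeDelta stores
microseconds internally):

    #include <cstdint>
    #include <cstdio>

    int main() {
      const int64_t rtp_ticks = 90000;  // exactly 1 second on the 90 kHz clock
      // FromInternalValue() would store this raw count as 90000 microseconds,
      // so one second of media time reads back as a mere 90 ms.
      const int64_t misread_ms = rtp_ticks / 1000;
      std::printf("%lld ms (should be 1000 ms)\n",
                  static_cast<long long>(misread_ms));
      return 0;
    }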
|