Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(213)

Side by Side Diff: content/renderer/media/gpu/rtc_video_encoder.cc

Issue 2205623002: Use webrtc::VideoFrame timestamp in RTCVideoEncoder (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Change to using 90 kHz. Created 4 years, 4 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 // Copyright 2013 The Chromium Authors. All rights reserved. 1 // Copyright 2013 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "content/renderer/media/gpu/rtc_video_encoder.h" 5 #include "content/renderer/media/gpu/rtc_video_encoder.h"
6 6
7 #include <string.h> 7 #include <string.h>
8 8
9 #include "base/bind.h" 9 #include "base/bind.h"
10 #include "base/location.h" 10 #include "base/location.h"
(...skipping 14 matching lines...) Expand all
25 #include "media/filters/h264_parser.h" 25 #include "media/filters/h264_parser.h"
26 #include "media/renderers/gpu_video_accelerator_factories.h" 26 #include "media/renderers/gpu_video_accelerator_factories.h"
27 #include "media/video/video_encode_accelerator.h" 27 #include "media/video/video_encode_accelerator.h"
28 #include "third_party/libyuv/include/libyuv.h" 28 #include "third_party/libyuv/include/libyuv.h"
29 #include "third_party/webrtc/base/timeutils.h" 29 #include "third_party/webrtc/base/timeutils.h"
30 30
31 namespace content { 31 namespace content {
32 32
33 namespace { 33 namespace {
34 34
35 // Used for timestamp conversions.
36 static const int64_t kMsToRtpTimestamp = 90;
37
35 // Translate from webrtc::VideoCodecType and webrtc::VideoCodec to 38 // Translate from webrtc::VideoCodecType and webrtc::VideoCodec to
36 // media::VideoCodecProfile. 39 // media::VideoCodecProfile.
37 media::VideoCodecProfile WebRTCVideoCodecToVideoCodecProfile( 40 media::VideoCodecProfile WebRTCVideoCodecToVideoCodecProfile(
38 webrtc::VideoCodecType type, 41 webrtc::VideoCodecType type,
39 const webrtc::VideoCodec* codec_settings) { 42 const webrtc::VideoCodec* codec_settings) {
40 DCHECK_EQ(type, codec_settings->codecType); 43 DCHECK_EQ(type, codec_settings->codecType);
41 switch (type) { 44 switch (type) {
42 case webrtc::kVideoCodecVP8: 45 case webrtc::kVideoCodecVP8:
43 return media::VP8PROFILE_ANY; 46 return media::VP8PROFILE_ANY;
44 case webrtc::kVideoCodecH264: { 47 case webrtc::kVideoCodecH264: {
(...skipping 422 matching lines...) Expand 10 before | Expand all | Expand 10 after
467 } 470 }
468 base::SharedMemory* output_buffer = output_buffers_[bitstream_buffer_id]; 471 base::SharedMemory* output_buffer = output_buffers_[bitstream_buffer_id];
469 if (payload_size > output_buffer->mapped_size()) { 472 if (payload_size > output_buffer->mapped_size()) {
470 LogAndNotifyError(FROM_HERE, "invalid payload_size", 473 LogAndNotifyError(FROM_HERE, "invalid payload_size",
471 media::VideoEncodeAccelerator::kPlatformFailureError); 474 media::VideoEncodeAccelerator::kPlatformFailureError);
472 return; 475 return;
473 } 476 }
474 output_buffers_free_count_--; 477 output_buffers_free_count_--;
475 478
476 // Derive the capture time (in ms) and RTP timestamp (in 90KHz ticks). 479 // Derive the capture time (in ms) and RTP timestamp (in 90KHz ticks).
477 int64_t capture_time_us, capture_time_ms; 480 int64_t capture_time_ms, rtp_timestamp;
478 uint32_t rtp_timestamp;
479
480 if (!timestamp.is_zero()) { 481 if (!timestamp.is_zero()) {
481 capture_time_us = timestamp.InMicroseconds();; 482 // Get RTP timestamp value.
482 capture_time_ms = timestamp.InMilliseconds(); 483 rtp_timestamp = timestamp.ToInternalValue();
484 capture_time_ms = rtp_timestamp / kMsToRtpTimestamp;
483 } else { 485 } else {
484 // Fallback to the current time if encoder does not provide timestamp. 486 // Fallback to the current time if encoder does not provide timestamp.
485 capture_time_us = rtc::TimeMicros(); 487 capture_time_ms =
486 capture_time_ms = capture_time_us / base::Time::kMicrosecondsPerMillisecond; 488 rtc::TimeMicros() / base::Time::kMicrosecondsPerMillisecond;
489 rtp_timestamp = capture_time_ms * kMsToRtpTimestamp;
wuchengli 2016/08/11 06:06:03 We should multiply by 90 first to have the higher
emircan 2016/08/11 16:49:15 Done.
487 } 490 }
488 // RTP timestamp can wrap around. Get the lower 32 bits.
489 rtp_timestamp = static_cast<uint32_t>(
490 capture_time_us * 90 / base::Time::kMicrosecondsPerMillisecond);
491 491
492 webrtc::EncodedImage image( 492 webrtc::EncodedImage image(
493 reinterpret_cast<uint8_t*>(output_buffer->memory()), payload_size, 493 reinterpret_cast<uint8_t*>(output_buffer->memory()), payload_size,
494 output_buffer->mapped_size()); 494 output_buffer->mapped_size());
495 image._encodedWidth = input_visible_size_.width(); 495 image._encodedWidth = input_visible_size_.width();
496 image._encodedHeight = input_visible_size_.height(); 496 image._encodedHeight = input_visible_size_.height();
497 image._timeStamp = rtp_timestamp; 497 image._timeStamp = static_cast<int32_t>(rtp_timestamp);
wuchengli 2016/08/11 06:06:03 The type of _timeStamp is uint32_t. https://cs.chr
emircan 2016/08/11 16:49:15 Done.
498 image.capture_time_ms_ = capture_time_ms; 498 image.capture_time_ms_ = capture_time_ms;
499 image._frameType = 499 image._frameType =
500 (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta); 500 (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta);
501 image._completeFrame = true; 501 image._completeFrame = true;
502 502
503 ReturnEncodedImage(image, bitstream_buffer_id, picture_id_); 503 ReturnEncodedImage(image, bitstream_buffer_id, picture_id_);
504 // Picture ID must wrap after reaching the maximum. 504 // Picture ID must wrap after reaching the maximum.
505 picture_id_ = (picture_id_ + 1) & 0x7FFF; 505 picture_id_ = (picture_id_ + 1) & 0x7FFF;
506 } 506 }
507 507
(...skipping 56 matching lines...) Expand 10 before | Expand all | Expand 10 after
564 scoped_refptr<media::VideoFrame> frame; 564 scoped_refptr<media::VideoFrame> frame;
565 if (next_frame->video_frame_buffer()->native_handle()) { 565 if (next_frame->video_frame_buffer()->native_handle()) {
566 frame = static_cast<media::VideoFrame*>( 566 frame = static_cast<media::VideoFrame*>(
567 next_frame->video_frame_buffer()->native_handle()); 567 next_frame->video_frame_buffer()->native_handle());
568 requires_copy = RequiresSizeChange(frame); 568 requires_copy = RequiresSizeChange(frame);
569 } else { 569 } else {
570 requires_copy = true; 570 requires_copy = true;
571 } 571 }
572 572
573 if (requires_copy) { 573 if (requires_copy) {
574 const base::TimeDelta timestamp =
575 frame ? frame->timestamp()
576 : base::TimeDelta::FromMilliseconds(next_frame->ntp_time_ms());
577 base::SharedMemory* input_buffer = input_buffers_[index]; 574 base::SharedMemory* input_buffer = input_buffers_[index];
578 frame = media::VideoFrame::WrapExternalSharedMemory( 575 frame = media::VideoFrame::WrapExternalSharedMemory(
579 media::PIXEL_FORMAT_I420, input_frame_coded_size_, 576 media::PIXEL_FORMAT_I420, input_frame_coded_size_,
580 gfx::Rect(input_visible_size_), input_visible_size_, 577 gfx::Rect(input_visible_size_), input_visible_size_,
581 reinterpret_cast<uint8_t*>(input_buffer->memory()), 578 reinterpret_cast<uint8_t*>(input_buffer->memory()),
582 input_buffer->mapped_size(), input_buffer->handle(), 0, timestamp); 579 input_buffer->mapped_size(), input_buffer->handle(), 0,
580 base::TimeDelta());
583 if (!frame.get()) { 581 if (!frame.get()) {
584 LogAndNotifyError(FROM_HERE, "failed to create frame", 582 LogAndNotifyError(FROM_HERE, "failed to create frame",
585 media::VideoEncodeAccelerator::kPlatformFailureError); 583 media::VideoEncodeAccelerator::kPlatformFailureError);
586 return; 584 return;
587 } 585 }
588 // Do a strided copy of the input frame to match the input requirements for 586 // Do a strided copy of the input frame to match the input requirements for
589 // the encoder. 587 // the encoder.
590 // TODO(sheu): support zero-copy from WebRTC. http://crbug.com/269312 588 // TODO(sheu): support zero-copy from WebRTC. http://crbug.com/269312
591 if (libyuv::I420Copy(next_frame->video_frame_buffer()->DataY(), 589 if (libyuv::I420Copy(next_frame->video_frame_buffer()->DataY(),
592 next_frame->video_frame_buffer()->StrideY(), 590 next_frame->video_frame_buffer()->StrideY(),
593 next_frame->video_frame_buffer()->DataU(), 591 next_frame->video_frame_buffer()->DataU(),
594 next_frame->video_frame_buffer()->StrideU(), 592 next_frame->video_frame_buffer()->StrideU(),
595 next_frame->video_frame_buffer()->DataV(), 593 next_frame->video_frame_buffer()->DataV(),
596 next_frame->video_frame_buffer()->StrideV(), 594 next_frame->video_frame_buffer()->StrideV(),
597 frame->data(media::VideoFrame::kYPlane), 595 frame->data(media::VideoFrame::kYPlane),
598 frame->stride(media::VideoFrame::kYPlane), 596 frame->stride(media::VideoFrame::kYPlane),
599 frame->data(media::VideoFrame::kUPlane), 597 frame->data(media::VideoFrame::kUPlane),
600 frame->stride(media::VideoFrame::kUPlane), 598 frame->stride(media::VideoFrame::kUPlane),
601 frame->data(media::VideoFrame::kVPlane), 599 frame->data(media::VideoFrame::kVPlane),
602 frame->stride(media::VideoFrame::kVPlane), 600 frame->stride(media::VideoFrame::kVPlane),
603 next_frame->width(), next_frame->height())) { 601 next_frame->width(), next_frame->height())) {
604 LogAndNotifyError(FROM_HERE, "Failed to copy buffer", 602 LogAndNotifyError(FROM_HERE, "Failed to copy buffer",
605 media::VideoEncodeAccelerator::kPlatformFailureError); 603 media::VideoEncodeAccelerator::kPlatformFailureError);
606 return; 604 return;
607 } 605 }
608 } 606 }
607 // Use the timestamp set from WebRTC and set it in 90 kHz.
608 frame->set_timestamp(
609 base::TimeDelta::FromInternalValue(next_frame->timestamp()));
609 frame->AddDestructionObserver(media::BindToCurrentLoop( 610 frame->AddDestructionObserver(media::BindToCurrentLoop(
610 base::Bind(&RTCVideoEncoder::Impl::EncodeFrameFinished, this, index))); 611 base::Bind(&RTCVideoEncoder::Impl::EncodeFrameFinished, this, index)));
611 video_encoder_->Encode(frame, next_frame_keyframe); 612 video_encoder_->Encode(frame, next_frame_keyframe);
612 input_buffers_free_.pop_back(); 613 input_buffers_free_.pop_back();
613 SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK); 614 SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK);
614 } 615 }
615 616
616 void RTCVideoEncoder::Impl::EncodeFrameFinished(int index) { 617 void RTCVideoEncoder::Impl::EncodeFrameFinished(int index) {
617 DVLOG(3) << "Impl::EncodeFrameFinished(): index=" << index; 618 DVLOG(3) << "Impl::EncodeFrameFinished(): index=" << index;
618 DCHECK(thread_checker_.CalledOnValidThread()); 619 DCHECK(thread_checker_.CalledOnValidThread());
(...skipping 255 matching lines...) Expand 10 before | Expand all | Expand 10 after
874 UMA_HISTOGRAM_BOOLEAN("Media.RTCVideoEncoderInitEncodeSuccess", 875 UMA_HISTOGRAM_BOOLEAN("Media.RTCVideoEncoderInitEncodeSuccess",
875 init_retval == WEBRTC_VIDEO_CODEC_OK); 876 init_retval == WEBRTC_VIDEO_CODEC_OK);
876 if (init_retval == WEBRTC_VIDEO_CODEC_OK) { 877 if (init_retval == WEBRTC_VIDEO_CODEC_OK) {
877 UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoEncoderProfile", 878 UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoEncoderProfile",
878 profile, 879 profile,
879 media::VIDEO_CODEC_PROFILE_MAX + 1); 880 media::VIDEO_CODEC_PROFILE_MAX + 1);
880 } 881 }
881 } 882 }
882 883
883 } // namespace content 884 } // namespace content
OLDNEW
« no previous file with comments | « no previous file | media/gpu/video_encode_accelerator_unittest.cc » ('j') | media/gpu/video_encode_accelerator_unittest.cc » ('J')

Powered by Google App Engine
This is Rietveld 408576698