Chromium Code Reviews

Diff: content/renderer/media/gpu/rtc_video_encoder.cc

Issue 2205623002: Use webrtc::VideoFrame timestamp in RTCVideoEncoder (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: wuchengli@ comments. Created 4 years, 4 months ago
 // Copyright 2013 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "content/renderer/media/gpu/rtc_video_encoder.h"

 #include <string.h>

 #include "base/bind.h"
 #include "base/location.h"
(...skipping 14 matching lines...)
25 #include "media/filters/h264_parser.h" 25 #include "media/filters/h264_parser.h"
26 #include "media/renderers/gpu_video_accelerator_factories.h" 26 #include "media/renderers/gpu_video_accelerator_factories.h"
27 #include "media/video/video_encode_accelerator.h" 27 #include "media/video/video_encode_accelerator.h"
28 #include "third_party/libyuv/include/libyuv.h" 28 #include "third_party/libyuv/include/libyuv.h"
29 #include "third_party/webrtc/base/timeutils.h" 29 #include "third_party/webrtc/base/timeutils.h"
30 30
31 namespace content { 31 namespace content {
32 32
33 namespace { 33 namespace {
34 34
35 // Used for timestamp conversions.
36 static const int64_t kMsToRtpTimestamp = 90;
37
35 // Translate from webrtc::VideoCodecType and webrtc::VideoCodec to 38 // Translate from webrtc::VideoCodecType and webrtc::VideoCodec to
36 // media::VideoCodecProfile. 39 // media::VideoCodecProfile.
37 media::VideoCodecProfile WebRTCVideoCodecToVideoCodecProfile( 40 media::VideoCodecProfile WebRTCVideoCodecToVideoCodecProfile(
38 webrtc::VideoCodecType type, 41 webrtc::VideoCodecType type,
39 const webrtc::VideoCodec* codec_settings) { 42 const webrtc::VideoCodec* codec_settings) {
40 DCHECK_EQ(type, codec_settings->codecType); 43 DCHECK_EQ(type, codec_settings->codecType);
41 switch (type) { 44 switch (type) {
42 case webrtc::kVideoCodecVP8: 45 case webrtc::kVideoCodecVP8:
43 return media::VP8PROFILE_ANY; 46 return media::VP8PROFILE_ANY;
44 case webrtc::kVideoCodecH264: { 47 case webrtc::kVideoCodecH264: {
(...skipping 422 matching lines...) Expand 10 before | Expand all | Expand 10 after
   }
   base::SharedMemory* output_buffer = output_buffers_[bitstream_buffer_id];
   if (payload_size > output_buffer->mapped_size()) {
     LogAndNotifyError(FROM_HERE, "invalid payload_size",
                       media::VideoEncodeAccelerator::kPlatformFailureError);
     return;
   }
   output_buffers_free_count_--;

   // Derive the capture time (in ms) and RTP timestamp (in 90KHz ticks).
-  int64_t capture_time_us, capture_time_ms;
-  uint32_t rtp_timestamp;
-
+  int64_t rtp_timestamp, capture_time_ms;
   if (!timestamp.is_zero()) {
-    capture_time_us = timestamp.InMicroseconds();;
-    capture_time_ms = timestamp.InMilliseconds();
+    // Get RTP timestamp value.
+    rtp_timestamp = timestamp.ToInternalValue();
+    capture_time_ms = rtp_timestamp / kMsToRtpTimestamp;
   } else {
     // Fallback to the current time if encoder does not provide timestamp.
-    capture_time_us = rtc::TimeMicros();
-    capture_time_ms = capture_time_us / base::Time::kMicrosecondsPerMillisecond;
+    rtp_timestamp = rtc::TimeMicros() * kMsToRtpTimestamp /
+                    base::Time::kMicrosecondsPerMillisecond;
+    capture_time_ms =
+        rtc::TimeMicros() / base::Time::kMicrosecondsPerMillisecond;
   }
-  // RTP timestamp can wrap around. Get the lower 32 bits.
-  rtp_timestamp = static_cast<uint32_t>(
-      capture_time_us * 90 / base::Time::kMicrosecondsPerMillisecond);

   webrtc::EncodedImage image(
       reinterpret_cast<uint8_t*>(output_buffer->memory()), payload_size,
       output_buffer->mapped_size());
   image._encodedWidth = input_visible_size_.width();
   image._encodedHeight = input_visible_size_.height();
-  image._timeStamp = rtp_timestamp;
+  image._timeStamp = static_cast<uint32_t>(rtp_timestamp);
   image.capture_time_ms_ = capture_time_ms;
   image._frameType =
       (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta);
   image._completeFrame = true;

   ReturnEncodedImage(image, bitstream_buffer_id, picture_id_);
   // Picture ID must wrap after reaching the maximum.
   picture_id_ = (picture_id_ + 1) & 0x7FFF;
 }

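The new constant kMsToRtpTimestamp encodes the 90 kHz RTP video clock (90 ticks per millisecond), which is what both branches above rely on. Below is a minimal standalone sketch of that arithmetic with illustrative values only; it mirrors the logic in BitstreamBufferReady() but is not code from this CL, and kMicrosecondsPerMillisecond stands in for base::Time::kMicrosecondsPerMillisecond.

// Illustrative sketch of the timestamp arithmetic; not part of the patch.
#include <cstdint>
#include <cstdio>

namespace {
const int64_t kMsToRtpTimestamp = 90;             // 90 kHz clock -> 90 ticks/ms
const int64_t kMicrosecondsPerMillisecond = 1000;
}  // namespace

int main() {
  // Normal path: the encoder-provided timestamp already holds RTP ticks, so
  // the capture time in milliseconds is a plain division.
  int64_t rtp_timestamp = 2970;  // e.g. a frame captured at t = 33 ms
  int64_t capture_time_ms = rtp_timestamp / kMsToRtpTimestamp;  // 33

  // Fallback path: no timestamp from the encoder; derive both values from a
  // wall-clock reading in microseconds (rtc::TimeMicros() in the real code).
  int64_t now_us = 1234567;  // hypothetical current time in microseconds
  int64_t fallback_rtp =
      now_us * kMsToRtpTimestamp / kMicrosecondsPerMillisecond;  // 111111
  int64_t fallback_ms = now_us / kMicrosecondsPerMillisecond;    // 1234

  printf("rtp=%lld ms=%lld | fallback rtp=%lld ms=%lld\n",
         (long long)rtp_timestamp, (long long)capture_time_ms,
         (long long)fallback_rtp, (long long)fallback_ms);
  return 0;
}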
(...skipping 56 matching lines...)
   scoped_refptr<media::VideoFrame> frame;
   if (next_frame->video_frame_buffer()->native_handle()) {
     frame = static_cast<media::VideoFrame*>(
         next_frame->video_frame_buffer()->native_handle());
     requires_copy = RequiresSizeChange(frame);
   } else {
     requires_copy = true;
   }

   if (requires_copy) {
-    const base::TimeDelta timestamp =
-        frame ? frame->timestamp()
-              : base::TimeDelta::FromMilliseconds(next_frame->ntp_time_ms());
     base::SharedMemory* input_buffer = input_buffers_[index];
     frame = media::VideoFrame::WrapExternalSharedMemory(
         media::PIXEL_FORMAT_I420, input_frame_coded_size_,
         gfx::Rect(input_visible_size_), input_visible_size_,
         reinterpret_cast<uint8_t*>(input_buffer->memory()),
-        input_buffer->mapped_size(), input_buffer->handle(), 0, timestamp);
+        input_buffer->mapped_size(), input_buffer->handle(), 0,
+        base::TimeDelta());
     if (!frame.get()) {
       LogAndNotifyError(FROM_HERE, "failed to create frame",
                         media::VideoEncodeAccelerator::kPlatformFailureError);
       return;
     }

     // Do a strided copy and scale (if necessary) the input frame to match
     // the input requirements for the encoder.
     // TODO(sheu): Support zero-copy from WebRTC. http://crbug.com/269312
     // TODO(magjed): Downscale with kFilterBox in an image pyramid instead.
(...skipping 11 matching lines...)
                           frame->visible_data(media::VideoFrame::kVPlane),
                           frame->stride(media::VideoFrame::kVPlane),
                           frame->visible_rect().width(),
                           frame->visible_rect().height(),
                           libyuv::kFilterBox)) {
       LogAndNotifyError(FROM_HERE, "Failed to copy buffer",
                         media::VideoEncodeAccelerator::kPlatformFailureError);
       return;
     }
   }
+  // Use the timestamp set from WebRTC and set it in 90 kHz.
+  frame->set_timestamp(
+      base::TimeDelta::FromInternalValue(next_frame->timestamp()));
   frame->AddDestructionObserver(media::BindToCurrentLoop(
       base::Bind(&RTCVideoEncoder::Impl::EncodeFrameFinished, this, index)));
   video_encoder_->Encode(frame, next_frame_keyframe);
   input_buffers_free_.pop_back();
   SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK);
 }

 void RTCVideoEncoder::Impl::EncodeFrameFinished(int index) {
   DVLOG(3) << "Impl::EncodeFrameFinished(): index=" << index;
   DCHECK(thread_checker_.CalledOnValidThread());
(...skipping 255 matching lines...)
   UMA_HISTOGRAM_BOOLEAN("Media.RTCVideoEncoderInitEncodeSuccess",
                         init_retval == WEBRTC_VIDEO_CODEC_OK);
   if (init_retval == WEBRTC_VIDEO_CODEC_OK) {
     UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoEncoderProfile",
                               profile,
                               media::VIDEO_CODEC_PROFILE_MAX + 1);
   }
 }

 }  // namespace content
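Taken together, the two changed hunks pass the webrtc::VideoFrame RTP timestamp through media::VideoFrame unchanged: the encode path stores the raw 90 kHz tick count via base::TimeDelta::FromInternalValue(), and BitstreamBufferReady() reads it back with ToInternalValue(), divides by kMsToRtpTimestamp for capture_time_ms_, and truncates to 32 bits for EncodedImage::_timeStamp (RTP timestamps are expected to wrap). A rough sketch of that round trip follows; OpaqueTimestamp is a hypothetical stand-in for base::TimeDelta, not Chromium API, and the sample value is illustrative.

// Sketch of the timestamp round trip, assuming the CL's semantics.
#include <cassert>
#include <cstdint>

// Stand-in for base::TimeDelta: an opaque signed 64-bit tick count.
struct OpaqueTimestamp {
  int64_t internal_value;
  static OpaqueTimestamp FromInternalValue(int64_t v) { return {v}; }
  int64_t ToInternalValue() const { return internal_value; }
};

int main() {
  const int64_t kMsToRtpTimestamp = 90;  // 90 kHz RTP clock, ticks per ms

  // Encode path: stash the 32-bit RTP timestamp from webrtc::VideoFrame in
  // the media::VideoFrame timestamp field without converting it.
  const uint32_t webrtc_rtp_timestamp = 4294967000u;  // near the wrap point
  OpaqueTimestamp frame_timestamp =
      OpaqueTimestamp::FromInternalValue(webrtc_rtp_timestamp);

  // Output path: recover the ticks, derive the capture time in ms, and
  // truncate back to 32 bits for EncodedImage::_timeStamp.
  const int64_t rtp_timestamp = frame_timestamp.ToInternalValue();
  const int64_t capture_time_ms = rtp_timestamp / kMsToRtpTimestamp;
  const uint32_t image_timestamp = static_cast<uint32_t>(rtp_timestamp);

  assert(image_timestamp == webrtc_rtp_timestamp);  // value survives intact
  assert(capture_time_ms == 47721855);              // 4294967000 / 90
  (void)image_timestamp;
  (void)capture_time_ms;
  return 0;
}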
