Chromium Code Reviews

Unified diff: content/renderer/media/gpu/rtc_video_encoder.cc

Issue 2205623002: Use webrtc::VideoFrame timestamp in RTCVideoEncoder (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Created 4 years, 4 months ago
 // Copyright 2013 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "content/renderer/media/gpu/rtc_video_encoder.h"

 #include <string.h>

 #include "base/bind.h"
 #include "base/location.h"
(...skipping 14 matching lines...)
 #include "media/filters/h264_parser.h"
 #include "media/renderers/gpu_video_accelerator_factories.h"
 #include "media/video/video_encode_accelerator.h"
 #include "third_party/libyuv/include/libyuv.h"
 #include "third_party/webrtc/base/timeutils.h"

 namespace content {

 namespace {

+// Used for timestamp conversions.
+static const int kMsToRtpTimestamp = 90;
+
 // Translate from webrtc::VideoCodecType and webrtc::VideoCodec to
 // media::VideoCodecProfile.
 media::VideoCodecProfile WebRTCVideoCodecToVideoCodecProfile(
     webrtc::VideoCodecType type,
     const webrtc::VideoCodec* codec_settings) {
   DCHECK_EQ(type, codec_settings->codecType);
   switch (type) {
     case webrtc::kVideoCodecVP8:
       return media::VP8PROFILE_ANY;
     case webrtc::kVideoCodecH264: {
(...skipping 434 matching lines...)

   if (!timestamp.is_zero()) {
     capture_time_us = timestamp.InMicroseconds();;
     capture_time_ms = timestamp.InMilliseconds();
   } else {
     // Fallback to the current time if encoder does not provide timestamp.
     capture_time_us = rtc::TimeMicros();
     capture_time_ms = capture_time_us / base::Time::kMicrosecondsPerMillisecond;
   }
   // RTP timestamp can wrap around. Get the lower 32 bits.
-  rtp_timestamp = static_cast<uint32_t>(
-      capture_time_us * 90 / base::Time::kMicrosecondsPerMillisecond);
+  rtp_timestamp =
+      static_cast<uint32_t>(capture_time_us * kMsToRtpTimestamp /
+                            base::Time::kMicrosecondsPerMillisecond);

   webrtc::EncodedImage image(
       reinterpret_cast<uint8_t*>(output_buffer->memory()), payload_size,
       output_buffer->mapped_size());
   image._encodedWidth = input_visible_size_.width();
   image._encodedHeight = input_visible_size_.height();
   image._timeStamp = rtp_timestamp;
   image.capture_time_ms_ = capture_time_ms;
   image._frameType =
       (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta);
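For reference, the hunk above scales a capture timestamp given in microseconds to the 90 kHz RTP clock and truncates it to 32 bits, since RTP timestamps wrap around. A minimal standalone sketch of that arithmetic, using plain C++ and hypothetical values in place of Chromium's base:: types:

#include <cstdint>
#include <iostream>

int main() {
  // Constants mirroring the ones used above.
  const int64_t kMicrosecondsPerMillisecond = 1000;
  const int64_t kMsToRtpTimestamp = 90;  // 90 kHz clock = 90 ticks per ms.

  // Hypothetical capture time: 33 ms, expressed in microseconds.
  const int64_t capture_time_us = 33 * kMicrosecondsPerMillisecond;

  // Scale to the RTP clock and keep only the lower 32 bits, since RTP
  // timestamps are 32-bit and wrap around.
  const uint32_t rtp_timestamp = static_cast<uint32_t>(
      capture_time_us * kMsToRtpTimestamp / kMicrosecondsPerMillisecond);

  std::cout << rtp_timestamp << "\n";  // Prints 2970 (33 ms * 90 ticks/ms).
  return 0;
}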
(...skipping 63 matching lines...)
   scoped_refptr<media::VideoFrame> frame;
   if (next_frame->video_frame_buffer()->native_handle()) {
     frame = static_cast<media::VideoFrame*>(
         next_frame->video_frame_buffer()->native_handle());
     requires_copy = RequiresSizeChange(frame);
   } else {
     requires_copy = true;
   }

   if (requires_copy) {
-    const base::TimeDelta timestamp =
-        frame ? frame->timestamp()
-              : base::TimeDelta::FromMilliseconds(next_frame->ntp_time_ms());
     base::SharedMemory* input_buffer = input_buffers_[index];
     frame = media::VideoFrame::WrapExternalSharedMemory(
         media::PIXEL_FORMAT_I420, input_frame_coded_size_,
         gfx::Rect(input_visible_size_), input_visible_size_,
         reinterpret_cast<uint8_t*>(input_buffer->memory()),
-        input_buffer->mapped_size(), input_buffer->handle(), 0, timestamp);
+        input_buffer->mapped_size(), input_buffer->handle(), 0,
+        base::TimeDelta());
     if (!frame.get()) {
       LogAndNotifyError(FROM_HERE, "failed to create frame",
                         media::VideoEncodeAccelerator::kPlatformFailureError);
       return;
     }
     // Do a strided copy of the input frame to match the input requirements for
     // the encoder.
     // TODO(sheu): support zero-copy from WebRTC. http://crbug.com/269312
     if (libyuv::I420Copy(next_frame->video_frame_buffer()->DataY(),
                          next_frame->video_frame_buffer()->StrideY(),
                          next_frame->video_frame_buffer()->DataU(),
                          next_frame->video_frame_buffer()->StrideU(),
                          next_frame->video_frame_buffer()->DataV(),
                          next_frame->video_frame_buffer()->StrideV(),
                          frame->data(media::VideoFrame::kYPlane),
                          frame->stride(media::VideoFrame::kYPlane),
                          frame->data(media::VideoFrame::kUPlane),
                          frame->stride(media::VideoFrame::kUPlane),
                          frame->data(media::VideoFrame::kVPlane),
                          frame->stride(media::VideoFrame::kVPlane),
                          next_frame->width(), next_frame->height())) {
       LogAndNotifyError(FROM_HERE, "Failed to copy buffer",
                         media::VideoEncodeAccelerator::kPlatformFailureError);
       return;
     }
   }
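For reference, the requires_copy path above uses libyuv::I420Copy to copy each plane of the WebRTC buffer into the shared-memory frame while respecting both sides' row strides. A minimal standalone sketch of what a single strided plane copy involves, with hypothetical buffer sizes (not the libyuv implementation):

#include <cstdint>
#include <cstring>
#include <vector>

// Copy |height| rows of |width| bytes from a source plane with |src_stride|
// bytes per row into a destination plane with |dst_stride| bytes per row.
void CopyPlane(const uint8_t* src, int src_stride,
               uint8_t* dst, int dst_stride,
               int width, int height) {
  for (int row = 0; row < height; ++row) {
    std::memcpy(dst + row * dst_stride, src + row * src_stride, width);
  }
}

int main() {
  // Hypothetical 4x2 Y plane: source padded to a stride of 8 bytes per row,
  // destination padded to a stride of 16 bytes per row.
  const int width = 4, height = 2;
  std::vector<uint8_t> src(8 * height, 1);
  std::vector<uint8_t> dst(16 * height, 0);
  CopyPlane(src.data(), 8, dst.data(), 16, width, height);
  return 0;
}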
+  // Use the timestamp set from WebRTC and convert it from 90 kHz.
+  frame->set_timestamp(
+      base::TimeDelta::FromMicroseconds(
+          next_frame->timestamp() * base::Time::kMicrosecondsPerMillisecond /
+          kMsToRtpTimestamp));

pbos 2016/08/02 00:15:52: I think you can use base::TimeDelta::FromMilliseconds.
emircan 2016/08/02 22:05:40: next_frame->ntp_time_ms() is 0 according to the logs.
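The added lines perform the reverse of the earlier conversion: the webrtc::VideoFrame timestamp, given in 90 kHz RTP ticks, is mapped back to microseconds before being stored on the media::VideoFrame. A standalone sketch with hypothetical values, in plain C++ rather than the base::TimeDelta API discussed in the thread above:

#include <cstdint>
#include <iostream>

int main() {
  const int64_t kMicrosecondsPerMillisecond = 1000;
  const int64_t kMsToRtpTimestamp = 90;  // 90 ticks of the RTP clock per ms.

  // Hypothetical webrtc::VideoFrame::timestamp() value: 33 ms worth of ticks.
  const uint32_t rtp_ticks = 2970;

  // Microseconds, matching the FromMicroseconds() path in the new code.
  const int64_t timestamp_us = static_cast<int64_t>(rtp_ticks) *
                               kMicrosecondsPerMillisecond / kMsToRtpTimestamp;

  // The same value in milliseconds, as FromMilliseconds() would take it; the
  // integer division drops any sub-millisecond remainder.
  const int64_t timestamp_ms = rtp_ticks / kMsToRtpTimestamp;

  std::cout << timestamp_us << " us, " << timestamp_ms << " ms\n";  // 33000 us, 33 ms
  return 0;
}

Converting through microseconds keeps any sub-millisecond part of the 90 kHz tick count, while a millisecond-based conversion would truncate it.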
   frame->AddDestructionObserver(media::BindToCurrentLoop(
       base::Bind(&RTCVideoEncoder::Impl::EncodeFrameFinished, this, index)));
   video_encoder_->Encode(frame, next_frame_keyframe);
   input_buffers_free_.pop_back();
   SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK);
 }

 void RTCVideoEncoder::Impl::EncodeFrameFinished(int index) {
   DVLOG(3) << "Impl::EncodeFrameFinished(): index=" << index;
   DCHECK(thread_checker_.CalledOnValidThread());
(...skipping 255 matching lines...)
   UMA_HISTOGRAM_BOOLEAN("Media.RTCVideoEncoderInitEncodeSuccess",
                         init_retval == WEBRTC_VIDEO_CODEC_OK);
   if (init_retval == WEBRTC_VIDEO_CODEC_OK) {
     UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoEncoderProfile",
                               profile,
                               media::VIDEO_CODEC_PROFILE_MAX + 1);
   }
 }

 }  // namespace content