OLD | NEW |
1 // Copyright 2013 The Chromium Authors. All rights reserved. | 1 // Copyright 2013 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/renderer/media/gpu/rtc_video_encoder.h" | 5 #include "content/renderer/media/gpu/rtc_video_encoder.h" |
6 | 6 |
7 #include <string.h> | 7 #include <string.h> |
8 | 8 |
9 #include "base/bind.h" | 9 #include "base/bind.h" |
10 #include "base/location.h" | 10 #include "base/location.h" |
(...skipping 14 matching lines...) |
25 #include "media/filters/h264_parser.h" | 25 #include "media/filters/h264_parser.h" |
26 #include "media/renderers/gpu_video_accelerator_factories.h" | 26 #include "media/renderers/gpu_video_accelerator_factories.h" |
27 #include "media/video/video_encode_accelerator.h" | 27 #include "media/video/video_encode_accelerator.h" |
28 #include "third_party/libyuv/include/libyuv.h" | 28 #include "third_party/libyuv/include/libyuv.h" |
29 #include "third_party/webrtc/base/timeutils.h" | 29 #include "third_party/webrtc/base/timeutils.h" |
30 | 30 |
31 namespace content { | 31 namespace content { |
32 | 32 |
33 namespace { | 33 namespace { |
34 | 34 |
35 // Used for timestamp conversions. | |
36 static const int64_t kMsToRtpTimestamp = 90; | |
37 | |
38 // Translate from webrtc::VideoCodecType and webrtc::VideoCodec to | 35 // Translate from webrtc::VideoCodecType and webrtc::VideoCodec to |
39 // media::VideoCodecProfile. | 36 // media::VideoCodecProfile. |
40 media::VideoCodecProfile WebRTCVideoCodecToVideoCodecProfile( | 37 media::VideoCodecProfile WebRTCVideoCodecToVideoCodecProfile( |
41 webrtc::VideoCodecType type, | 38 webrtc::VideoCodecType type, |
42 const webrtc::VideoCodec* codec_settings) { | 39 const webrtc::VideoCodec* codec_settings) { |
43 DCHECK_EQ(type, codec_settings->codecType); | 40 DCHECK_EQ(type, codec_settings->codecType); |
44 switch (type) { | 41 switch (type) { |
45 case webrtc::kVideoCodecVP8: | 42 case webrtc::kVideoCodecVP8: |
46 return media::VP8PROFILE_ANY; | 43 return media::VP8PROFILE_ANY; |
47 case webrtc::kVideoCodecH264: { | 44 case webrtc::kVideoCodecH264: { |
(...skipping 422 matching lines...) |
470 } | 467 } |
471 base::SharedMemory* output_buffer = output_buffers_[bitstream_buffer_id]; | 468 base::SharedMemory* output_buffer = output_buffers_[bitstream_buffer_id]; |
472 if (payload_size > output_buffer->mapped_size()) { | 469 if (payload_size > output_buffer->mapped_size()) { |
473 LogAndNotifyError(FROM_HERE, "invalid payload_size", | 470 LogAndNotifyError(FROM_HERE, "invalid payload_size", |
474 media::VideoEncodeAccelerator::kPlatformFailureError); | 471 media::VideoEncodeAccelerator::kPlatformFailureError); |
475 return; | 472 return; |
476 } | 473 } |
477 output_buffers_free_count_--; | 474 output_buffers_free_count_--; |
478 | 475 |
479 // Derive the capture time (in ms) and RTP timestamp (in 90 kHz ticks). | 476 // Derive the capture time (in ms) and RTP timestamp (in 90 kHz ticks). |
480 int64_t rtp_timestamp, capture_time_ms; | 477 int64_t capture_time_us, capture_time_ms; |
| 478 uint32_t rtp_timestamp; |
| 479 |
481 if (!timestamp.is_zero()) { | 480 if (!timestamp.is_zero()) { |
482 // Get RTP timestamp value. | 481 capture_time_us = timestamp.InMicroseconds(); |
483 rtp_timestamp = timestamp.ToInternalValue(); | 482 capture_time_ms = timestamp.InMilliseconds(); |
484 capture_time_ms = rtp_timestamp / kMsToRtpTimestamp; | |
485 } else { | 483 } else { |
486 // Fallback to the current time if encoder does not provide timestamp. | 484 // Fallback to the current time if encoder does not provide timestamp. |
487 rtp_timestamp = rtc::TimeMicros() * kMsToRtpTimestamp / | 485 capture_time_us = rtc::TimeMicros(); |
488 base::Time::kMicrosecondsPerMillisecond; | 486 capture_time_ms = capture_time_us / base::Time::kMicrosecondsPerMillisecond; |
489 capture_time_ms = | |
490 rtc::TimeMicros() / base::Time::kMicrosecondsPerMillisecond; | |
491 } | 487 } |
| 488 // RTP timestamp can wrap around. Get the lower 32 bits. |
| 489 rtp_timestamp = static_cast<uint32_t>( |
| 490 capture_time_us * 90 / base::Time::kMicrosecondsPerMillisecond); |
492 | 491 |
493 webrtc::EncodedImage image( | 492 webrtc::EncodedImage image( |
494 reinterpret_cast<uint8_t*>(output_buffer->memory()), payload_size, | 493 reinterpret_cast<uint8_t*>(output_buffer->memory()), payload_size, |
495 output_buffer->mapped_size()); | 494 output_buffer->mapped_size()); |
496 image._encodedWidth = input_visible_size_.width(); | 495 image._encodedWidth = input_visible_size_.width(); |
497 image._encodedHeight = input_visible_size_.height(); | 496 image._encodedHeight = input_visible_size_.height(); |
498 image._timeStamp = static_cast<uint32_t>(rtp_timestamp); | 497 image._timeStamp = rtp_timestamp; |
499 image.capture_time_ms_ = capture_time_ms; | 498 image.capture_time_ms_ = capture_time_ms; |
500 image._frameType = | 499 image._frameType = |
501 (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta); | 500 (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta); |
502 image._completeFrame = true; | 501 image._completeFrame = true; |
503 | 502 |
504 ReturnEncodedImage(image, bitstream_buffer_id, picture_id_); | 503 ReturnEncodedImage(image, bitstream_buffer_id, picture_id_); |
505 // Picture ID must wrap after reaching the maximum. | 504 // Picture ID must wrap after reaching the maximum. |
506 picture_id_ = (picture_id_ + 1) & 0x7FFF; | 505 picture_id_ = (picture_id_ + 1) & 0x7FFF; |
507 } | 506 } |
508 | 507 |
(...skipping 56 matching lines...) |
565 scoped_refptr<media::VideoFrame> frame; | 564 scoped_refptr<media::VideoFrame> frame; |
566 if (next_frame->video_frame_buffer()->native_handle()) { | 565 if (next_frame->video_frame_buffer()->native_handle()) { |
567 frame = static_cast<media::VideoFrame*>( | 566 frame = static_cast<media::VideoFrame*>( |
568 next_frame->video_frame_buffer()->native_handle()); | 567 next_frame->video_frame_buffer()->native_handle()); |
569 requires_copy = RequiresSizeChange(frame); | 568 requires_copy = RequiresSizeChange(frame); |
570 } else { | 569 } else { |
571 requires_copy = true; | 570 requires_copy = true; |
572 } | 571 } |
573 | 572 |
574 if (requires_copy) { | 573 if (requires_copy) { |
| 574 const base::TimeDelta timestamp = |
| 575 frame ? frame->timestamp() |
| 576 : base::TimeDelta::FromMilliseconds(next_frame->ntp_time_ms()); |
575 base::SharedMemory* input_buffer = input_buffers_[index]; | 577 base::SharedMemory* input_buffer = input_buffers_[index]; |
576 frame = media::VideoFrame::WrapExternalSharedMemory( | 578 frame = media::VideoFrame::WrapExternalSharedMemory( |
577 media::PIXEL_FORMAT_I420, input_frame_coded_size_, | 579 media::PIXEL_FORMAT_I420, input_frame_coded_size_, |
578 gfx::Rect(input_visible_size_), input_visible_size_, | 580 gfx::Rect(input_visible_size_), input_visible_size_, |
579 reinterpret_cast<uint8_t*>(input_buffer->memory()), | 581 reinterpret_cast<uint8_t*>(input_buffer->memory()), |
580 input_buffer->mapped_size(), input_buffer->handle(), 0, | 582 input_buffer->mapped_size(), input_buffer->handle(), 0, timestamp); |
581 base::TimeDelta()); | |
582 if (!frame.get()) { | 583 if (!frame.get()) { |
583 LogAndNotifyError(FROM_HERE, "failed to create frame", | 584 LogAndNotifyError(FROM_HERE, "failed to create frame", |
584 media::VideoEncodeAccelerator::kPlatformFailureError); | 585 media::VideoEncodeAccelerator::kPlatformFailureError); |
585 return; | 586 return; |
586 } | 587 } |
587 | 588 |
588 // Do a strided copy and scale (if necessary) the input frame to match | 589 // Do a strided copy and scale (if necessary) the input frame to match |
589 // the input requirements for the encoder. | 590 // the input requirements for the encoder. |
590 // TODO(sheu): Support zero-copy from WebRTC. http://crbug.com/269312 | 591 // TODO(sheu): Support zero-copy from WebRTC. http://crbug.com/269312 |
591 // TODO(magjed): Downscale with kFilterBox in an image pyramid instead. | 592 // TODO(magjed): Downscale with kFilterBox in an image pyramid instead. |
(...skipping 11 matching lines...) |
603 frame->visible_data(media::VideoFrame::kVPlane), | 604 frame->visible_data(media::VideoFrame::kVPlane), |
604 frame->stride(media::VideoFrame::kVPlane), | 605 frame->stride(media::VideoFrame::kVPlane), |
605 frame->visible_rect().width(), | 606 frame->visible_rect().width(), |
606 frame->visible_rect().height(), | 607 frame->visible_rect().height(), |
607 libyuv::kFilterBox)) { | 608 libyuv::kFilterBox)) { |
608 LogAndNotifyError(FROM_HERE, "Failed to copy buffer", | 609 LogAndNotifyError(FROM_HERE, "Failed to copy buffer", |
609 media::VideoEncodeAccelerator::kPlatformFailureError); | 610 media::VideoEncodeAccelerator::kPlatformFailureError); |
610 return; | 611 return; |
611 } | 612 } |
612 } | 613 } |
613 // Use the timestamp set from WebRTC and set it in 90 kHz. | |
614 frame->set_timestamp( | |
615 base::TimeDelta::FromInternalValue(next_frame->timestamp())); | |
616 frame->AddDestructionObserver(media::BindToCurrentLoop( | 614 frame->AddDestructionObserver(media::BindToCurrentLoop( |
617 base::Bind(&RTCVideoEncoder::Impl::EncodeFrameFinished, this, index))); | 615 base::Bind(&RTCVideoEncoder::Impl::EncodeFrameFinished, this, index))); |
618 video_encoder_->Encode(frame, next_frame_keyframe); | 616 video_encoder_->Encode(frame, next_frame_keyframe); |
619 input_buffers_free_.pop_back(); | 617 input_buffers_free_.pop_back(); |
620 SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK); | 618 SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK); |
621 } | 619 } |
622 | 620 |
623 void RTCVideoEncoder::Impl::EncodeFrameFinished(int index) { | 621 void RTCVideoEncoder::Impl::EncodeFrameFinished(int index) { |
624 DVLOG(3) << "Impl::EncodeFrameFinished(): index=" << index; | 622 DVLOG(3) << "Impl::EncodeFrameFinished(): index=" << index; |
625 DCHECK(thread_checker_.CalledOnValidThread()); | 623 DCHECK(thread_checker_.CalledOnValidThread()); |
(...skipping 255 matching lines...) |
881 UMA_HISTOGRAM_BOOLEAN("Media.RTCVideoEncoderInitEncodeSuccess", | 879 UMA_HISTOGRAM_BOOLEAN("Media.RTCVideoEncoderInitEncodeSuccess", |
882 init_retval == WEBRTC_VIDEO_CODEC_OK); | 880 init_retval == WEBRTC_VIDEO_CODEC_OK); |
883 if (init_retval == WEBRTC_VIDEO_CODEC_OK) { | 881 if (init_retval == WEBRTC_VIDEO_CODEC_OK) { |
884 UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoEncoderProfile", | 882 UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoEncoderProfile", |
885 profile, | 883 profile, |
886 media::VIDEO_CODEC_PROFILE_MAX + 1); | 884 media::VIDEO_CODEC_PROFILE_MAX + 1); |
887 } | 885 } |
888 } | 886 } |
889 | 887 |
890 } // namespace content | 888 } // namespace content |
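
For reference, the core of this change replaces the old scheme, where the frame timestamp already carried RTP ticks and kMsToRtpTimestamp converted them back to milliseconds, with one that keeps the real capture time and derives the 32-bit RTP timestamp at the output stage. Below is a minimal standalone sketch of that derivation, assuming only the 90 kHz RTP clock used in the diff; the helper name ToRtpTimestamp is illustrative and is not part of the CL.

#include <cstdint>

// 90 kHz RTP clock: 90 ticks per millisecond, 1000 microseconds per millisecond.
constexpr int64_t kRtpTicksPerMillisecond = 90;
constexpr int64_t kMicrosecondsPerMillisecond = 1000;

// Illustrative helper (not part of the CL): converts a capture time in
// microseconds to a 32-bit RTP timestamp, mirroring what BitstreamBufferReady
// now computes inline. Scaling before dividing preserves sub-millisecond
// precision; the cast to uint32_t keeps the lower 32 bits, modeling the RTP
// timestamp wraparound called out in the new comment.
uint32_t ToRtpTimestamp(int64_t capture_time_us) {
  return static_cast<uint32_t>(capture_time_us * kRtpTicksPerMillisecond /
                               kMicrosecondsPerMillisecond);
}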