OLD | NEW |
1 // Copyright 2013 The Chromium Authors. All rights reserved. | 1 // Copyright 2013 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/renderer/media/gpu/rtc_video_encoder.h" | 5 #include "content/renderer/media/gpu/rtc_video_encoder.h" |
6 | 6 |
7 #include <string.h> | 7 #include <string.h> |
8 | 8 |
9 #include "base/bind.h" | 9 #include "base/bind.h" |
10 #include "base/location.h" | 10 #include "base/location.h" |
(...skipping 455 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
466 return; | 466 return; |
467 } | 467 } |
468 base::SharedMemory* output_buffer = output_buffers_[bitstream_buffer_id]; | 468 base::SharedMemory* output_buffer = output_buffers_[bitstream_buffer_id]; |
469 if (payload_size > output_buffer->mapped_size()) { | 469 if (payload_size > output_buffer->mapped_size()) { |
470 LogAndNotifyError(FROM_HERE, "invalid payload_size", | 470 LogAndNotifyError(FROM_HERE, "invalid payload_size", |
471 media::VideoEncodeAccelerator::kPlatformFailureError); | 471 media::VideoEncodeAccelerator::kPlatformFailureError); |
472 return; | 472 return; |
473 } | 473 } |
474 output_buffers_free_count_--; | 474 output_buffers_free_count_--; |
475 | 475 |
476 // CrOS Nyan provides invalid timestamp. Use the current time for now. | 476 // Derive the capture time (in ms) and RTP timestamp (in 90KHz ticks). |
477 // TODO(wuchengli): use the timestamp in BitstreamBufferReady after Nyan is | 477 int64_t capture_time_us, capture_time_ms; |
478 // fixed. http://crbug.com/620565. | 478 uint32_t rtp_timestamp; |
479 const int64_t capture_time_us = rtc::TimeMicros(); | |
480 | 479 |
481 // Derive the capture time (in ms) and RTP timestamp (in 90KHz ticks). | 480 if (!timestamp.is_zero()) { |
482 const int64_t capture_time_ms = | 481 capture_time_us = timestamp.InMicroseconds(); |
483 capture_time_us / base::Time::kMicrosecondsPerMillisecond; | 482 capture_time_ms = timestamp.InMilliseconds(); |
484 | 483 } else { |
485 const uint32_t rtp_timestamp = static_cast<uint32_t>( | 484 // Fallback to the current time if encoder does not provide timestamp. |
| 485 capture_time_us = rtc::TimeMicros(); |
| 486 capture_time_ms = capture_time_us / base::Time::kMicrosecondsPerMillisecond; |
| 487 } |
| 488 // RTP timestamp can wrap around. Get the lower 32 bits. |
| 489 rtp_timestamp = static_cast<uint32_t>( |
486 capture_time_us * 90 / base::Time::kMicrosecondsPerMillisecond); | 490 capture_time_us * 90 / base::Time::kMicrosecondsPerMillisecond); |
487 | 491 |
488 webrtc::EncodedImage image( | 492 webrtc::EncodedImage image( |
489 reinterpret_cast<uint8_t*>(output_buffer->memory()), payload_size, | 493 reinterpret_cast<uint8_t*>(output_buffer->memory()), payload_size, |
490 output_buffer->mapped_size()); | 494 output_buffer->mapped_size()); |
491 image._encodedWidth = input_visible_size_.width(); | 495 image._encodedWidth = input_visible_size_.width(); |
492 image._encodedHeight = input_visible_size_.height(); | 496 image._encodedHeight = input_visible_size_.height(); |
493 image._timeStamp = rtp_timestamp; | 497 image._timeStamp = rtp_timestamp; |
494 image.capture_time_ms_ = capture_time_ms; | 498 image.capture_time_ms_ = capture_time_ms; |
495 image._frameType = | 499 image._frameType = |
(...skipping 64 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
560 scoped_refptr<media::VideoFrame> frame; | 564 scoped_refptr<media::VideoFrame> frame; |
561 if (next_frame->video_frame_buffer()->native_handle()) { | 565 if (next_frame->video_frame_buffer()->native_handle()) { |
562 frame = static_cast<media::VideoFrame*>( | 566 frame = static_cast<media::VideoFrame*>( |
563 next_frame->video_frame_buffer()->native_handle()); | 567 next_frame->video_frame_buffer()->native_handle()); |
564 requires_copy = RequiresSizeChange(frame); | 568 requires_copy = RequiresSizeChange(frame); |
565 } else { | 569 } else { |
566 requires_copy = true; | 570 requires_copy = true; |
567 } | 571 } |
568 | 572 |
569 if (requires_copy) { | 573 if (requires_copy) { |
| 574 const base::TimeDelta timestamp = |
| 575 frame ? frame->timestamp() |
| 576 : base::TimeDelta::FromMilliseconds(next_frame->ntp_time_ms()); |
570 base::SharedMemory* input_buffer = input_buffers_[index]; | 577 base::SharedMemory* input_buffer = input_buffers_[index]; |
571 frame = media::VideoFrame::WrapExternalSharedMemory( | 578 frame = media::VideoFrame::WrapExternalSharedMemory( |
572 media::PIXEL_FORMAT_I420, input_frame_coded_size_, | 579 media::PIXEL_FORMAT_I420, input_frame_coded_size_, |
573 gfx::Rect(input_visible_size_), input_visible_size_, | 580 gfx::Rect(input_visible_size_), input_visible_size_, |
574 reinterpret_cast<uint8_t*>(input_buffer->memory()), | 581 reinterpret_cast<uint8_t*>(input_buffer->memory()), |
575 input_buffer->mapped_size(), input_buffer->handle(), 0, | 582 input_buffer->mapped_size(), input_buffer->handle(), 0, timestamp); |
576 base::TimeDelta::FromMilliseconds(next_frame->ntp_time_ms())); | |
577 if (!frame.get()) { | 583 if (!frame.get()) { |
578 LogAndNotifyError(FROM_HERE, "failed to create frame", | 584 LogAndNotifyError(FROM_HERE, "failed to create frame", |
579 media::VideoEncodeAccelerator::kPlatformFailureError); | 585 media::VideoEncodeAccelerator::kPlatformFailureError); |
580 return; | 586 return; |
581 } | 587 } |
582 // Do a strided copy of the input frame to match the input requirements for | 588 // Do a strided copy of the input frame to match the input requirements for |
583 // the encoder. | 589 // the encoder. |
584 // TODO(sheu): support zero-copy from WebRTC. http://crbug.com/269312 | 590 // TODO(sheu): support zero-copy from WebRTC. http://crbug.com/269312 |
585 if (libyuv::I420Copy(next_frame->video_frame_buffer()->DataY(), | 591 if (libyuv::I420Copy(next_frame->video_frame_buffer()->DataY(), |
586 next_frame->video_frame_buffer()->StrideY(), | 592 next_frame->video_frame_buffer()->StrideY(), |
(...skipping 281 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
868 UMA_HISTOGRAM_BOOLEAN("Media.RTCVideoEncoderInitEncodeSuccess", | 874 UMA_HISTOGRAM_BOOLEAN("Media.RTCVideoEncoderInitEncodeSuccess", |
869 init_retval == WEBRTC_VIDEO_CODEC_OK); | 875 init_retval == WEBRTC_VIDEO_CODEC_OK); |
870 if (init_retval == WEBRTC_VIDEO_CODEC_OK) { | 876 if (init_retval == WEBRTC_VIDEO_CODEC_OK) { |
871 UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoEncoderProfile", | 877 UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoEncoderProfile", |
872 profile, | 878 profile, |
873 media::VIDEO_CODEC_PROFILE_MAX + 1); | 879 media::VIDEO_CODEC_PROFILE_MAX + 1); |
874 } | 880 } |
875 } | 881 } |
876 | 882 |
877 } // namespace content | 883 } // namespace content |
OLD | NEW |