Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 // Copyright 2013 The Chromium Authors. All rights reserved. | 1 // Copyright 2013 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "content/renderer/media/gpu/rtc_video_encoder.h" | 5 #include "content/renderer/media/gpu/rtc_video_encoder.h" |
| 6 | 6 |
| 7 #include <string.h> | 7 #include <string.h> |
| 8 | 8 |
| 9 #include "base/bind.h" | 9 #include "base/bind.h" |
| 10 #include "base/location.h" | 10 #include "base/location.h" |
| (...skipping 216 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 227 // we don't care about ordering. | 227 // we don't care about ordering. |
| 228 std::vector<int> input_buffers_free_; | 228 std::vector<int> input_buffers_free_; |
| 229 | 229 |
| 230 // The number of output buffers ready to be filled with output from the | 230 // The number of output buffers ready to be filled with output from the |
| 231 // encoder. | 231 // encoder. |
| 232 int output_buffers_free_count_; | 232 int output_buffers_free_count_; |
| 233 | 233 |
| 234 // 15 bits running index of the VP8 frames. See VP8 RTP spec for details. | 234 // 15 bits running index of the VP8 frames. See VP8 RTP spec for details. |
| 235 uint16_t picture_id_; | 235 uint16_t picture_id_; |
| 236 | 236 |
| 237 // capture_time_ms_ field of the last returned webrtc::EncodedImage from | |
|
mcasas
2017/02/16 19:10:15
nit: |capture_time_ms_|
emircan
2017/02/16 19:41:54
Done.
| |
| 238 // BitstreamBufferReady(). | |
| 239 int64_t last_capture_time_ms_; | |
| 240 | |
| 237 // webrtc::VideoEncoder encode complete callback. | 241 // webrtc::VideoEncoder encode complete callback. |
| 238 webrtc::EncodedImageCallback* encoded_image_callback_; | 242 webrtc::EncodedImageCallback* encoded_image_callback_; |
| 239 | 243 |
| 240 // The video codec type, as reported to WebRTC. | 244 // The video codec type, as reported to WebRTC. |
| 241 const webrtc::VideoCodecType video_codec_type_; | 245 const webrtc::VideoCodecType video_codec_type_; |
| 242 | 246 |
| 243 // Protect |status_|. |status_| is read or written on |gpu_task_runner_| in | 247 // Protect |status_|. |status_| is read or written on |gpu_task_runner_| in |
| 244 // Impl. It can be read in RTCVideoEncoder on other threads. | 248 // Impl. It can be read in RTCVideoEncoder on other threads. |
| 245 mutable base::Lock status_lock_; | 249 mutable base::Lock status_lock_; |
| 246 | 250 |
| 247 // We cannot immediately return error conditions to the WebRTC user of this | 251 // We cannot immediately return error conditions to the WebRTC user of this |
| 248 // class, as there is no error callback in the webrtc::VideoEncoder interface. | 252 // class, as there is no error callback in the webrtc::VideoEncoder interface. |
| 249 // Instead, we cache an error status here and return it the next time an | 253 // Instead, we cache an error status here and return it the next time an |
| 250 // interface entry point is called. This is protected by |status_lock_|. | 254 // interface entry point is called. This is protected by |status_lock_|. |
| 251 int32_t status_; | 255 int32_t status_; |
| 252 | 256 |
| 253 DISALLOW_COPY_AND_ASSIGN(Impl); | 257 DISALLOW_COPY_AND_ASSIGN(Impl); |
| 254 }; | 258 }; |
| 255 | 259 |
| 256 RTCVideoEncoder::Impl::Impl(media::GpuVideoAcceleratorFactories* gpu_factories, | 260 RTCVideoEncoder::Impl::Impl(media::GpuVideoAcceleratorFactories* gpu_factories, |
| 257 webrtc::VideoCodecType video_codec_type) | 261 webrtc::VideoCodecType video_codec_type) |
| 258 : gpu_factories_(gpu_factories), | 262 : gpu_factories_(gpu_factories), |
| 259 async_waiter_(NULL), | 263 async_waiter_(NULL), |
| 260 async_retval_(NULL), | 264 async_retval_(NULL), |
| 261 input_next_frame_(NULL), | 265 input_next_frame_(NULL), |
| 262 input_next_frame_keyframe_(false), | 266 input_next_frame_keyframe_(false), |
| 263 output_buffers_free_count_(0), | 267 output_buffers_free_count_(0), |
| 268 last_capture_time_ms_(-1), | |
| 264 encoded_image_callback_(nullptr), | 269 encoded_image_callback_(nullptr), |
| 265 video_codec_type_(video_codec_type), | 270 video_codec_type_(video_codec_type), |
| 266 status_(WEBRTC_VIDEO_CODEC_UNINITIALIZED) { | 271 status_(WEBRTC_VIDEO_CODEC_UNINITIALIZED) { |
| 267 thread_checker_.DetachFromThread(); | 272 thread_checker_.DetachFromThread(); |
| 268 // Picture ID should start on a random number. | 273 // Picture ID should start on a random number. |
| 269 picture_id_ = static_cast<uint16_t>(base::RandInt(0, 0x7FFF)); | 274 picture_id_ = static_cast<uint16_t>(base::RandInt(0, 0x7FFF)); |
| 270 } | 275 } |
| 271 | 276 |
| 272 void RTCVideoEncoder::Impl::CreateAndInitializeVEA( | 277 void RTCVideoEncoder::Impl::CreateAndInitializeVEA( |
| 273 const gfx::Size& input_visible_size, | 278 const gfx::Size& input_visible_size, |
| (...skipping 188 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 462 return; | 467 return; |
| 463 } | 468 } |
| 464 base::SharedMemory* output_buffer = output_buffers_[bitstream_buffer_id]; | 469 base::SharedMemory* output_buffer = output_buffers_[bitstream_buffer_id]; |
| 465 if (payload_size > output_buffer->mapped_size()) { | 470 if (payload_size > output_buffer->mapped_size()) { |
| 466 LogAndNotifyError(FROM_HERE, "invalid payload_size", | 471 LogAndNotifyError(FROM_HERE, "invalid payload_size", |
| 467 media::VideoEncodeAccelerator::kPlatformFailureError); | 472 media::VideoEncodeAccelerator::kPlatformFailureError); |
| 468 return; | 473 return; |
| 469 } | 474 } |
| 470 output_buffers_free_count_--; | 475 output_buffers_free_count_--; |
| 471 | 476 |
| 472 // Derive the capture time (in ms) and RTP timestamp (in 90KHz ticks). | 477 // Derive the capture time in ms from system clock. Make sure that it is |
| 473 int64_t capture_time_us, capture_time_ms; | 478 // greater than the last. |
| 474 uint32_t rtp_timestamp; | 479 const int64_t capture_time_us = rtc::TimeMicros(); |
|
mcasas
2017/02/16 19:10:15
Could we do all these calculations in ms and use
r
emircan
2017/02/16 19:41:54
It would lose the precision in |rtp_timestamp| cal
| |
| 480 int64_t capture_time_ms = | |
| 481 capture_time_us / base::Time::kMicrosecondsPerMillisecond; | |
| 482 if (capture_time_ms <= last_capture_time_ms_) | |
| 483 capture_time_ms = last_capture_time_ms_ + 1; | |
|
mcasas
2017/02/16 19:10:15
Maybe
capture_time_ms = std::min(capture_time_m
emircan
2017/02/16 19:41:54
That wouldn't work. Suppose there is a rapid succe
| |
| 484 last_capture_time_ms_ = capture_time_ms; | |
| 475 | 485 |
| 476 if (!timestamp.is_zero()) { | 486 // Fallback to the current time if encoder does not provide timestamp. |
| 477 capture_time_us = timestamp.InMicroseconds();; | 487 const int64_t encoder_time_us = |
| 478 capture_time_ms = timestamp.InMilliseconds(); | 488 timestamp.is_zero() ? capture_time_us : timestamp.InMicroseconds(); |
| 479 } else { | 489 |
| 480 // Fallback to the current time if encoder does not provide timestamp. | 490 // Derive the RTP timestamp (in 90KHz ticks). It can wrap around, get the |
| 481 capture_time_us = rtc::TimeMicros(); | 491 // lower 32 bits. |
| 482 capture_time_ms = capture_time_us / base::Time::kMicrosecondsPerMillisecond; | 492 const uint32_t rtp_timestamp = static_cast<uint32_t>( |
| 483 } | 493 encoder_time_us * 90 / base::Time::kMicrosecondsPerMillisecond); |
| 484 // RTP timestamp can wrap around. Get the lower 32 bits. | |
| 485 rtp_timestamp = static_cast<uint32_t>( | |
| 486 capture_time_us * 90 / base::Time::kMicrosecondsPerMillisecond); | |
| 487 | 494 |
| 488 webrtc::EncodedImage image( | 495 webrtc::EncodedImage image( |
| 489 reinterpret_cast<uint8_t*>(output_buffer->memory()), payload_size, | 496 reinterpret_cast<uint8_t*>(output_buffer->memory()), payload_size, |
| 490 output_buffer->mapped_size()); | 497 output_buffer->mapped_size()); |
| 491 image._encodedWidth = input_visible_size_.width(); | 498 image._encodedWidth = input_visible_size_.width(); |
| 492 image._encodedHeight = input_visible_size_.height(); | 499 image._encodedHeight = input_visible_size_.height(); |
| 493 image._timeStamp = rtp_timestamp; | 500 image._timeStamp = rtp_timestamp; |
| 494 image.capture_time_ms_ = capture_time_ms; | 501 image.capture_time_ms_ = capture_time_ms; |
| 495 image._frameType = | 502 image._frameType = |
| 496 (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta); | 503 (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta); |
| (...skipping 383 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 880 UMA_HISTOGRAM_BOOLEAN("Media.RTCVideoEncoderInitEncodeSuccess", | 887 UMA_HISTOGRAM_BOOLEAN("Media.RTCVideoEncoderInitEncodeSuccess", |
| 881 init_retval == WEBRTC_VIDEO_CODEC_OK); | 888 init_retval == WEBRTC_VIDEO_CODEC_OK); |
| 882 if (init_retval == WEBRTC_VIDEO_CODEC_OK) { | 889 if (init_retval == WEBRTC_VIDEO_CODEC_OK) { |
| 883 UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoEncoderProfile", | 890 UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoEncoderProfile", |
| 884 profile, | 891 profile, |
| 885 media::VIDEO_CODEC_PROFILE_MAX + 1); | 892 media::VIDEO_CODEC_PROFILE_MAX + 1); |
| 886 } | 893 } |
| 887 } | 894 } |
| 888 | 895 |
| 889 } // namespace content | 896 } // namespace content |
| OLD | NEW |