Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2013 The Chromium Authors. All rights reserved. | 1 // Copyright 2013 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "content/renderer/media/rtc_video_encoder.h" | 5 #include "content/renderer/media/rtc_video_encoder.h" |
| 6 | 6 |
| 7 #include <string.h> | 7 #include <string.h> |
| 8 | 8 |
| 9 #include "base/bind.h" | 9 #include "base/bind.h" |
| 10 #include "base/location.h" | 10 #include "base/location.h" |
| (...skipping 85 matching lines...) | |
| 96 // | 96 // |
| 97 // This class separates state related to the thread that RTCVideoEncoder | 97 // This class separates state related to the thread that RTCVideoEncoder |
| 98 // operates on (presently the libjingle worker thread) from the thread that | 98 // operates on (presently the libjingle worker thread) from the thread that |
| 99 // |gpu_factories_| provides for accelerator operations (presently the media | 99 // |gpu_factories_| provides for accelerator operations (presently the media |
| 100 // thread). The RTCVideoEncoder class can be deleted directly by WebRTC, while | 100 // thread). The RTCVideoEncoder class can be deleted directly by WebRTC, while |
| 101 // RTCVideoEncoder::Impl stays around long enough to properly shut down the VEA. | 101 // RTCVideoEncoder::Impl stays around long enough to properly shut down the VEA. |
| 102 class RTCVideoEncoder::Impl | 102 class RTCVideoEncoder::Impl |
| 103 : public media::VideoEncodeAccelerator::Client, | 103 : public media::VideoEncodeAccelerator::Client, |
| 104 public base::RefCountedThreadSafe<RTCVideoEncoder::Impl> { | 104 public base::RefCountedThreadSafe<RTCVideoEncoder::Impl> { |
| 105 public: | 105 public: |
| 106 Impl(const base::WeakPtr<RTCVideoEncoder>& weak_encoder, | 106 Impl(media::GpuVideoAcceleratorFactories* gpu_factories, |
| 107 media::GpuVideoAcceleratorFactories* gpu_factories); | 107 webrtc::VideoCodecType video_codec_type); |
| 108 | 108 |
| 109 // Create the VEA and call Initialize() on it. Called once per instantiation, | 109 // Create the VEA and call Initialize() on it. Called once per instantiation, |
| 110 // and then the instance is bound forevermore to whichever thread made the | 110 // and then the instance is bound forevermore to whichever thread made the |
| 111 // call. | 111 // call. |
| 112 // RTCVideoEncoder expects to be able to call this function synchronously from | 112 // RTCVideoEncoder expects to be able to call this function synchronously from |
| 113 // its own thread, hence the |async_waiter| and |async_retval| arguments. | 113 // its own thread, hence the |async_waiter| and |async_retval| arguments. |
| 114 void CreateAndInitializeVEA(const gfx::Size& input_visible_size, | 114 void CreateAndInitializeVEA(const gfx::Size& input_visible_size, |
| 115 uint32_t bitrate, | 115 uint32_t bitrate, |
| 116 media::VideoCodecProfile profile, | 116 media::VideoCodecProfile profile, |
| 117 base::WaitableEvent* async_waiter, | 117 base::WaitableEvent* async_waiter, |
| 118 int32_t* async_retval); | 118 int32_t* async_retval); |
| 119 // Enqueue a frame from WebRTC for encoding. | 119 // Enqueue a frame from WebRTC for encoding. |
| 120 // RTCVideoEncoder expects to be able to call this function synchronously from | 120 // RTCVideoEncoder expects to be able to call this function synchronously from |
| 121 // its own thread, hence the |async_waiter| and |async_retval| arguments. | 121 // its own thread, hence the |async_waiter| and |async_retval| arguments. |
| 122 void Enqueue(const webrtc::VideoFrame* input_frame, | 122 void Enqueue(const webrtc::VideoFrame* input_frame, |
| 123 bool force_keyframe, | 123 bool force_keyframe, |
| 124 base::WaitableEvent* async_waiter, | 124 base::WaitableEvent* async_waiter, |
| 125 int32_t* async_retval); | 125 int32_t* async_retval); |
| 126 | 126 |
| 127 // RTCVideoEncoder is given a buffer to be passed to WebRTC through the | 127 // RTCVideoEncoder is given a buffer to be passed to WebRTC through the |
| 128 // RTCVideoEncoder::ReturnEncodedImage() function. When that is complete, | 128 // RTCVideoEncoder::ReturnEncodedImage() function. When that is complete, |
| 129 // the buffer is returned to Impl by its index using this function. | 129 // the buffer is returned to Impl by its index using this function. |
| 130 void UseOutputBitstreamBufferId(int32_t bitstream_buffer_id); | 130 void UseOutputBitstreamBufferId(int32_t bitstream_buffer_id); |
| 131 | 131 |
| 132 // Request encoding parameter change for the underlying encoder. | 132 // Request encoding parameter change for the underlying encoder. |
| 133 void RequestEncodingParametersChange(uint32_t bitrate, uint32_t framerate); | 133 void RequestEncodingParametersChange(uint32_t bitrate, uint32_t framerate); |
| 134 | 134 |
| 135 // Destroy this Impl's encoder. The destructor is not explicitly called, as | 135 // Destroy this Impl's encoder. The destructor is not explicitly called, as |
| 136 // Impl is a base::RefCountedThreadSafe. | 136 // Impl is a base::RefCountedThreadSafe. |
| 137 void Destroy(); | 137 void Destroy(base::WaitableEvent* waiter); |
| 138 | 138 |
| 139 // media::VideoEncodeAccelerator::Client implementation. | 139 // media::VideoEncodeAccelerator::Client implementation. |
| 140 void RequireBitstreamBuffers(unsigned int input_count, | 140 void RequireBitstreamBuffers(unsigned int input_count, |
| 141 const gfx::Size& input_coded_size, | 141 const gfx::Size& input_coded_size, |
| 142 size_t output_buffer_size) override; | 142 size_t output_buffer_size) override; |
| 143 void BitstreamBufferReady(int32_t bitstream_buffer_id, | 143 void BitstreamBufferReady(int32_t bitstream_buffer_id, |
| 144 size_t payload_size, | 144 size_t payload_size, |
| 145 bool key_frame) override; | 145 bool key_frame) override; |
| 146 void NotifyError(media::VideoEncodeAccelerator::Error error) override; | 146 void NotifyError(media::VideoEncodeAccelerator::Error error) override; |
| 147 | 147 |
| 148 void RegisterEncodeCompleteCallback(base::WaitableEvent* waiter, | |
| 149 webrtc::EncodedImageCallback* callback); | |
| 150 | |
| 148 private: | 151 private: |
| 149 friend class base::RefCountedThreadSafe<Impl>; | 152 friend class base::RefCountedThreadSafe<Impl>; |
| 150 | 153 |
| 151 enum { | 154 enum { |
| 152 kInputBufferExtraCount = 1, // The number of input buffers allocated, more | 155 kInputBufferExtraCount = 1, // The number of input buffers allocated, more |
| 153 // than what is requested by | 156 // than what is requested by |
| 154 // VEA::RequireBitstreamBuffers(). | 157 // VEA::RequireBitstreamBuffers(). |
| 155 kOutputBufferCount = 3, | 158 kOutputBufferCount = 3, |
| 156 }; | 159 }; |
| 157 | 160 |
| (...skipping 15 matching lines...) | |
| 173 void RegisterAsyncWaiter(base::WaitableEvent* waiter, int32_t* retval); | 176 void RegisterAsyncWaiter(base::WaitableEvent* waiter, int32_t* retval); |
| 174 void SignalAsyncWaiter(int32_t retval); | 177 void SignalAsyncWaiter(int32_t retval); |
| 175 | 178 |
| 176 // Checks if the bitrate would overflow when passing from kbps to bps. | 179 // Checks if the bitrate would overflow when passing from kbps to bps. |
| 177 bool IsBitrateTooHigh(uint32_t bitrate); | 180 bool IsBitrateTooHigh(uint32_t bitrate); |
| 178 | 181 |
| 179 // Checks if the frame size is different than hardware accelerator | 182 // Checks if the frame size is different than hardware accelerator |
| 180 // requirements. | 183 // requirements. |
| 181 bool RequiresSizeChange(const scoped_refptr<media::VideoFrame>& frame) const; | 184 bool RequiresSizeChange(const scoped_refptr<media::VideoFrame>& frame) const; |
| 182 | 185 |
| 186 // Return an encoded output buffer to WebRTC. | |
| 187 void ReturnEncodedImage(scoped_ptr<webrtc::EncodedImage> image, | |
| 188 int32_t bitstream_buffer_id, | |
| 189 uint16_t picture_id); | |
| 190 | |
| 183 base::ThreadChecker thread_checker_; | 191 base::ThreadChecker thread_checker_; |
| 184 | 192 |
| 185 // Weak pointer to the parent RTCVideoEncoder, for posting back VEA::Client | |
| 186 // notifications. | |
| 187 const base::WeakPtr<RTCVideoEncoder> weak_encoder_; | |
| 188 | |
| 189 // The message loop on which to post callbacks to |weak_encoder_|. | |
| 190 const scoped_refptr<base::SingleThreadTaskRunner> encoder_task_runner_; | |
| 191 | |
| 192 // Factory for creating VEAs, shared memory buffers, etc. | 193 // Factory for creating VEAs, shared memory buffers, etc. |
| 193 media::GpuVideoAcceleratorFactories* gpu_factories_; | 194 media::GpuVideoAcceleratorFactories* gpu_factories_; |
| 194 | 195 |
| 195 // webrtc::VideoEncoder expects InitEncode() and Encode() to be synchronous. | 196 // webrtc::VideoEncoder expects InitEncode() and Encode() to be synchronous. |
| 196 // Do this by waiting on the |async_waiter_| and returning the return value in | 197 // Do this by waiting on the |async_waiter_| and returning the return value in |
| 197 // |async_retval_| when initialization completes, encoding completes, or | 198 // |async_retval_| when initialization completes, encoding completes, or |
| 198 // an error occurs. | 199 // an error occurs. |
| 199 base::WaitableEvent* async_waiter_; | 200 base::WaitableEvent* async_waiter_; |
| 200 int32_t* async_retval_; | 201 int32_t* async_retval_; |
| 201 | 202 |
| (...skipping 19 matching lines...) | |
| 221 // we don't care about ordering. | 222 // we don't care about ordering. |
| 222 std::vector<int> input_buffers_free_; | 223 std::vector<int> input_buffers_free_; |
| 223 | 224 |
| 224 // The number of output buffers ready to be filled with output from the | 225 // The number of output buffers ready to be filled with output from the |
| 225 // encoder. | 226 // encoder. |
| 226 int output_buffers_free_count_; | 227 int output_buffers_free_count_; |
| 227 | 228 |
| 228 // 15 bits running index of the VP8 frames. See VP8 RTP spec for details. | 229 // 15 bits running index of the VP8 frames. See VP8 RTP spec for details. |
| 229 uint16_t picture_id_; | 230 uint16_t picture_id_; |
| 230 | 231 |
| 232 // webrtc::VideoEncoder encode complete callback. | |
| 233 webrtc::EncodedImageCallback* encoded_image_callback_; | |
| 234 | |
| 235 // The video codec type, as reported to WebRTC. | |
| 236 const webrtc::VideoCodecType video_codec_type_; | |
| 237 | |
| 238 // If video encode accelerator posts an error, cache it here and return it | |
| 239 // the next time Enqueue is called. | |
| 240 int32_t status_; | |
| 241 | |
| 231 DISALLOW_COPY_AND_ASSIGN(Impl); | 242 DISALLOW_COPY_AND_ASSIGN(Impl); |
| 232 }; | 243 }; |
| 233 | 244 |
| 234 RTCVideoEncoder::Impl::Impl(const base::WeakPtr<RTCVideoEncoder>& weak_encoder, | 245 RTCVideoEncoder::Impl::Impl(media::GpuVideoAcceleratorFactories* gpu_factories, |
| 235 media::GpuVideoAcceleratorFactories* gpu_factories) | 246 webrtc::VideoCodecType video_codec_type) |
| 236 : weak_encoder_(weak_encoder), | 247 : gpu_factories_(gpu_factories), |
| 237 encoder_task_runner_(base::ThreadTaskRunnerHandle::Get()), | |
| 238 gpu_factories_(gpu_factories), | |
| 239 async_waiter_(NULL), | 248 async_waiter_(NULL), |
| 240 async_retval_(NULL), | 249 async_retval_(NULL), |
| 241 input_next_frame_(NULL), | 250 input_next_frame_(NULL), |
| 242 input_next_frame_keyframe_(false), | 251 input_next_frame_keyframe_(false), |
| 243 output_buffers_free_count_(0) { | 252 output_buffers_free_count_(0), |
| 253 encoded_image_callback_(nullptr), | |
| 254 video_codec_type_(video_codec_type), | |
| 255 status_(WEBRTC_VIDEO_CODEC_OK) { | |
| 244 thread_checker_.DetachFromThread(); | 256 thread_checker_.DetachFromThread(); |
| 245 // Picture ID should start on a random number. | 257 // Picture ID should start on a random number. |
| 246 picture_id_ = static_cast<uint16_t>(base::RandInt(0, 0x7FFF)); | 258 picture_id_ = static_cast<uint16_t>(base::RandInt(0, 0x7FFF)); |
| 247 } | 259 } |
| 248 | 260 |
| 249 void RTCVideoEncoder::Impl::CreateAndInitializeVEA( | 261 void RTCVideoEncoder::Impl::CreateAndInitializeVEA( |
| 250 const gfx::Size& input_visible_size, | 262 const gfx::Size& input_visible_size, |
| 251 uint32_t bitrate, | 263 uint32_t bitrate, |
| 252 media::VideoCodecProfile profile, | 264 media::VideoCodecProfile profile, |
| 253 base::WaitableEvent* async_waiter, | 265 base::WaitableEvent* async_waiter, |
| 254 int32_t* async_retval) { | 266 int32_t* async_retval) { |
| 255 DVLOG(3) << "Impl::CreateAndInitializeVEA()"; | 267 DVLOG(3) << "Impl::CreateAndInitializeVEA()"; |
| 256 DCHECK(thread_checker_.CalledOnValidThread()); | 268 DCHECK(thread_checker_.CalledOnValidThread()); |
| 257 | 269 |
| 258 RegisterAsyncWaiter(async_waiter, async_retval); | 270 RegisterAsyncWaiter(async_waiter, async_retval); |
| 259 | 271 |
| 260 // Check for overflow converting bitrate (kilobits/sec) to bits/sec. | 272 // Check for overflow converting bitrate (kilobits/sec) to bits/sec. |
| 261 if (IsBitrateTooHigh(bitrate)) | 273 if (IsBitrateTooHigh(bitrate)) |
| 262 return; | 274 return; |
| 263 | 275 |
| 264 video_encoder_ = gpu_factories_->CreateVideoEncodeAccelerator(); | 276 video_encoder_ = gpu_factories_->CreateVideoEncodeAccelerator(); |
| 265 if (!video_encoder_) { | 277 if (!video_encoder_) { |
| 266 LogAndNotifyError(FROM_HERE, "Error creating VideoEncodeAccelerator", | 278 LogAndNotifyError(FROM_HERE, "Error creating VideoEncodeAccelerator", |
| 267 media::VideoEncodeAccelerator::kPlatformFailureError); | 279 media::VideoEncodeAccelerator::kPlatformFailureError); |
| 268 return; | 280 return; |
| 269 } | 281 } |
| 270 input_visible_size_ = input_visible_size; | 282 input_visible_size_ = input_visible_size; |
| 283 | |
| 271 if (!video_encoder_->Initialize(media::PIXEL_FORMAT_I420, input_visible_size_, | 284 if (!video_encoder_->Initialize(media::PIXEL_FORMAT_I420, input_visible_size_, |
| 272 profile, bitrate * 1000, this)) { | 285 profile, bitrate * 1000, this)) { |
| 273 LogAndNotifyError(FROM_HERE, "Error initializing video_encoder", | 286 LogAndNotifyError(FROM_HERE, "Error initializing video_encoder", |
| 274 media::VideoEncodeAccelerator::kInvalidArgumentError); | 287 media::VideoEncodeAccelerator::kInvalidArgumentError); |
| 275 return; | 288 return; |
| 276 } | 289 } |
| 277 } | 290 } |
kcwu
2016/04/01 12:33:05
I don't understand the original code.
There is n
wuchengli
2016/04/03 14:28:01
VEA will call RequireBitstreamBuffers after initialization
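For context on the exchange above, a minimal sketch of the waiter pattern the patch relies on: the WebRTC-side call registers a WaitableEvent plus a return-value slot, posts the real work to the media thread, and blocks until Impl signals from RequireBitstreamBuffers() on success or NotifyError() on failure. The names follow the patch; the body is illustrative, not the committed code.

```cpp
// Sketch only: how InitEncode() stays synchronous while the VEA work runs on
// the media thread. Impl::SignalAsyncWaiter() fills |retval| and signals.
base::WaitableEvent waiter(true /* manual_reset */, false /* signaled */);
int32_t retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED;

gpu_task_runner_->PostTask(
    FROM_HERE,
    base::Bind(&RTCVideoEncoder::Impl::CreateAndInitializeVEA, impl_,
               gfx::Size(codec_settings->width, codec_settings->height),
               codec_settings->startBitrate, profile, &waiter, &retval));

// Returns once RequireBitstreamBuffers() (success path) or NotifyError()
// (failure path) has called SignalAsyncWaiter() on the media thread.
waiter.Wait();
return retval;
```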
| 278 | 291 |
| 279 void RTCVideoEncoder::Impl::Enqueue(const webrtc::VideoFrame* input_frame, | 292 void RTCVideoEncoder::Impl::Enqueue(const webrtc::VideoFrame* input_frame, |
| 280 bool force_keyframe, | 293 bool force_keyframe, |
| 281 base::WaitableEvent* async_waiter, | 294 base::WaitableEvent* async_waiter, |
| 282 int32_t* async_retval) { | 295 int32_t* async_retval) { |
| 283 DVLOG(3) << "Impl::Enqueue()"; | 296 DVLOG(3) << "Impl::Enqueue()"; |
| 284 DCHECK(thread_checker_.CalledOnValidThread()); | 297 DCHECK(thread_checker_.CalledOnValidThread()); |
| 285 DCHECK(!input_next_frame_); | 298 DCHECK(!input_next_frame_); |
| 286 | 299 |
| 287 RegisterAsyncWaiter(async_waiter, async_retval); | 300 RegisterAsyncWaiter(async_waiter, async_retval); |
| 301 if (status_ != WEBRTC_VIDEO_CODEC_OK) | |
| 302 SignalAsyncWaiter(status_); | |
kcwu
2016/04/01 12:33:05
return here?
wuchengli
2016/04/03 14:28:01
Done.
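The early return kcwu asks for would sit directly after the cached status is signalled; roughly (the exact committed lines land in the next patchset, not shown in this diff):

```cpp
RegisterAsyncWaiter(async_waiter, async_retval);
if (status_ != WEBRTC_VIDEO_CODEC_OK) {
  // A VEA failure was cached by NotifyError(); report it and bail out so
  // Enqueue() never touches the already-reset |video_encoder_|.
  SignalAsyncWaiter(status_);
  return;
}
```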
| 303 | |
| 288 // If there are no free input and output buffers, drop the frame to avoid a | 304 // If there are no free input and output buffers, drop the frame to avoid a |
| 289 // deadlock. If there is a free input buffer, EncodeOneFrame will run and | 305 // deadlock. If there is a free input buffer, EncodeOneFrame will run and |
| 290 // unblock Encode(). If there are no free input buffers but there is a free | 306 // unblock Encode(). If there are no free input buffers but there is a free |
| 291 // output buffer, EncodeFrameFinished will be called later to unblock | 307 // output buffer, EncodeFrameFinished will be called later to unblock |
| 292 // Encode(). | 308 // Encode(). |
| 293 // | 309 // |
| 294 // The caller of Encode() holds a webrtc lock. The deadlock happens when: | 310 // The caller of Encode() holds a webrtc lock. The deadlock happens when: |
| 295 // (1) Encode() is waiting for the frame to be encoded in EncodeOneFrame(). | 311 // (1) Encode() is waiting for the frame to be encoded in EncodeOneFrame(). |
| 296 // (2) There are no free input buffers and they cannot be freed because | 312 // (2) There are no free input buffers and they cannot be freed because |
| 297 // the encoder has no output buffers. | 313 // the encoder has no output buffers. |
| (...skipping 39 matching lines...) | |
| 337 DCHECK(thread_checker_.CalledOnValidThread()); | 353 DCHECK(thread_checker_.CalledOnValidThread()); |
| 338 | 354 |
| 339 // Check for overflow converting bitrate (kilobits/sec) to bits/sec. | 355 // Check for overflow converting bitrate (kilobits/sec) to bits/sec. |
| 340 if (IsBitrateTooHigh(bitrate)) | 356 if (IsBitrateTooHigh(bitrate)) |
| 341 return; | 357 return; |
| 342 | 358 |
| 343 if (video_encoder_) | 359 if (video_encoder_) |
| 344 video_encoder_->RequestEncodingParametersChange(bitrate * 1000, framerate); | 360 video_encoder_->RequestEncodingParametersChange(bitrate * 1000, framerate); |
| 345 } | 361 } |
| 346 | 362 |
| 347 void RTCVideoEncoder::Impl::Destroy() { | 363 void RTCVideoEncoder::Impl::Destroy(base::WaitableEvent* waiter) { |
| 348 DVLOG(3) << "Impl::Destroy()"; | 364 DVLOG(3) << "Impl::Destroy()"; |
| 349 DCHECK(thread_checker_.CalledOnValidThread()); | 365 DCHECK(thread_checker_.CalledOnValidThread()); |
| 350 video_encoder_.reset(); | 366 video_encoder_.reset(); |
| 367 waiter->Signal(); | |
| 351 } | 368 } |
| 352 | 369 |
| 353 void RTCVideoEncoder::Impl::RequireBitstreamBuffers( | 370 void RTCVideoEncoder::Impl::RequireBitstreamBuffers( |
| 354 unsigned int input_count, | 371 unsigned int input_count, |
| 355 const gfx::Size& input_coded_size, | 372 const gfx::Size& input_coded_size, |
| 356 size_t output_buffer_size) { | 373 size_t output_buffer_size) { |
| 357 DVLOG(3) << "Impl::RequireBitstreamBuffers(): input_count=" << input_count | 374 DVLOG(3) << "Impl::RequireBitstreamBuffers(): input_count=" << input_count |
| 358 << ", input_coded_size=" << input_coded_size.ToString() | 375 << ", input_coded_size=" << input_coded_size.ToString() |
| 359 << ", output_buffer_size=" << output_buffer_size; | 376 << ", output_buffer_size=" << output_buffer_size; |
| 360 DCHECK(thread_checker_.CalledOnValidThread()); | 377 DCHECK(thread_checker_.CalledOnValidThread()); |
| (...skipping 61 matching lines...) | |
| 422 | 439 |
| 423 // Use webrtc timestamps to ensure correct RTP sender behavior. | 440 // Use webrtc timestamps to ensure correct RTP sender behavior. |
| 424 // TODO(hshi): obtain timestamp from the capturer, see crbug.com/350106. | 441 // TODO(hshi): obtain timestamp from the capturer, see crbug.com/350106. |
| 425 const int64_t capture_time_us = webrtc::TickTime::MicrosecondTimestamp(); | 442 const int64_t capture_time_us = webrtc::TickTime::MicrosecondTimestamp(); |
| 426 | 443 |
| 427 // Derive the capture time (in ms) and RTP timestamp (in 90KHz ticks). | 444 // Derive the capture time (in ms) and RTP timestamp (in 90KHz ticks). |
| 428 const int64_t capture_time_ms = capture_time_us / 1000; | 445 const int64_t capture_time_ms = capture_time_us / 1000; |
| 429 const uint32_t rtp_timestamp = | 446 const uint32_t rtp_timestamp = |
| 430 static_cast<uint32_t>(capture_time_us * 90 / 1000); | 447 static_cast<uint32_t>(capture_time_us * 90 / 1000); |
| 431 | 448 |
| 432 scoped_ptr<webrtc::EncodedImage> image(new webrtc::EncodedImage( | 449 scoped_ptr<webrtc::EncodedImage> image(new webrtc::EncodedImage( |
pbos
2016/03/31 09:47:21
This can be stack allocated now, right?
wuchengli
2016/04/01 10:19:51
Yes. I'll update the code in the next patchset.
wuchengli
2016/04/03 14:28:01
Done.
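What the stack-allocated variant pbos suggests would roughly look like (a sketch of the suggestion, not the follow-up patchset):

```cpp
// webrtc::EncodedImage only wraps the shared-memory buffer, it does not own
// it, so once the image no longer crosses threads it can live on the stack.
webrtc::EncodedImage image(
    reinterpret_cast<uint8_t*>(output_buffer->memory()), payload_size,
    output_buffer->mapped_size());
image._encodedWidth = input_visible_size_.width();
image._encodedHeight = input_visible_size_.height();
image._timeStamp = rtp_timestamp;
image.capture_time_ms_ = capture_time_ms;
image._frameType =
    key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta;
image._completeFrame = true;

ReturnEncodedImage(image, bitstream_buffer_id, picture_id_);
```

This also assumes ReturnEncodedImage() takes the image by const reference, which is the follow-up comment further down.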
| 433 reinterpret_cast<uint8_t*>(output_buffer->memory()), | 450 reinterpret_cast<uint8_t*>(output_buffer->memory()), |
| 434 payload_size, | 451 payload_size, |
| 435 output_buffer->mapped_size())); | 452 output_buffer->mapped_size())); |
| 436 image->_encodedWidth = input_visible_size_.width(); | 453 image->_encodedWidth = input_visible_size_.width(); |
| 437 image->_encodedHeight = input_visible_size_.height(); | 454 image->_encodedHeight = input_visible_size_.height(); |
| 438 image->_timeStamp = rtp_timestamp; | 455 image->_timeStamp = rtp_timestamp; |
| 439 image->capture_time_ms_ = capture_time_ms; | 456 image->capture_time_ms_ = capture_time_ms; |
| 440 image->_frameType = | 457 image->_frameType = |
| 441 (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta); | 458 (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta); |
| 442 image->_completeFrame = true; | 459 image->_completeFrame = true; |
| 443 | 460 |
| 444 encoder_task_runner_->PostTask( | 461 ReturnEncodedImage(std::move(image), bitstream_buffer_id, picture_id_); |
| 445 FROM_HERE, | |
| 446 base::Bind(&RTCVideoEncoder::ReturnEncodedImage, weak_encoder_, | |
| 447 base::Passed(&image), bitstream_buffer_id, picture_id_)); | |
| 448 // Picture ID must wrap after reaching the maximum. | 462 // Picture ID must wrap after reaching the maximum. |
| 449 picture_id_ = (picture_id_ + 1) & 0x7FFF; | 463 picture_id_ = (picture_id_ + 1) & 0x7FFF; |
| 450 } | 464 } |
| 451 | 465 |
| 452 void RTCVideoEncoder::Impl::NotifyError( | 466 void RTCVideoEncoder::Impl::NotifyError( |
| 453 media::VideoEncodeAccelerator::Error error) { | 467 media::VideoEncodeAccelerator::Error error) { |
| 454 DCHECK(thread_checker_.CalledOnValidThread()); | 468 DCHECK(thread_checker_.CalledOnValidThread()); |
| 455 int32_t retval; | 469 int32_t retval; |
| 456 switch (error) { | 470 switch (error) { |
| 457 case media::VideoEncodeAccelerator::kInvalidArgumentError: | 471 case media::VideoEncodeAccelerator::kInvalidArgumentError: |
| 458 retval = WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 472 retval = WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
| 459 break; | 473 break; |
| 460 default: | 474 default: |
| 461 retval = WEBRTC_VIDEO_CODEC_ERROR; | 475 retval = WEBRTC_VIDEO_CODEC_ERROR; |
| 462 } | 476 } |
| 463 | 477 |
| 464 video_encoder_.reset(); | 478 video_encoder_.reset(); |
| 465 | 479 |
| 466 if (async_waiter_) { | 480 if (async_waiter_) { |
| 467 SignalAsyncWaiter(retval); | 481 SignalAsyncWaiter(retval); |
| 468 } else { | 482 } else { |
| 469 encoder_task_runner_->PostTask( | 483 status_ = retval; |
kcwu
2016/04/01 12:33:05
In the original code, if any error occurs, RTCVide
wuchengli
2016/04/03 14:28:01
Moved impl_status_ to Impl. Now all methods will c
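wuchengli's reply describes checking the cached status in every Impl entry point instead of posting NotifyError back to the encoder thread. A hypothetical sketch of that guard (not part of this patchset):

```cpp
// Hypothetical: each entry point consults |status_|, which NotifyError() set
// after resetting |video_encoder_|, and refuses further work.
void RTCVideoEncoder::Impl::RequestEncodingParametersChange(uint32_t bitrate,
                                                            uint32_t framerate) {
  DCHECK(thread_checker_.CalledOnValidThread());
  if (status_ != WEBRTC_VIDEO_CODEC_OK)
    return;
  if (IsBitrateTooHigh(bitrate))
    return;
  if (video_encoder_)
    video_encoder_->RequestEncodingParametersChange(bitrate * 1000, framerate);
}
```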
| 470 FROM_HERE, | |
| 471 base::Bind(&RTCVideoEncoder::NotifyError, weak_encoder_, retval)); | |
| 472 } | 484 } |
| 473 } | 485 } |
| 474 | 486 |
| 475 RTCVideoEncoder::Impl::~Impl() { DCHECK(!video_encoder_); } | 487 RTCVideoEncoder::Impl::~Impl() { DCHECK(!video_encoder_); } |
| 476 | 488 |
| 477 void RTCVideoEncoder::Impl::LogAndNotifyError( | 489 void RTCVideoEncoder::Impl::LogAndNotifyError( |
| 478 const tracked_objects::Location& location, | 490 const tracked_objects::Location& location, |
| 479 const std::string& str, | 491 const std::string& str, |
| 480 media::VideoEncodeAccelerator::Error error) { | 492 media::VideoEncodeAccelerator::Error error) { |
| 481 static const char* kErrorNames[] = { | 493 static const char* kErrorNames[] = { |
| (...skipping 110 matching lines...) | |
| 592 media::VideoEncodeAccelerator::kInvalidArgumentError); | 604 media::VideoEncodeAccelerator::kInvalidArgumentError); |
| 593 return true; | 605 return true; |
| 594 } | 606 } |
| 595 | 607 |
| 596 bool RTCVideoEncoder::Impl::RequiresSizeChange( | 608 bool RTCVideoEncoder::Impl::RequiresSizeChange( |
| 597 const scoped_refptr<media::VideoFrame>& frame) const { | 609 const scoped_refptr<media::VideoFrame>& frame) const { |
| 598 return (frame->coded_size() != input_frame_coded_size_ || | 610 return (frame->coded_size() != input_frame_coded_size_ || |
| 599 frame->visible_rect() != gfx::Rect(input_visible_size_)); | 611 frame->visible_rect() != gfx::Rect(input_visible_size_)); |
| 600 } | 612 } |
| 601 | 613 |
| 614 void RTCVideoEncoder::Impl::RegisterEncodeCompleteCallback( | |
| 615 base::WaitableEvent* waiter, | |
| 616 webrtc::EncodedImageCallback* callback) { | |
| 617 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 618 DVLOG(3) << "RegisterEncodeCompleteCallback()"; | |
| 619 encoded_image_callback_ = callback; | |
| 620 waiter->Signal(); | |
| 621 } | |
| 622 | |
| 623 void RTCVideoEncoder::Impl::ReturnEncodedImage( | |
| 624 scoped_ptr<webrtc::EncodedImage> image, | |
pbos
2016/03/31 09:47:21
And this could be a const webrtc::EncodedImage&, n
wuchengli
2016/04/01 10:19:51
Yes. I'll update the code in the next patchset.
wuchengli
2016/04/03 14:28:01
Done.
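The signature change suggested here is small; a sketch of what it would become (assumed, since the follow-up patchset is not part of this diff):

```cpp
// Passing by const reference is safe because EncodedImageCallback::Encoded()
// is synchronous and the callee does not hold on to the buffer pointer.
void ReturnEncodedImage(const webrtc::EncodedImage& image,
                        int32_t bitstream_buffer_id,
                        uint16_t picture_id);
```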
| 625 int32_t bitstream_buffer_id, | |
| 626 uint16_t picture_id) { | |
| 627 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 628 DVLOG(3) << "ReturnEncodedImage(): " | |
| 629 << "bitstream_buffer_id=" << bitstream_buffer_id | |
| 630 << ", picture_id=" << picture_id; | |
| 631 | |
| 632 if (!encoded_image_callback_) | |
| 633 return; | |
| 634 | |
| 635 webrtc::RTPFragmentationHeader header; | |
| 636 memset(&header, 0, sizeof(header)); | |
| 637 switch (video_codec_type_) { | |
| 638 case webrtc::kVideoCodecVP8: | |
| 639 // Generate a header describing a single fragment. | |
| 640 header.VerifyAndAllocateFragmentationHeader(1); | |
| 641 header.fragmentationOffset[0] = 0; | |
| 642 header.fragmentationLength[0] = image->_length; | |
| 643 header.fragmentationPlType[0] = 0; | |
| 644 header.fragmentationTimeDiff[0] = 0; | |
| 645 break; | |
| 646 case webrtc::kVideoCodecH264: | |
| 647 if (!GetRTPFragmentationHeaderH264(&header, image->_buffer, | |
| 648 image->_length)) { | |
| 649 DLOG(ERROR) << "Failed to get RTP fragmentation header for H264"; | |
| 650 NotifyError( | |
| 651 (media::VideoEncodeAccelerator::Error)WEBRTC_VIDEO_CODEC_ERROR); | |
| 652 return; | |
| 653 } | |
| 654 break; | |
| 655 default: | |
| 656 NOTREACHED() << "Invalid video codec type"; | |
| 657 return; | |
| 658 } | |
| 659 | |
| 660 webrtc::CodecSpecificInfo info; | |
| 661 memset(&info, 0, sizeof(info)); | |
| 662 info.codecType = video_codec_type_; | |
| 663 if (video_codec_type_ == webrtc::kVideoCodecVP8) { | |
| 664 info.codecSpecific.VP8.pictureId = picture_id; | |
| 665 info.codecSpecific.VP8.tl0PicIdx = -1; | |
| 666 info.codecSpecific.VP8.keyIdx = -1; | |
| 667 } | |
| 668 | |
| 669 int32_t retval = encoded_image_callback_->Encoded(*image, &info, &header); | |
| 670 if (retval < 0) { | |
| 671 DVLOG(2) << "ReturnEncodedImage(): encoded_image_callback_ returned " | |
| 672 << retval; | |
| 673 } | |
| 674 | |
| 675 UseOutputBitstreamBufferId(bitstream_buffer_id); | |
| 676 } | |
| 677 | |
| 602 RTCVideoEncoder::RTCVideoEncoder( | 678 RTCVideoEncoder::RTCVideoEncoder( |
| 603 webrtc::VideoCodecType type, | 679 webrtc::VideoCodecType type, |
| 604 media::GpuVideoAcceleratorFactories* gpu_factories) | 680 media::GpuVideoAcceleratorFactories* gpu_factories) |
| 605 : video_codec_type_(type), | 681 : video_codec_type_(type), |
| 606 gpu_factories_(gpu_factories), | 682 gpu_factories_(gpu_factories), |
| 607 gpu_task_runner_(gpu_factories->GetTaskRunner()), | 683 gpu_task_runner_(gpu_factories->GetTaskRunner()), |
| 608 encoded_image_callback_(NULL), | 684 impl_status_(WEBRTC_VIDEO_CODEC_UNINITIALIZED) { |
kcwu
2016/04/01 12:33:05
impl_status_ becomes useless
wuchengli
2016/04/03 14:28:01
Removed. Added Impl.status_.
| 609 impl_status_(WEBRTC_VIDEO_CODEC_UNINITIALIZED), | |
| 610 weak_factory_(this) { | |
| 611 DVLOG(1) << "RTCVideoEncoder(): codec type=" << type; | 685 DVLOG(1) << "RTCVideoEncoder(): codec type=" << type; |
| 612 } | 686 } |
| 613 | 687 |
| 614 RTCVideoEncoder::~RTCVideoEncoder() { | 688 RTCVideoEncoder::~RTCVideoEncoder() { |
| 615 DVLOG(3) << "~RTCVideoEncoder"; | 689 DVLOG(3) << "~RTCVideoEncoder"; |
| 616 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 617 Release(); | 690 Release(); |
| 618 DCHECK(!impl_.get()); | 691 DCHECK(!impl_.get()); |
| 619 } | 692 } |
| 620 | 693 |
| 621 int32_t RTCVideoEncoder::InitEncode(const webrtc::VideoCodec* codec_settings, | 694 int32_t RTCVideoEncoder::InitEncode(const webrtc::VideoCodec* codec_settings, |
| 622 int32_t number_of_cores, | 695 int32_t number_of_cores, |
| 623 size_t max_payload_size) { | 696 size_t max_payload_size) { |
| 624 DVLOG(1) << "InitEncode(): codecType=" << codec_settings->codecType | 697 DVLOG(1) << "InitEncode(): codecType=" << codec_settings->codecType |
| 625 << ", width=" << codec_settings->width | 698 << ", width=" << codec_settings->width |
| 626 << ", height=" << codec_settings->height | 699 << ", height=" << codec_settings->height |
| 627 << ", startBitrate=" << codec_settings->startBitrate; | 700 << ", startBitrate=" << codec_settings->startBitrate; |
| 628 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 629 DCHECK(!impl_.get()); | 701 DCHECK(!impl_.get()); |
| 630 | 702 |
| 631 const media::VideoCodecProfile profile = | 703 const media::VideoCodecProfile profile = |
| 632 WebRTCVideoCodecToVideoCodecProfile(video_codec_type_, codec_settings); | 704 WebRTCVideoCodecToVideoCodecProfile(video_codec_type_, codec_settings); |
| 633 | 705 |
| 634 weak_factory_.InvalidateWeakPtrs(); | 706 impl_ = new Impl(gpu_factories_, video_codec_type_); |
| 635 impl_ = new Impl(weak_factory_.GetWeakPtr(), gpu_factories_); | |
| 636 base::WaitableEvent initialization_waiter(true, false); | 707 base::WaitableEvent initialization_waiter(true, false); |
| 637 int32_t initialization_retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED; | 708 int32_t initialization_retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
| 638 gpu_task_runner_->PostTask( | 709 gpu_task_runner_->PostTask( |
| 639 FROM_HERE, | 710 FROM_HERE, |
| 640 base::Bind(&RTCVideoEncoder::Impl::CreateAndInitializeVEA, | 711 base::Bind(&RTCVideoEncoder::Impl::CreateAndInitializeVEA, |
| 641 impl_, | 712 impl_, |
| 642 gfx::Size(codec_settings->width, codec_settings->height), | 713 gfx::Size(codec_settings->width, codec_settings->height), |
| 643 codec_settings->startBitrate, | 714 codec_settings->startBitrate, |
| 644 profile, | 715 profile, |
| 645 &initialization_waiter, | 716 &initialization_waiter, |
| (...skipping 30 matching lines...) | |
| 676 | 747 |
| 677 // webrtc::VideoEncoder expects this call to be synchronous. | 748 // webrtc::VideoEncoder expects this call to be synchronous. |
| 678 encode_waiter.Wait(); | 749 encode_waiter.Wait(); |
| 679 DVLOG(3) << "Encode(): returning encode_retval=" << encode_retval; | 750 DVLOG(3) << "Encode(): returning encode_retval=" << encode_retval; |
| 680 return encode_retval; | 751 return encode_retval; |
| 681 } | 752 } |
| 682 | 753 |
| 683 int32_t RTCVideoEncoder::RegisterEncodeCompleteCallback( | 754 int32_t RTCVideoEncoder::RegisterEncodeCompleteCallback( |
| 684 webrtc::EncodedImageCallback* callback) { | 755 webrtc::EncodedImageCallback* callback) { |
| 685 DVLOG(3) << "RegisterEncodeCompleteCallback()"; | 756 DVLOG(3) << "RegisterEncodeCompleteCallback()"; |
| 686 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 687 if (!impl_.get()) { | 757 if (!impl_.get()) { |
| 688 DVLOG(3) << "RegisterEncodeCompleteCallback(): returning " << impl_status_; | 758 DVLOG(3) << "RegisterEncodeCompleteCallback(): returning " << impl_status_; |
| 689 return impl_status_; | 759 return impl_status_; |
| 690 } | 760 } |
| 691 | 761 |
| 692 encoded_image_callback_ = callback; | 762 base::WaitableEvent encode_waiter(true, false); |
| 763 gpu_task_runner_->PostTask( | |
| 764 FROM_HERE, | |
| 765 base::Bind(&RTCVideoEncoder::Impl::RegisterEncodeCompleteCallback, impl_, | |
| 766 &encode_waiter, callback)); | |
| 767 encode_waiter.Wait(); | |
| 693 return WEBRTC_VIDEO_CODEC_OK; | 768 return WEBRTC_VIDEO_CODEC_OK; |
| 694 } | 769 } |
| 695 | 770 |
| 696 int32_t RTCVideoEncoder::Release() { | 771 int32_t RTCVideoEncoder::Release() { |
| 697 DVLOG(3) << "Release()"; | 772 DVLOG(3) << "Release()"; |
| 698 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 699 | 773 |
| 700 if (impl_.get()) { | 774 if (impl_.get()) { |
| 701 gpu_task_runner_->PostTask(FROM_HERE, | 775 base::WaitableEvent encode_waiter(true, false); |
| 702 base::Bind(&RTCVideoEncoder::Impl::Destroy, impl_)); | 776 gpu_task_runner_->PostTask( |
| 777 FROM_HERE, | |
| 778 base::Bind(&RTCVideoEncoder::Impl::Destroy, impl_, &encode_waiter)); | |
kcwu
2016/04/01 12:33:05
I'm wondering maybe we need to get async_retval he
wuchengli
2016/04/03 14:28:01
I don't think we should return error in Release. R
| 779 encode_waiter.Wait(); | |
| 703 impl_ = NULL; | 780 impl_ = NULL; |
| 704 weak_factory_.InvalidateWeakPtrs(); | |
| 705 impl_status_ = WEBRTC_VIDEO_CODEC_UNINITIALIZED; | 781 impl_status_ = WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
| 706 } | 782 } |
| 707 return WEBRTC_VIDEO_CODEC_OK; | 783 return WEBRTC_VIDEO_CODEC_OK; |
| 708 } | 784 } |
| 709 | 785 |
| 710 int32_t RTCVideoEncoder::SetChannelParameters(uint32_t packet_loss, | 786 int32_t RTCVideoEncoder::SetChannelParameters(uint32_t packet_loss, |
| 711 int64_t rtt) { | 787 int64_t rtt) { |
| 712 DVLOG(3) << "SetChannelParameters(): packet_loss=" << packet_loss | 788 DVLOG(3) << "SetChannelParameters(): packet_loss=" << packet_loss |
| 713 << ", rtt=" << rtt; | 789 << ", rtt=" << rtt; |
| 714 // Ignored. | 790 // Ignored. |
| (...skipping 10 matching lines...) | |
| 725 | 801 |
| 726 gpu_task_runner_->PostTask( | 802 gpu_task_runner_->PostTask( |
| 727 FROM_HERE, | 803 FROM_HERE, |
| 728 base::Bind(&RTCVideoEncoder::Impl::RequestEncodingParametersChange, | 804 base::Bind(&RTCVideoEncoder::Impl::RequestEncodingParametersChange, |
| 729 impl_, | 805 impl_, |
| 730 new_bit_rate, | 806 new_bit_rate, |
| 731 frame_rate)); | 807 frame_rate)); |
| 732 return WEBRTC_VIDEO_CODEC_OK; | 808 return WEBRTC_VIDEO_CODEC_OK; |
| 733 } | 809 } |
| 734 | 810 |
| 735 void RTCVideoEncoder::ReturnEncodedImage(scoped_ptr<webrtc::EncodedImage> image, | |
| 736 int32_t bitstream_buffer_id, | |
| 737 uint16_t picture_id) { | |
| 738 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 739 DVLOG(3) << "ReturnEncodedImage(): " | |
| 740 << "bitstream_buffer_id=" << bitstream_buffer_id | |
| 741 << ", picture_id=" << picture_id; | |
| 742 | |
| 743 if (!encoded_image_callback_) | |
| 744 return; | |
| 745 | |
| 746 webrtc::RTPFragmentationHeader header; | |
| 747 memset(&header, 0, sizeof(header)); | |
| 748 switch (video_codec_type_) { | |
| 749 case webrtc::kVideoCodecVP8: | |
| 750 // Generate a header describing a single fragment. | |
| 751 header.VerifyAndAllocateFragmentationHeader(1); | |
| 752 header.fragmentationOffset[0] = 0; | |
| 753 header.fragmentationLength[0] = image->_length; | |
| 754 header.fragmentationPlType[0] = 0; | |
| 755 header.fragmentationTimeDiff[0] = 0; | |
| 756 break; | |
| 757 case webrtc::kVideoCodecH264: | |
| 758 if (!GetRTPFragmentationHeaderH264( | |
| 759 &header, image->_buffer, image->_length)) { | |
| 760 DLOG(ERROR) << "Failed to get RTP fragmentation header for H264"; | |
| 761 NotifyError(WEBRTC_VIDEO_CODEC_ERROR); | |
| 762 return; | |
| 763 } | |
| 764 break; | |
| 765 default: | |
| 766 NOTREACHED() << "Invalid video codec type"; | |
| 767 return; | |
| 768 } | |
| 769 | |
| 770 webrtc::CodecSpecificInfo info; | |
| 771 memset(&info, 0, sizeof(info)); | |
| 772 info.codecType = video_codec_type_; | |
| 773 if (video_codec_type_ == webrtc::kVideoCodecVP8) { | |
| 774 info.codecSpecific.VP8.pictureId = picture_id; | |
| 775 info.codecSpecific.VP8.tl0PicIdx = -1; | |
| 776 info.codecSpecific.VP8.keyIdx = -1; | |
| 777 } | |
| 778 | |
| 779 int32_t retval = encoded_image_callback_->Encoded(*image, &info, &header); | |
| 780 if (retval < 0) { | |
| 781 DVLOG(2) << "ReturnEncodedImage(): encoded_image_callback_ returned " | |
| 782 << retval; | |
| 783 } | |
| 784 | |
| 785 // The call through webrtc::EncodedImageCallback is synchronous, so we can | |
| 786 // immediately recycle the output buffer back to the Impl. | |
| 787 gpu_task_runner_->PostTask( | |
| 788 FROM_HERE, | |
| 789 base::Bind(&RTCVideoEncoder::Impl::UseOutputBitstreamBufferId, | |
| 790 impl_, | |
| 791 bitstream_buffer_id)); | |
| 792 } | |
| 793 | |
| 794 void RTCVideoEncoder::NotifyError(int32_t error) { | |
| 795 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 796 DVLOG(1) << "NotifyError(): error=" << error; | |
| 797 | |
| 798 impl_status_ = error; | |
| 799 gpu_task_runner_->PostTask(FROM_HERE, | |
| 800 base::Bind(&RTCVideoEncoder::Impl::Destroy, impl_)); | |
| 801 impl_ = NULL; | |
| 802 } | |
| 803 | |
| 804 void RTCVideoEncoder::RecordInitEncodeUMA( | 811 void RTCVideoEncoder::RecordInitEncodeUMA( |
| 805 int32_t init_retval, media::VideoCodecProfile profile) { | 812 int32_t init_retval, media::VideoCodecProfile profile) { |
| 806 UMA_HISTOGRAM_BOOLEAN("Media.RTCVideoEncoderInitEncodeSuccess", | 813 UMA_HISTOGRAM_BOOLEAN("Media.RTCVideoEncoderInitEncodeSuccess", |
| 807 init_retval == WEBRTC_VIDEO_CODEC_OK); | 814 init_retval == WEBRTC_VIDEO_CODEC_OK); |
| 808 if (init_retval == WEBRTC_VIDEO_CODEC_OK) { | 815 if (init_retval == WEBRTC_VIDEO_CODEC_OK) { |
| 809 UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoEncoderProfile", | 816 UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoEncoderProfile", |
| 810 profile, | 817 profile, |
| 811 media::VIDEO_CODEC_PROFILE_MAX + 1); | 818 media::VIDEO_CODEC_PROFILE_MAX + 1); |
| 812 } | 819 } |
| 813 } | 820 } |
| 814 | 821 |
| 815 } // namespace content | 822 } // namespace content |