OLD | NEW |
1 // Copyright 2013 The Chromium Authors. All rights reserved. | 1 // Copyright 2013 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/renderer/media/rtc_video_encoder.h" | 5 #include "content/renderer/media/rtc_video_encoder.h" |
6 | 6 |
7 #include <string.h> | 7 #include <string.h> |
8 | 8 |
9 #include "base/bind.h" | 9 #include "base/bind.h" |
10 #include "base/location.h" | 10 #include "base/location.h" |
11 #include "base/logging.h" | 11 #include "base/logging.h" |
12 #include "base/macros.h" | 12 #include "base/macros.h" |
13 #include "base/memory/scoped_vector.h" | 13 #include "base/memory/scoped_vector.h" |
14 #include "base/metrics/histogram.h" | 14 #include "base/metrics/histogram.h" |
15 #include "base/numerics/safe_conversions.h" | 15 #include "base/numerics/safe_conversions.h" |
16 #include "base/rand_util.h" | 16 #include "base/rand_util.h" |
17 #include "base/single_thread_task_runner.h" | 17 #include "base/single_thread_task_runner.h" |
| 18 #include "base/synchronization/lock.h" |
18 #include "base/synchronization/waitable_event.h" | 19 #include "base/synchronization/waitable_event.h" |
19 #include "base/thread_task_runner_handle.h" | 20 #include "base/thread_task_runner_handle.h" |
20 #include "media/base/bind_to_current_loop.h" | 21 #include "media/base/bind_to_current_loop.h" |
21 #include "media/base/bitstream_buffer.h" | 22 #include "media/base/bitstream_buffer.h" |
22 #include "media/base/video_frame.h" | 23 #include "media/base/video_frame.h" |
23 #include "media/base/video_util.h" | 24 #include "media/base/video_util.h" |
24 #include "media/filters/h264_parser.h" | 25 #include "media/filters/h264_parser.h" |
25 #include "media/renderers/gpu_video_accelerator_factories.h" | 26 #include "media/renderers/gpu_video_accelerator_factories.h" |
26 #include "media/video/video_encode_accelerator.h" | 27 #include "media/video/video_encode_accelerator.h" |
27 #include "third_party/libyuv/include/libyuv.h" | 28 #include "third_party/libyuv/include/libyuv.h" |
(...skipping 56 matching lines...) |
84 header->fragmentationTimeDiff[i] = 0; | 85 header->fragmentationTimeDiff[i] = 0; |
85 } | 86 } |
86 return true; | 87 return true; |
87 } | 88 } |
88 | 89 |
89 } // namespace | 90 } // namespace |
90 | 91 |
91 // This private class of RTCVideoEncoder does the actual work of communicating | 92 // This private class of RTCVideoEncoder does the actual work of communicating |
92 // with a media::VideoEncodeAccelerator for handling video encoding. It can | 93 // with a media::VideoEncodeAccelerator for handling video encoding. It can |
93 // be created on any thread, but should subsequently be posted to (and Destroy() | 94 // be created on any thread, but should subsequently be posted to (and Destroy() |
94 // called on) a single thread. Callbacks to RTCVideoEncoder are posted to the | 95 // called on) a single thread. |
95 // thread on which the instance was constructed. | |
96 // | 96 // |
97 // This class separates state related to the thread that RTCVideoEncoder | 97 // This class separates state related to the thread that RTCVideoEncoder |
98 // operates on (presently the libjingle worker thread) from the thread that | 98 // operates on from the thread that |gpu_factories_| provides for accelerator |
99 // |gpu_factories_| provides for accelerator operations (presently the media | 99 // operations (presently the media thread). |
100 // thread). The RTCVideoEncoder class can be deleted directly by WebRTC, while | |
101 // RTCVideoEncoder::Impl stays around long enough to properly shut down the VEA. | |
102 class RTCVideoEncoder::Impl | 100 class RTCVideoEncoder::Impl |
103 : public media::VideoEncodeAccelerator::Client, | 101 : public media::VideoEncodeAccelerator::Client, |
104 public base::RefCountedThreadSafe<RTCVideoEncoder::Impl> { | 102 public base::RefCountedThreadSafe<RTCVideoEncoder::Impl> { |
105 public: | 103 public: |
106 Impl(const base::WeakPtr<RTCVideoEncoder>& weak_encoder, | 104 Impl(media::GpuVideoAcceleratorFactories* gpu_factories, |
107 media::GpuVideoAcceleratorFactories* gpu_factories); | 105 webrtc::VideoCodecType video_codec_type); |
108 | 106 |
109 // Create the VEA and call Initialize() on it. Called once per instantiation, | 107 // Create the VEA and call Initialize() on it. Called once per instantiation, |
110 // and then the instance is bound forevermore to whichever thread made the | 108 // and then the instance is bound forevermore to whichever thread made the |
111 // call. | 109 // call. |
112 // RTCVideoEncoder expects to be able to call this function synchronously from | 110 // RTCVideoEncoder expects to be able to call this function synchronously from |
113 // its own thread, hence the |async_waiter| and |async_retval| arguments. | 111 // its own thread, hence the |async_waiter| and |async_retval| arguments. |
114 void CreateAndInitializeVEA(const gfx::Size& input_visible_size, | 112 void CreateAndInitializeVEA(const gfx::Size& input_visible_size, |
115 uint32_t bitrate, | 113 uint32_t bitrate, |
116 media::VideoCodecProfile profile, | 114 media::VideoCodecProfile profile, |
117 base::WaitableEvent* async_waiter, | 115 base::WaitableEvent* async_waiter, |
118 int32_t* async_retval); | 116 int32_t* async_retval); |
119 // Enqueue a frame from WebRTC for encoding. | 117 // Enqueue a frame from WebRTC for encoding. |
120 // RTCVideoEncoder expects to be able to call this function synchronously from | 118 // RTCVideoEncoder expects to be able to call this function synchronously from |
121 // its own thread, hence the |async_waiter| and |async_retval| arguments. | 119 // its own thread, hence the |async_waiter| and |async_retval| arguments. |
122 void Enqueue(const webrtc::VideoFrame* input_frame, | 120 void Enqueue(const webrtc::VideoFrame* input_frame, |
123 bool force_keyframe, | 121 bool force_keyframe, |
124 base::WaitableEvent* async_waiter, | 122 base::WaitableEvent* async_waiter, |
125 int32_t* async_retval); | 123 int32_t* async_retval); |
126 | 124 |
127 // RTCVideoEncoder is given a buffer to be passed to WebRTC through the | 125 // RTCVideoEncoder is given a buffer to be passed to WebRTC through the |
128 // RTCVideoEncoder::ReturnEncodedImage() function. When that is complete, | 126 // RTCVideoEncoder::ReturnEncodedImage() function. When that is complete, |
129 // the buffer is returned to Impl by its index using this function. | 127 // the buffer is returned to Impl by its index using this function. |
130 void UseOutputBitstreamBufferId(int32_t bitstream_buffer_id); | 128 void UseOutputBitstreamBufferId(int32_t bitstream_buffer_id); |
131 | 129 |
132 // Request encoding parameter change for the underlying encoder. | 130 // Request encoding parameter change for the underlying encoder. |
133 void RequestEncodingParametersChange(uint32_t bitrate, uint32_t framerate); | 131 void RequestEncodingParametersChange(uint32_t bitrate, uint32_t framerate); |
134 | 132 |
| 133 void RegisterEncodeCompleteCallback(base::WaitableEvent* async_waiter, |
| 134 int32_t* async_retval, |
| 135 webrtc::EncodedImageCallback* callback); |
| 136 |
135 // Destroy this Impl's encoder. The destructor is not explicitly called, as | 137 // Destroy this Impl's encoder. The destructor is not explicitly called, as |
136 // Impl is a base::RefCountedThreadSafe. | 138 // Impl is a base::RefCountedThreadSafe. |
137 void Destroy(); | 139 void Destroy(base::WaitableEvent* async_waiter); |
| 140 |
| 141 // Returns the status of Impl, one of the WEBRTC_VIDEO_CODEC_XXX values. |
| 142 int32_t GetStatus() const; |
| 143 |
| 144 webrtc::VideoCodecType video_codec_type() { return video_codec_type_; } |
138 | 145 |
139 // media::VideoEncodeAccelerator::Client implementation. | 146 // media::VideoEncodeAccelerator::Client implementation. |
140 void RequireBitstreamBuffers(unsigned int input_count, | 147 void RequireBitstreamBuffers(unsigned int input_count, |
141 const gfx::Size& input_coded_size, | 148 const gfx::Size& input_coded_size, |
142 size_t output_buffer_size) override; | 149 size_t output_buffer_size) override; |
143 void BitstreamBufferReady(int32_t bitstream_buffer_id, | 150 void BitstreamBufferReady(int32_t bitstream_buffer_id, |
144 size_t payload_size, | 151 size_t payload_size, |
145 bool key_frame) override; | 152 bool key_frame) override; |
146 void NotifyError(media::VideoEncodeAccelerator::Error error) override; | 153 void NotifyError(media::VideoEncodeAccelerator::Error error) override; |
147 | 154 |
(...skipping 25 matching lines...) |
173 void RegisterAsyncWaiter(base::WaitableEvent* waiter, int32_t* retval); | 180 void RegisterAsyncWaiter(base::WaitableEvent* waiter, int32_t* retval); |
174 void SignalAsyncWaiter(int32_t retval); | 181 void SignalAsyncWaiter(int32_t retval); |
175 | 182 |
176 // Checks if the bitrate would overflow when converted from kbps to bps. | 183 // Checks if the bitrate would overflow when converted from kbps to bps. |
177 bool IsBitrateTooHigh(uint32_t bitrate); | 184 bool IsBitrateTooHigh(uint32_t bitrate); |
178 | 185 |
179 // Checks if the frame size differs from the hardware accelerator's | 186 // Checks if the frame size differs from the hardware accelerator's |
180 // requirements. | 187 // requirements. |
181 bool RequiresSizeChange(const scoped_refptr<media::VideoFrame>& frame) const; | 188 bool RequiresSizeChange(const scoped_refptr<media::VideoFrame>& frame) const; |
182 | 189 |
| 190 // Return an encoded output buffer to WebRTC. |
| 191 void ReturnEncodedImage(const webrtc::EncodedImage& image, |
| 192 int32_t bitstream_buffer_id, |
| 193 uint16_t picture_id); |
| 194 |
| 195 void SetStatus(int32_t status); |
| 196 |
| 197 // This is attached to |gpu_task_runner_|, not the thread the class is |
| 198 // constructed on. |
183 base::ThreadChecker thread_checker_; | 199 base::ThreadChecker thread_checker_; |
184 | 200 |
185 // Weak pointer to the parent RTCVideoEncoder, for posting back VEA::Client | |
186 // notifications. | |
187 const base::WeakPtr<RTCVideoEncoder> weak_encoder_; | |
188 | |
189 // The message loop on which to post callbacks to |weak_encoder_|. | |
190 const scoped_refptr<base::SingleThreadTaskRunner> encoder_task_runner_; | |
191 | |
192 // Factory for creating VEAs, shared memory buffers, etc. | 201 // Factory for creating VEAs, shared memory buffers, etc. |
193 media::GpuVideoAcceleratorFactories* gpu_factories_; | 202 media::GpuVideoAcceleratorFactories* gpu_factories_; |
194 | 203 |
195 // webrtc::VideoEncoder expects InitEncode() and Encode() to be synchronous. | 204 // webrtc::VideoEncoder expects InitEncode() and Encode() to be synchronous. |
196 // Do this by waiting on the |async_waiter_| and returning the return value in | 205 // Do this by waiting on the |async_waiter_| and returning the return value in |
197 // |async_retval_| when initialization completes, encoding completes, or | 206 // |async_retval_| when initialization completes, encoding completes, or |
198 // an error occurs. | 207 // an error occurs. |
199 base::WaitableEvent* async_waiter_; | 208 base::WaitableEvent* async_waiter_; |
200 int32_t* async_retval_; | 209 int32_t* async_retval_; |
201 | 210 |
(...skipping 19 matching lines...) |
221 // we don't care about ordering. | 230 // we don't care about ordering. |
222 std::vector<int> input_buffers_free_; | 231 std::vector<int> input_buffers_free_; |
223 | 232 |
224 // The number of output buffers ready to be filled with output from the | 233 // The number of output buffers ready to be filled with output from the |
225 // encoder. | 234 // encoder. |
226 int output_buffers_free_count_; | 235 int output_buffers_free_count_; |
227 | 236 |
228 // 15-bit running index of the VP8 frames. See the VP8 RTP spec for details. | 237 // 15-bit running index of the VP8 frames. See the VP8 RTP spec for details. |
229 uint16_t picture_id_; | 238 uint16_t picture_id_; |
230 | 239 |
| 240 // webrtc::VideoEncoder encode complete callback. |
| 241 webrtc::EncodedImageCallback* encoded_image_callback_; |
| 242 |
| 243 // The video codec type, as reported to WebRTC. |
| 244 const webrtc::VideoCodecType video_codec_type_; |
| 245 |
| 246 // Protect |status_|. |status_| is read or written on |gpu_task_runner_| in |
| 247 // Impl. It can be read in RTCVideoEncoder on other threads. |
| 248 mutable base::Lock status_lock_; |
| 249 |
| 250 // We cannot immediately return error conditions to the WebRTC user of this |
| 251 // class, as there is no error callback in the webrtc::VideoEncoder interface. |
| 252 // Instead, we cache an error status here and return it the next time an |
| 253 // interface entry point is called. This is protected by |status_lock_|. |
| 254 int32_t status_; |
| 255 |
231 DISALLOW_COPY_AND_ASSIGN(Impl); | 256 DISALLOW_COPY_AND_ASSIGN(Impl); |
232 }; | 257 }; |
233 | 258 |
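The |status_| and |status_lock_| members above are the heart of this CL's error-handling change: webrtc::VideoEncoder has no error callback, so Impl caches the most recent status and each synchronous entry point reports it on the next call. A minimal sketch of that idiom, with hypothetical names (CachedStatus, EntryPoint) that are not part of this CL:

// Sketch only; CachedStatus and EntryPoint are illustrative names.
class CachedStatus {
 public:
  int32_t Get() const {
    base::AutoLock lock(lock_);
    return status_;
  }
  void Set(int32_t status) {
    base::AutoLock lock(lock_);
    status_ = status;
  }

 private:
  mutable base::Lock lock_;
  int32_t status_ = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
};

// Each entry point surfaces a previously cached error before doing any work.
int32_t EntryPoint(CachedStatus* status) {
  const int32_t cached = status->Get();
  if (cached != WEBRTC_VIDEO_CODEC_OK)
    return cached;
  // ... otherwise post the real work to the encoder thread ...
  return WEBRTC_VIDEO_CODEC_OK;
}
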
234 RTCVideoEncoder::Impl::Impl(const base::WeakPtr<RTCVideoEncoder>& weak_encoder, | 259 RTCVideoEncoder::Impl::Impl(media::GpuVideoAcceleratorFactories* gpu_factories, |
235 media::GpuVideoAcceleratorFactories* gpu_factories) | 260 webrtc::VideoCodecType video_codec_type) |
236 : weak_encoder_(weak_encoder), | 261 : gpu_factories_(gpu_factories), |
237 encoder_task_runner_(base::ThreadTaskRunnerHandle::Get()), | |
238 gpu_factories_(gpu_factories), | |
239 async_waiter_(NULL), | 262 async_waiter_(NULL), |
240 async_retval_(NULL), | 263 async_retval_(NULL), |
241 input_next_frame_(NULL), | 264 input_next_frame_(NULL), |
242 input_next_frame_keyframe_(false), | 265 input_next_frame_keyframe_(false), |
243 output_buffers_free_count_(0) { | 266 output_buffers_free_count_(0), |
| 267 encoded_image_callback_(nullptr), |
| 268 video_codec_type_(video_codec_type), |
| 269 status_(WEBRTC_VIDEO_CODEC_UNINITIALIZED) { |
244 thread_checker_.DetachFromThread(); | 270 thread_checker_.DetachFromThread(); |
245 // Picture ID should start on a random number. | 271 // Picture ID should start on a random number. |
246 picture_id_ = static_cast<uint16_t>(base::RandInt(0, 0x7FFF)); | 272 picture_id_ = static_cast<uint16_t>(base::RandInt(0, 0x7FFF)); |
247 } | 273 } |
248 | 274 |
249 void RTCVideoEncoder::Impl::CreateAndInitializeVEA( | 275 void RTCVideoEncoder::Impl::CreateAndInitializeVEA( |
250 const gfx::Size& input_visible_size, | 276 const gfx::Size& input_visible_size, |
251 uint32_t bitrate, | 277 uint32_t bitrate, |
252 media::VideoCodecProfile profile, | 278 media::VideoCodecProfile profile, |
253 base::WaitableEvent* async_waiter, | 279 base::WaitableEvent* async_waiter, |
254 int32_t* async_retval) { | 280 int32_t* async_retval) { |
255 DVLOG(3) << "Impl::CreateAndInitializeVEA()"; | 281 DVLOG(3) << "Impl::CreateAndInitializeVEA()"; |
256 DCHECK(thread_checker_.CalledOnValidThread()); | 282 DCHECK(thread_checker_.CalledOnValidThread()); |
257 | 283 |
| 284 SetStatus(WEBRTC_VIDEO_CODEC_UNINITIALIZED); |
258 RegisterAsyncWaiter(async_waiter, async_retval); | 285 RegisterAsyncWaiter(async_waiter, async_retval); |
259 | 286 |
260 // Check for overflow converting bitrate (kilobits/sec) to bits/sec. | 287 // Check for overflow converting bitrate (kilobits/sec) to bits/sec. |
261 if (IsBitrateTooHigh(bitrate)) | 288 if (IsBitrateTooHigh(bitrate)) |
262 return; | 289 return; |
263 | 290 |
264 video_encoder_ = gpu_factories_->CreateVideoEncodeAccelerator(); | 291 video_encoder_ = gpu_factories_->CreateVideoEncodeAccelerator(); |
265 if (!video_encoder_) { | 292 if (!video_encoder_) { |
266 LogAndNotifyError(FROM_HERE, "Error creating VideoEncodeAccelerator", | 293 LogAndNotifyError(FROM_HERE, "Error creating VideoEncodeAccelerator", |
267 media::VideoEncodeAccelerator::kPlatformFailureError); | 294 media::VideoEncodeAccelerator::kPlatformFailureError); |
268 return; | 295 return; |
269 } | 296 } |
270 input_visible_size_ = input_visible_size; | 297 input_visible_size_ = input_visible_size; |
271 if (!video_encoder_->Initialize(media::PIXEL_FORMAT_I420, input_visible_size_, | 298 if (!video_encoder_->Initialize(media::PIXEL_FORMAT_I420, input_visible_size_, |
272 profile, bitrate * 1000, this)) { | 299 profile, bitrate * 1000, this)) { |
273 LogAndNotifyError(FROM_HERE, "Error initializing video_encoder", | 300 LogAndNotifyError(FROM_HERE, "Error initializing video_encoder", |
274 media::VideoEncodeAccelerator::kInvalidArgumentError); | 301 media::VideoEncodeAccelerator::kInvalidArgumentError); |
275 return; | 302 return; |
276 } | 303 } |
| 304 // RequireBitstreamBuffers or NotifyError will be called and the waiter will |
| 305 // be signaled. |
277 } | 306 } |
278 | 307 |
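CreateAndInitializeVEA() and RequestEncodingParametersChange() both guard the kbps-to-bps conversion with IsBitrateTooHigh() before multiplying by 1000; its body is outside the visible hunks. Conceptually the check only needs to confirm that bitrate * 1000 still fits in the uint32_t the VEA interface takes. A sketch under that assumption, using base/numerics/safe_conversions.h (already included above) and an illustrative function name:

// Sketch, not the CL's exact body: reject bitrates whose bits/sec value
// would overflow uint32_t.
bool WouldOverflowKbpsToBps(uint32_t bitrate_kbps) {
  const uint64_t bps = static_cast<uint64_t>(bitrate_kbps) * 1000;
  return !base::IsValueInRangeForNumericType<uint32_t>(bps);
}
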
279 void RTCVideoEncoder::Impl::Enqueue(const webrtc::VideoFrame* input_frame, | 308 void RTCVideoEncoder::Impl::Enqueue(const webrtc::VideoFrame* input_frame, |
280 bool force_keyframe, | 309 bool force_keyframe, |
281 base::WaitableEvent* async_waiter, | 310 base::WaitableEvent* async_waiter, |
282 int32_t* async_retval) { | 311 int32_t* async_retval) { |
283 DVLOG(3) << "Impl::Enqueue()"; | 312 DVLOG(3) << "Impl::Enqueue()"; |
284 DCHECK(thread_checker_.CalledOnValidThread()); | 313 DCHECK(thread_checker_.CalledOnValidThread()); |
285 DCHECK(!input_next_frame_); | 314 DCHECK(!input_next_frame_); |
286 | 315 |
287 RegisterAsyncWaiter(async_waiter, async_retval); | 316 RegisterAsyncWaiter(async_waiter, async_retval); |
| 317 int32_t retval = GetStatus(); |
| 318 if (retval != WEBRTC_VIDEO_CODEC_OK) { |
| 319 SignalAsyncWaiter(retval); |
| 320 return; |
| 321 } |
| 322 |
288 // If there are no free input and output buffers, drop the frame to avoid a | 323 // If there are no free input and output buffers, drop the frame to avoid a |
289 // deadlock. If there is a free input buffer, EncodeOneFrame will run and | 324 // deadlock. If there is a free input buffer, EncodeOneFrame will run and |
290 // unblock Encode(). If there are no free input buffers but there is a free | 325 // unblock Encode(). If there are no free input buffers but there is a free |
291 // output buffer, EncodeFrameFinished will be called later to unblock | 326 // output buffer, EncodeFrameFinished will be called later to unblock |
292 // Encode(). | 327 // Encode(). |
293 // | 328 // |
294 // The caller of Encode() holds a webrtc lock. The deadlock happens when: | 329 // The caller of Encode() holds a webrtc lock. The deadlock happens when: |
295 // (1) Encode() is waiting for the frame to be encoded in EncodeOneFrame(). | 330 // (1) Encode() is waiting for the frame to be encoded in EncodeOneFrame(). |
296 // (2) There are no free input buffers and they cannot be freed because | 331 // (2) There are no free input buffers and they cannot be freed because |
297 // the encoder has no output buffers. | 332 // the encoder has no output buffers. |
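The guard this comment motivates sits in the collapsed hunk just below; in essence it must detect the no-free-buffers case and signal the waiter instead of queuing the frame, so Encode() returns rather than blocks. A rough sketch using the members declared earlier (the exact return code here is an assumption, not the CL's code):

// Sketch of the drop-frame guard described above.
if (input_buffers_free_.empty() && output_buffers_free_count_ == 0) {
  DVLOG(2) << "Dropping frame: no free input or output buffers.";
  SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_ERROR);  // Assumed code; unblocks Encode().
  return;
}
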
(...skipping 39 matching lines...) |
337 DCHECK(thread_checker_.CalledOnValidThread()); | 372 DCHECK(thread_checker_.CalledOnValidThread()); |
338 | 373 |
339 // Check for overflow converting bitrate (kilobits/sec) to bits/sec. | 374 // Check for overflow converting bitrate (kilobits/sec) to bits/sec. |
340 if (IsBitrateTooHigh(bitrate)) | 375 if (IsBitrateTooHigh(bitrate)) |
341 return; | 376 return; |
342 | 377 |
343 if (video_encoder_) | 378 if (video_encoder_) |
344 video_encoder_->RequestEncodingParametersChange(bitrate * 1000, framerate); | 379 video_encoder_->RequestEncodingParametersChange(bitrate * 1000, framerate); |
345 } | 380 } |
346 | 381 |
347 void RTCVideoEncoder::Impl::Destroy() { | 382 void RTCVideoEncoder::Impl::Destroy(base::WaitableEvent* async_waiter) { |
348 DVLOG(3) << "Impl::Destroy()"; | 383 DVLOG(3) << "Impl::Destroy()"; |
349 DCHECK(thread_checker_.CalledOnValidThread()); | 384 DCHECK(thread_checker_.CalledOnValidThread()); |
350 video_encoder_.reset(); | 385 if (video_encoder_) { |
| 386 video_encoder_.reset(); |
| 387 SetStatus(WEBRTC_VIDEO_CODEC_UNINITIALIZED); |
| 388 } |
| 389 async_waiter->Signal(); |
| 390 } |
| 391 |
| 392 int32_t RTCVideoEncoder::Impl::GetStatus() const { |
| 393 base::AutoLock lock(status_lock_); |
| 394 return status_; |
| 395 } |
| 396 |
| 397 void RTCVideoEncoder::Impl::SetStatus(int32_t status) { |
| 398 base::AutoLock lock(status_lock_); |
| 399 status_ = status; |
351 } | 400 } |
352 | 401 |
353 void RTCVideoEncoder::Impl::RequireBitstreamBuffers( | 402 void RTCVideoEncoder::Impl::RequireBitstreamBuffers( |
354 unsigned int input_count, | 403 unsigned int input_count, |
355 const gfx::Size& input_coded_size, | 404 const gfx::Size& input_coded_size, |
356 size_t output_buffer_size) { | 405 size_t output_buffer_size) { |
357 DVLOG(3) << "Impl::RequireBitstreamBuffers(): input_count=" << input_count | 406 DVLOG(3) << "Impl::RequireBitstreamBuffers(): input_count=" << input_count |
358 << ", input_coded_size=" << input_coded_size.ToString() | 407 << ", input_coded_size=" << input_coded_size.ToString() |
359 << ", output_buffer_size=" << output_buffer_size; | 408 << ", output_buffer_size=" << output_buffer_size; |
360 DCHECK(thread_checker_.CalledOnValidThread()); | 409 DCHECK(thread_checker_.CalledOnValidThread()); |
(...skipping 26 matching lines...) |
387 } | 436 } |
388 output_buffers_.push_back(shm.release()); | 437 output_buffers_.push_back(shm.release()); |
389 } | 438 } |
390 | 439 |
391 // Immediately provide all output buffers to the VEA. | 440 // Immediately provide all output buffers to the VEA. |
392 for (size_t i = 0; i < output_buffers_.size(); ++i) { | 441 for (size_t i = 0; i < output_buffers_.size(); ++i) { |
393 video_encoder_->UseOutputBitstreamBuffer(media::BitstreamBuffer( | 442 video_encoder_->UseOutputBitstreamBuffer(media::BitstreamBuffer( |
394 i, output_buffers_[i]->handle(), output_buffers_[i]->mapped_size())); | 443 i, output_buffers_[i]->handle(), output_buffers_[i]->mapped_size())); |
395 output_buffers_free_count_++; | 444 output_buffers_free_count_++; |
396 } | 445 } |
| 446 DCHECK_EQ(GetStatus(), WEBRTC_VIDEO_CODEC_UNINITIALIZED); |
| 447 SetStatus(WEBRTC_VIDEO_CODEC_OK); |
397 SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK); | 448 SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK); |
398 } | 449 } |
399 | 450 |
400 void RTCVideoEncoder::Impl::BitstreamBufferReady(int32_t bitstream_buffer_id, | 451 void RTCVideoEncoder::Impl::BitstreamBufferReady(int32_t bitstream_buffer_id, |
401 size_t payload_size, | 452 size_t payload_size, |
402 bool key_frame) { | 453 bool key_frame) { |
403 DVLOG(3) << "Impl::BitstreamBufferReady(): " | 454 DVLOG(3) << "Impl::BitstreamBufferReady(): " |
404 "bitstream_buffer_id=" << bitstream_buffer_id | 455 "bitstream_buffer_id=" << bitstream_buffer_id |
405 << ", payload_size=" << payload_size | 456 << ", payload_size=" << payload_size |
406 << ", key_frame=" << key_frame; | 457 << ", key_frame=" << key_frame; |
(...skipping 15 matching lines...) |
422 | 473 |
423 // Use webrtc timestamps to ensure correct RTP sender behavior. | 474 // Use webrtc timestamps to ensure correct RTP sender behavior. |
424 // TODO(hshi): obtain timestamp from the capturer, see crbug.com/350106. | 475 // TODO(hshi): obtain timestamp from the capturer, see crbug.com/350106. |
425 const int64_t capture_time_us = webrtc::TickTime::MicrosecondTimestamp(); | 476 const int64_t capture_time_us = webrtc::TickTime::MicrosecondTimestamp(); |
426 | 477 |
427 // Derive the capture time (in ms) and RTP timestamp (in 90KHz ticks). | 478 // Derive the capture time (in ms) and RTP timestamp (in 90KHz ticks). |
428 const int64_t capture_time_ms = capture_time_us / 1000; | 479 const int64_t capture_time_ms = capture_time_us / 1000; |
429 const uint32_t rtp_timestamp = | 480 const uint32_t rtp_timestamp = |
430 static_cast<uint32_t>(capture_time_us * 90 / 1000); | 481 static_cast<uint32_t>(capture_time_us * 90 / 1000); |
431 | 482 |
432 std::unique_ptr<webrtc::EncodedImage> image(new webrtc::EncodedImage( | 483 webrtc::EncodedImage image( |
433 reinterpret_cast<uint8_t*>(output_buffer->memory()), payload_size, | 484 reinterpret_cast<uint8_t*>(output_buffer->memory()), payload_size, |
434 output_buffer->mapped_size())); | 485 output_buffer->mapped_size()); |
435 image->_encodedWidth = input_visible_size_.width(); | 486 image._encodedWidth = input_visible_size_.width(); |
436 image->_encodedHeight = input_visible_size_.height(); | 487 image._encodedHeight = input_visible_size_.height(); |
437 image->_timeStamp = rtp_timestamp; | 488 image._timeStamp = rtp_timestamp; |
438 image->capture_time_ms_ = capture_time_ms; | 489 image.capture_time_ms_ = capture_time_ms; |
439 image->_frameType = | 490 image._frameType = |
440 (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta); | 491 (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta); |
441 image->_completeFrame = true; | 492 image._completeFrame = true; |
442 | 493 |
443 encoder_task_runner_->PostTask( | 494 ReturnEncodedImage(image, bitstream_buffer_id, picture_id_); |
444 FROM_HERE, | |
445 base::Bind(&RTCVideoEncoder::ReturnEncodedImage, weak_encoder_, | |
446 base::Passed(&image), bitstream_buffer_id, picture_id_)); | |
447 // Picture ID must wrap after reaching the maximum. | 495 // Picture ID must wrap after reaching the maximum. |
448 picture_id_ = (picture_id_ + 1) & 0x7FFF; | 496 picture_id_ = (picture_id_ + 1) & 0x7FFF; |
449 } | 497 } |
450 | 498 |
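For reference on the 90 kHz math above: RTP video timestamps advance 90 ticks per millisecond, so a capture time of 1,000,000 µs yields capture_time_ms = 1000 and rtp_timestamp = 1,000,000 * 90 / 1000 = 90,000 ticks. Multiplying before dividing, as the code does, keeps sub-millisecond precision in the integer arithmetic.
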
451 void RTCVideoEncoder::Impl::NotifyError( | 499 void RTCVideoEncoder::Impl::NotifyError( |
452 media::VideoEncodeAccelerator::Error error) { | 500 media::VideoEncodeAccelerator::Error error) { |
453 DCHECK(thread_checker_.CalledOnValidThread()); | 501 DCHECK(thread_checker_.CalledOnValidThread()); |
454 int32_t retval; | 502 int32_t retval; |
455 switch (error) { | 503 switch (error) { |
456 case media::VideoEncodeAccelerator::kInvalidArgumentError: | 504 case media::VideoEncodeAccelerator::kInvalidArgumentError: |
457 retval = WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 505 retval = WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
458 break; | 506 break; |
459 default: | 507 default: |
460 retval = WEBRTC_VIDEO_CODEC_ERROR; | 508 retval = WEBRTC_VIDEO_CODEC_ERROR; |
461 } | 509 } |
462 | 510 |
463 video_encoder_.reset(); | 511 video_encoder_.reset(); |
464 | 512 |
465 if (async_waiter_) { | 513 SetStatus(retval); |
| 514 if (async_waiter_) |
466 SignalAsyncWaiter(retval); | 515 SignalAsyncWaiter(retval); |
467 } else { | |
468 encoder_task_runner_->PostTask( | |
469 FROM_HERE, | |
470 base::Bind(&RTCVideoEncoder::NotifyError, weak_encoder_, retval)); | |
471 } | |
472 } | 516 } |
473 | 517 |
474 RTCVideoEncoder::Impl::~Impl() { DCHECK(!video_encoder_); } | 518 RTCVideoEncoder::Impl::~Impl() { DCHECK(!video_encoder_); } |
475 | 519 |
476 void RTCVideoEncoder::Impl::LogAndNotifyError( | 520 void RTCVideoEncoder::Impl::LogAndNotifyError( |
477 const tracked_objects::Location& location, | 521 const tracked_objects::Location& location, |
478 const std::string& str, | 522 const std::string& str, |
479 media::VideoEncodeAccelerator::Error error) { | 523 media::VideoEncodeAccelerator::Error error) { |
480 static const char* const kErrorNames[] = { | 524 static const char* const kErrorNames[] = { |
481 "kIllegalStateError", "kInvalidArgumentError", "kPlatformFailureError"}; | 525 "kIllegalStateError", "kInvalidArgumentError", "kPlatformFailureError"}; |
(...skipping 109 matching lines...) |
591 media::VideoEncodeAccelerator::kInvalidArgumentError); | 635 media::VideoEncodeAccelerator::kInvalidArgumentError); |
592 return true; | 636 return true; |
593 } | 637 } |
594 | 638 |
595 bool RTCVideoEncoder::Impl::RequiresSizeChange( | 639 bool RTCVideoEncoder::Impl::RequiresSizeChange( |
596 const scoped_refptr<media::VideoFrame>& frame) const { | 640 const scoped_refptr<media::VideoFrame>& frame) const { |
597 return (frame->coded_size() != input_frame_coded_size_ || | 641 return (frame->coded_size() != input_frame_coded_size_ || |
598 frame->visible_rect() != gfx::Rect(input_visible_size_)); | 642 frame->visible_rect() != gfx::Rect(input_visible_size_)); |
599 } | 643 } |
600 | 644 |
| 645 void RTCVideoEncoder::Impl::RegisterEncodeCompleteCallback( |
| 646 base::WaitableEvent* async_waiter, |
| 647 int32_t* async_retval, |
| 648 webrtc::EncodedImageCallback* callback) { |
| 649 DCHECK(thread_checker_.CalledOnValidThread()); |
| 650 DVLOG(3) << "RegisterEncodeCompleteCallback()"; |
| 651 RegisterAsyncWaiter(async_waiter, async_retval); |
| 652 int32_t retval = GetStatus(); |
| 653 if (retval == WEBRTC_VIDEO_CODEC_OK) |
| 654 encoded_image_callback_ = callback; |
| 655 SignalAsyncWaiter(retval); |
| 656 } |
| 657 |
| 658 void RTCVideoEncoder::Impl::ReturnEncodedImage( |
| 659 const webrtc::EncodedImage& image, |
| 660 int32_t bitstream_buffer_id, |
| 661 uint16_t picture_id) { |
| 662 DCHECK(thread_checker_.CalledOnValidThread()); |
| 663 DVLOG(3) << "ReturnEncodedImage(): " |
| 664 << "bitstream_buffer_id=" << bitstream_buffer_id |
| 665 << ", picture_id=" << picture_id; |
| 666 |
| 667 if (!encoded_image_callback_) |
| 668 return; |
| 669 |
| 670 webrtc::RTPFragmentationHeader header; |
| 671 memset(&header, 0, sizeof(header)); |
| 672 switch (video_codec_type_) { |
| 673 case webrtc::kVideoCodecVP8: |
| 674 // Generate a header describing a single fragment. |
| 675 header.VerifyAndAllocateFragmentationHeader(1); |
| 676 header.fragmentationOffset[0] = 0; |
| 677 header.fragmentationLength[0] = image._length; |
| 678 header.fragmentationPlType[0] = 0; |
| 679 header.fragmentationTimeDiff[0] = 0; |
| 680 break; |
| 681 case webrtc::kVideoCodecH264: |
| 682 if (!GetRTPFragmentationHeaderH264(&header, image._buffer, |
| 683 image._length)) { |
| 684 DLOG(ERROR) << "Failed to get RTP fragmentation header for H264"; |
| 685 NotifyError( |
| 686 (media::VideoEncodeAccelerator::Error)WEBRTC_VIDEO_CODEC_ERROR); |
| 687 return; |
| 688 } |
| 689 break; |
| 690 default: |
| 691 NOTREACHED() << "Invalid video codec type"; |
| 692 return; |
| 693 } |
| 694 |
| 695 webrtc::CodecSpecificInfo info; |
| 696 memset(&info, 0, sizeof(info)); |
| 697 info.codecType = video_codec_type_; |
| 698 if (video_codec_type_ == webrtc::kVideoCodecVP8) { |
| 699 info.codecSpecific.VP8.pictureId = picture_id; |
| 700 info.codecSpecific.VP8.tl0PicIdx = -1; |
| 701 info.codecSpecific.VP8.keyIdx = -1; |
| 702 } |
| 703 |
| 704 const int32_t retval = |
| 705 encoded_image_callback_->Encoded(image, &info, &header); |
| 706 if (retval < 0) { |
| 707 DVLOG(2) << "ReturnEncodedImage(): encoded_image_callback_ returned " |
| 708 << retval; |
| 709 } |
| 710 |
| 711 UseOutputBitstreamBufferId(bitstream_buffer_id); |
| 712 } |
| 713 |
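The H.264 branch above depends on GetRTPFragmentationHeaderH264(), whose body is in the collapsed hunk at the top of this file (only its trailing zero-fill loop is visible there). As a hedged sketch of how per-NALU fragmentation info can be collected with media::H264Parser (the header is already included; the function name and exact structure here are assumptions, not the CL's code):

// Hedged sketch; the CL's actual helper lives in a collapsed hunk above.
bool BuildFragmentationHeader(webrtc::RTPFragmentationHeader* header,
                              const uint8_t* data, size_t length) {
  media::H264Parser parser;
  parser.SetStream(data, length);

  std::vector<media::H264NALU> nalus;
  media::H264NALU nalu;
  media::H264Parser::Result result;
  while ((result = parser.AdvanceToNextNALU(&nalu)) == media::H264Parser::kOk)
    nalus.push_back(nalu);
  if (result != media::H264Parser::kEOStream)
    return false;  // Malformed stream.

  header->VerifyAndAllocateFragmentationHeader(nalus.size());
  for (size_t i = 0; i < nalus.size(); ++i) {
    header->fragmentationOffset[i] = nalus[i].data - data;
    header->fragmentationLength[i] = nalus[i].size;
    header->fragmentationPlType[i] = 0;
    header->fragmentationTimeDiff[i] = 0;
  }
  return true;
}
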
601 RTCVideoEncoder::RTCVideoEncoder( | 714 RTCVideoEncoder::RTCVideoEncoder( |
602 webrtc::VideoCodecType type, | 715 webrtc::VideoCodecType type, |
603 media::GpuVideoAcceleratorFactories* gpu_factories) | 716 media::GpuVideoAcceleratorFactories* gpu_factories) |
604 : video_codec_type_(type), | 717 : gpu_factories_(gpu_factories), |
605 gpu_factories_(gpu_factories), | |
606 gpu_task_runner_(gpu_factories->GetTaskRunner()), | 718 gpu_task_runner_(gpu_factories->GetTaskRunner()), |
607 encoded_image_callback_(NULL), | 719 impl_(new Impl(gpu_factories_, type)) { |
608 impl_status_(WEBRTC_VIDEO_CODEC_UNINITIALIZED), | |
609 weak_factory_(this) { | |
610 DVLOG(1) << "RTCVideoEncoder(): codec type=" << type; | 720 DVLOG(1) << "RTCVideoEncoder(): codec type=" << type; |
611 } | 721 } |
612 | 722 |
613 RTCVideoEncoder::~RTCVideoEncoder() { | 723 RTCVideoEncoder::~RTCVideoEncoder() { |
614 DVLOG(3) << "~RTCVideoEncoder"; | 724 DVLOG(3) << "~RTCVideoEncoder"; |
615 DCHECK(thread_checker_.CalledOnValidThread()); | |
616 Release(); | 725 Release(); |
617 DCHECK(!impl_.get()); | |
618 } | 726 } |
619 | 727 |
620 int32_t RTCVideoEncoder::InitEncode(const webrtc::VideoCodec* codec_settings, | 728 int32_t RTCVideoEncoder::InitEncode(const webrtc::VideoCodec* codec_settings, |
621 int32_t number_of_cores, | 729 int32_t number_of_cores, |
622 size_t max_payload_size) { | 730 size_t max_payload_size) { |
623 DVLOG(1) << "InitEncode(): codecType=" << codec_settings->codecType | 731 DVLOG(1) << "InitEncode(): codecType=" << codec_settings->codecType |
624 << ", width=" << codec_settings->width | 732 << ", width=" << codec_settings->width |
625 << ", height=" << codec_settings->height | 733 << ", height=" << codec_settings->height |
626 << ", startBitrate=" << codec_settings->startBitrate; | 734 << ", startBitrate=" << codec_settings->startBitrate; |
627 DCHECK(thread_checker_.CalledOnValidThread()); | |
628 DCHECK(!impl_.get()); | |
629 | 735 |
630 const media::VideoCodecProfile profile = | 736 const media::VideoCodecProfile profile = WebRTCVideoCodecToVideoCodecProfile( |
631 WebRTCVideoCodecToVideoCodecProfile(video_codec_type_, codec_settings); | 737 impl_->video_codec_type(), codec_settings); |
632 | 738 |
633 weak_factory_.InvalidateWeakPtrs(); | |
634 impl_ = new Impl(weak_factory_.GetWeakPtr(), gpu_factories_); | |
635 base::WaitableEvent initialization_waiter(true, false); | 739 base::WaitableEvent initialization_waiter(true, false); |
636 int32_t initialization_retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED; | 740 int32_t initialization_retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
637 gpu_task_runner_->PostTask( | 741 gpu_task_runner_->PostTask( |
638 FROM_HERE, | 742 FROM_HERE, |
639 base::Bind(&RTCVideoEncoder::Impl::CreateAndInitializeVEA, | 743 base::Bind(&RTCVideoEncoder::Impl::CreateAndInitializeVEA, |
640 impl_, | 744 impl_, |
641 gfx::Size(codec_settings->width, codec_settings->height), | 745 gfx::Size(codec_settings->width, codec_settings->height), |
642 codec_settings->startBitrate, | 746 codec_settings->startBitrate, |
643 profile, | 747 profile, |
644 &initialization_waiter, | 748 &initialization_waiter, |
645 &initialization_retval)); | 749 &initialization_retval)); |
646 | 750 |
647 // webrtc::VideoEncoder expects this call to be synchronous. | 751 // webrtc::VideoEncoder expects this call to be synchronous. |
648 initialization_waiter.Wait(); | 752 initialization_waiter.Wait(); |
649 RecordInitEncodeUMA(initialization_retval, profile); | 753 RecordInitEncodeUMA(initialization_retval, profile); |
650 return initialization_retval; | 754 return initialization_retval; |
651 } | 755 } |
652 | 756 |
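InitEncode() above shows the post-and-wait shape that every public entry point now follows: webrtc::VideoEncoder requires synchronous calls, so the work is posted to |gpu_task_runner_| and the caller blocks on a base::WaitableEvent until Impl writes the result and signals. A generic sketch of that round trip (the helper name and callback signature are illustrative, not from this CL):

// Illustrative helper: post |do_work| to the GPU thread and wait for its
// result, mirroring InitEncode()/Encode()/Release() above.
int32_t RunOnGpuThreadAndWait(
    const scoped_refptr<base::SingleThreadTaskRunner>& gpu_task_runner,
    const base::Callback<void(base::WaitableEvent*, int32_t*)>& do_work) {
  base::WaitableEvent waiter(true /* manual reset */, false /* unsignaled */);
  int32_t retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  gpu_task_runner->PostTask(FROM_HERE, base::Bind(do_work, &waiter, &retval));
  waiter.Wait();  // The posted task writes |retval| and then signals |waiter|.
  return retval;
}
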
653 int32_t RTCVideoEncoder::Encode( | 757 int32_t RTCVideoEncoder::Encode( |
654 const webrtc::VideoFrame& input_image, | 758 const webrtc::VideoFrame& input_image, |
655 const webrtc::CodecSpecificInfo* codec_specific_info, | 759 const webrtc::CodecSpecificInfo* codec_specific_info, |
656 const std::vector<webrtc::FrameType>* frame_types) { | 760 const std::vector<webrtc::FrameType>* frame_types) { |
657 DVLOG(3) << "Encode()"; | 761 DVLOG(3) << "Encode()"; |
658 if (!impl_.get()) { | |
659 DVLOG(3) << "Encode(): returning impl_status_=" << impl_status_; | |
660 return impl_status_; | |
661 } | |
662 | 762 |
663 const bool want_key_frame = frame_types && frame_types->size() && | 763 const bool want_key_frame = frame_types && frame_types->size() && |
664 frame_types->front() == webrtc::kVideoFrameKey; | 764 frame_types->front() == webrtc::kVideoFrameKey; |
665 base::WaitableEvent encode_waiter(true, false); | 765 base::WaitableEvent encode_waiter(true, false); |
666 int32_t encode_retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED; | 766 int32_t encode_retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
667 gpu_task_runner_->PostTask( | 767 gpu_task_runner_->PostTask( |
668 FROM_HERE, | 768 FROM_HERE, |
669 base::Bind(&RTCVideoEncoder::Impl::Enqueue, | 769 base::Bind(&RTCVideoEncoder::Impl::Enqueue, |
670 impl_, | 770 impl_, |
671 &input_image, | 771 &input_image, |
672 want_key_frame, | 772 want_key_frame, |
673 &encode_waiter, | 773 &encode_waiter, |
674 &encode_retval)); | 774 &encode_retval)); |
675 | 775 |
676 // webrtc::VideoEncoder expects this call to be synchronous. | 776 // webrtc::VideoEncoder expects this call to be synchronous. |
677 encode_waiter.Wait(); | 777 encode_waiter.Wait(); |
678 DVLOG(3) << "Encode(): returning encode_retval=" << encode_retval; | 778 DVLOG(3) << "Encode(): returning encode_retval=" << encode_retval; |
679 return encode_retval; | 779 return encode_retval; |
680 } | 780 } |
681 | 781 |
682 int32_t RTCVideoEncoder::RegisterEncodeCompleteCallback( | 782 int32_t RTCVideoEncoder::RegisterEncodeCompleteCallback( |
683 webrtc::EncodedImageCallback* callback) { | 783 webrtc::EncodedImageCallback* callback) { |
684 DVLOG(3) << "RegisterEncodeCompleteCallback()"; | 784 DVLOG(3) << "RegisterEncodeCompleteCallback()"; |
685 DCHECK(thread_checker_.CalledOnValidThread()); | 785 base::WaitableEvent register_waiter(true, false); |
686 if (!impl_.get()) { | 786 int32_t register_retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
687 DVLOG(3) << "RegisterEncodeCompleteCallback(): returning " << impl_status_; | 787 gpu_task_runner_->PostTask( |
688 return impl_status_; | 788 FROM_HERE, |
689 } | 789 base::Bind(&RTCVideoEncoder::Impl::RegisterEncodeCompleteCallback, impl_, |
690 | 790 ®ister_waiter, ®ister_retval, callback)); |
691 encoded_image_callback_ = callback; | 791 register_waiter.Wait(); |
692 return WEBRTC_VIDEO_CODEC_OK; | 792 return register_retval; |
693 } | 793 } |
694 | 794 |
695 int32_t RTCVideoEncoder::Release() { | 795 int32_t RTCVideoEncoder::Release() { |
696 DVLOG(3) << "Release()"; | 796 DVLOG(3) << "Release()"; |
697 DCHECK(thread_checker_.CalledOnValidThread()); | |
698 | 797 |
699 if (impl_.get()) { | 798 base::WaitableEvent release_waiter(true, false); |
700 gpu_task_runner_->PostTask(FROM_HERE, | 799 gpu_task_runner_->PostTask( |
701 base::Bind(&RTCVideoEncoder::Impl::Destroy, impl_)); | 800 FROM_HERE, |
702 impl_ = NULL; | 801 base::Bind(&RTCVideoEncoder::Impl::Destroy, impl_, &release_waiter)); |
703 weak_factory_.InvalidateWeakPtrs(); | 802 release_waiter.Wait(); |
704 impl_status_ = WEBRTC_VIDEO_CODEC_UNINITIALIZED; | |
705 } | |
706 return WEBRTC_VIDEO_CODEC_OK; | 803 return WEBRTC_VIDEO_CODEC_OK; |
707 } | 804 } |
708 | 805 |
709 int32_t RTCVideoEncoder::SetChannelParameters(uint32_t packet_loss, | 806 int32_t RTCVideoEncoder::SetChannelParameters(uint32_t packet_loss, |
710 int64_t rtt) { | 807 int64_t rtt) { |
711 DVLOG(3) << "SetChannelParameters(): packet_loss=" << packet_loss | 808 DVLOG(3) << "SetChannelParameters(): packet_loss=" << packet_loss |
712 << ", rtt=" << rtt; | 809 << ", rtt=" << rtt; |
713 // Ignored. | 810 // Ignored. |
714 return WEBRTC_VIDEO_CODEC_OK; | 811 return WEBRTC_VIDEO_CODEC_OK; |
715 } | 812 } |
716 | 813 |
717 int32_t RTCVideoEncoder::SetRates(uint32_t new_bit_rate, uint32_t frame_rate) { | 814 int32_t RTCVideoEncoder::SetRates(uint32_t new_bit_rate, uint32_t frame_rate) { |
718 DVLOG(3) << "SetRates(): new_bit_rate=" << new_bit_rate | 815 DVLOG(3) << "SetRates(): new_bit_rate=" << new_bit_rate |
719 << ", frame_rate=" << frame_rate; | 816 << ", frame_rate=" << frame_rate; |
720 if (!impl_.get()) { | 817 const int32_t retval = impl_->GetStatus(); |
721 DVLOG(3) << "SetRates(): returning " << impl_status_; | 818 if (retval != WEBRTC_VIDEO_CODEC_OK) { |
722 return impl_status_; | 819 DVLOG(3) << "SetRates(): returning " << retval; |
| 820 return retval; |
723 } | 821 } |
724 | 822 |
725 gpu_task_runner_->PostTask( | 823 gpu_task_runner_->PostTask( |
726 FROM_HERE, | 824 FROM_HERE, |
727 base::Bind(&RTCVideoEncoder::Impl::RequestEncodingParametersChange, | 825 base::Bind(&RTCVideoEncoder::Impl::RequestEncodingParametersChange, |
728 impl_, | 826 impl_, |
729 new_bit_rate, | 827 new_bit_rate, |
730 frame_rate)); | 828 frame_rate)); |
731 return WEBRTC_VIDEO_CODEC_OK; | 829 return WEBRTC_VIDEO_CODEC_OK; |
732 } | 830 } |
733 | 831 |
734 void RTCVideoEncoder::ReturnEncodedImage( | |
735 std::unique_ptr<webrtc::EncodedImage> image, | |
736 int32_t bitstream_buffer_id, | |
737 uint16_t picture_id) { | |
738 DCHECK(thread_checker_.CalledOnValidThread()); | |
739 DVLOG(3) << "ReturnEncodedImage(): " | |
740 << "bitstream_buffer_id=" << bitstream_buffer_id | |
741 << ", picture_id=" << picture_id; | |
742 | |
743 if (!encoded_image_callback_) | |
744 return; | |
745 | |
746 webrtc::RTPFragmentationHeader header; | |
747 memset(&header, 0, sizeof(header)); | |
748 switch (video_codec_type_) { | |
749 case webrtc::kVideoCodecVP8: | |
750 // Generate a header describing a single fragment. | |
751 header.VerifyAndAllocateFragmentationHeader(1); | |
752 header.fragmentationOffset[0] = 0; | |
753 header.fragmentationLength[0] = image->_length; | |
754 header.fragmentationPlType[0] = 0; | |
755 header.fragmentationTimeDiff[0] = 0; | |
756 break; | |
757 case webrtc::kVideoCodecH264: | |
758 if (!GetRTPFragmentationHeaderH264( | |
759 &header, image->_buffer, image->_length)) { | |
760 DLOG(ERROR) << "Failed to get RTP fragmentation header for H264"; | |
761 NotifyError(WEBRTC_VIDEO_CODEC_ERROR); | |
762 return; | |
763 } | |
764 break; | |
765 default: | |
766 NOTREACHED() << "Invalid video codec type"; | |
767 return; | |
768 } | |
769 | |
770 webrtc::CodecSpecificInfo info; | |
771 memset(&info, 0, sizeof(info)); | |
772 info.codecType = video_codec_type_; | |
773 if (video_codec_type_ == webrtc::kVideoCodecVP8) { | |
774 info.codecSpecific.VP8.pictureId = picture_id; | |
775 info.codecSpecific.VP8.tl0PicIdx = -1; | |
776 info.codecSpecific.VP8.keyIdx = -1; | |
777 } | |
778 | |
779 int32_t retval = encoded_image_callback_->Encoded(*image, &info, &header); | |
780 if (retval < 0) { | |
781 DVLOG(2) << "ReturnEncodedImage(): encoded_image_callback_ returned " | |
782 << retval; | |
783 } | |
784 | |
785 // The call through webrtc::EncodedImageCallback is synchronous, so we can | |
786 // immediately recycle the output buffer back to the Impl. | |
787 gpu_task_runner_->PostTask( | |
788 FROM_HERE, | |
789 base::Bind(&RTCVideoEncoder::Impl::UseOutputBitstreamBufferId, | |
790 impl_, | |
791 bitstream_buffer_id)); | |
792 } | |
793 | |
794 void RTCVideoEncoder::NotifyError(int32_t error) { | |
795 DCHECK(thread_checker_.CalledOnValidThread()); | |
796 DVLOG(1) << "NotifyError(): error=" << error; | |
797 | |
798 impl_status_ = error; | |
799 gpu_task_runner_->PostTask(FROM_HERE, | |
800 base::Bind(&RTCVideoEncoder::Impl::Destroy, impl_)); | |
801 impl_ = NULL; | |
802 } | |
803 | |
804 void RTCVideoEncoder::RecordInitEncodeUMA( | 832 void RTCVideoEncoder::RecordInitEncodeUMA( |
805 int32_t init_retval, media::VideoCodecProfile profile) { | 833 int32_t init_retval, media::VideoCodecProfile profile) { |
806 UMA_HISTOGRAM_BOOLEAN("Media.RTCVideoEncoderInitEncodeSuccess", | 834 UMA_HISTOGRAM_BOOLEAN("Media.RTCVideoEncoderInitEncodeSuccess", |
807 init_retval == WEBRTC_VIDEO_CODEC_OK); | 835 init_retval == WEBRTC_VIDEO_CODEC_OK); |
808 if (init_retval == WEBRTC_VIDEO_CODEC_OK) { | 836 if (init_retval == WEBRTC_VIDEO_CODEC_OK) { |
809 UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoEncoderProfile", | 837 UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoEncoderProfile", |
810 profile, | 838 profile, |
811 media::VIDEO_CODEC_PROFILE_MAX + 1); | 839 media::VIDEO_CODEC_PROFILE_MAX + 1); |
812 } | 840 } |
813 } | 841 } |
814 | 842 |
815 } // namespace content | 843 } // namespace content |