OLD | NEW |
---|---|
1 // Copyright 2013 The Chromium Authors. All rights reserved. | 1 // Copyright 2013 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/renderer/media/rtc_video_encoder.h" | 5 #include "content/renderer/media/rtc_video_encoder.h" |
6 | 6 |
7 #include <string.h> | 7 #include <string.h> |
8 | 8 |
9 #include "base/bind.h" | 9 #include "base/bind.h" |
10 #include "base/location.h" | 10 #include "base/location.h" |
(...skipping 85 matching lines...) | |
96 // | 96 // |
97 // This class separates state related to the thread that RTCVideoEncoder | 97 // This class separates state related to the thread that RTCVideoEncoder |
98 // operates on (presently the libjingle worker thread) from the thread that | 98 // operates on (presently the libjingle worker thread) from the thread that |
99 // |gpu_factories_| provides for accelerator operations (presently the media | 99 // |gpu_factories_| provides for accelerator operations (presently the media |
100 // thread). The RTCVideoEncoder class can be deleted directly by WebRTC, while | 100 // thread). The RTCVideoEncoder class can be deleted directly by WebRTC, while |
101 // RTCVideoEncoder::Impl stays around long enough to properly shut down the VEA. | 101 // RTCVideoEncoder::Impl stays around long enough to properly shut down the VEA. |
102 class RTCVideoEncoder::Impl | 102 class RTCVideoEncoder::Impl |
103 : public media::VideoEncodeAccelerator::Client, | 103 : public media::VideoEncodeAccelerator::Client, |
104 public base::RefCountedThreadSafe<RTCVideoEncoder::Impl> { | 104 public base::RefCountedThreadSafe<RTCVideoEncoder::Impl> { |
105 public: | 105 public: |
106 Impl(const base::WeakPtr<RTCVideoEncoder>& weak_encoder, | 106 Impl(media::GpuVideoAcceleratorFactories* gpu_factories, |
107 media::GpuVideoAcceleratorFactories* gpu_factories); | 107 webrtc::VideoCodecType video_codec_type); |
108 | 108 |
109 // Create the VEA and call Initialize() on it. Called once per instantiation, | 109 // Create the VEA and call Initialize() on it. Called once per instantiation, |
110 // and then the instance is bound forevermore to whichever thread made the | 110 // and then the instance is bound forevermore to whichever thread made the |
111 // call. | 111 // call. |
112 // RTCVideoEncoder expects to be able to call this function synchronously from | 112 // RTCVideoEncoder expects to be able to call this function synchronously from |
113 // its own thread, hence the |async_waiter| and |async_retval| arguments. | 113 // its own thread, hence the |async_waiter| and |async_retval| arguments. |
114 void CreateAndInitializeVEA(const gfx::Size& input_visible_size, | 114 void CreateAndInitializeVEA(const gfx::Size& input_visible_size, |
115 uint32_t bitrate, | 115 uint32_t bitrate, |
116 media::VideoCodecProfile profile, | 116 media::VideoCodecProfile profile, |
117 base::WaitableEvent* async_waiter, | 117 base::WaitableEvent* async_waiter, |
118 int32_t* async_retval); | 118 int32_t* async_retval); |
119 // Enqueue a frame from WebRTC for encoding. | 119 // Enqueue a frame from WebRTC for encoding. |
120 // RTCVideoEncoder expects to be able to call this function synchronously from | 120 // RTCVideoEncoder expects to be able to call this function synchronously from |
121 // its own thread, hence the |async_waiter| and |async_retval| arguments. | 121 // its own thread, hence the |async_waiter| and |async_retval| arguments. |
122 void Enqueue(const webrtc::VideoFrame* input_frame, | 122 void Enqueue(const webrtc::VideoFrame* input_frame, |
123 bool force_keyframe, | 123 bool force_keyframe, |
124 base::WaitableEvent* async_waiter, | 124 base::WaitableEvent* async_waiter, |
125 int32_t* async_retval); | 125 int32_t* async_retval); |
126 | 126 |
127 // RTCVideoEncoder is given a buffer to be passed to WebRTC through the | 127 // RTCVideoEncoder is given a buffer to be passed to WebRTC through the |
128 // RTCVideoEncoder::ReturnEncodedImage() function. When that is complete, | 128 // RTCVideoEncoder::ReturnEncodedImage() function. When that is complete, |
129 // the buffer is returned to Impl by its index using this function. | 129 // the buffer is returned to Impl by its index using this function. |
130 void UseOutputBitstreamBufferId(int32_t bitstream_buffer_id); | 130 void UseOutputBitstreamBufferId(int32_t bitstream_buffer_id); |
131 | 131 |
132 // Request encoding parameter change for the underlying encoder. | 132 // Request encoding parameter change for the underlying encoder. |
133 void RequestEncodingParametersChange(uint32_t bitrate, uint32_t framerate); | 133 void RequestEncodingParametersChange(uint32_t bitrate, uint32_t framerate); |
134 | 134 |
135 // Destroy this Impl's encoder. The destructor is not explicitly called, as | 135 // Destroy this Impl's encoder. The destructor is not explicitly called, as |
136 // Impl is a base::RefCountedThreadSafe. | 136 // Impl is a base::RefCountedThreadSafe. |
137 void Destroy(); | 137 void Destroy(base::WaitableEvent* waiter); |
pbos
2016/04/04 09:00:31
Should this also be named async_waiter for consistency?
wuchengli
2016/04/06 07:51:58
Done.
| |
138 | |
139 // Return the status of Impl. One of WEBRTC_VIDEO_CODEC_XXX value. | |
140 int32_t GetStatus(); | |
141 | |
142 webrtc::VideoCodecType video_codec_type() { return video_codec_type_; } | |
138 | 143 |
139 // media::VideoEncodeAccelerator::Client implementation. | 144 // media::VideoEncodeAccelerator::Client implementation. |
140 void RequireBitstreamBuffers(unsigned int input_count, | 145 void RequireBitstreamBuffers(unsigned int input_count, |
141 const gfx::Size& input_coded_size, | 146 const gfx::Size& input_coded_size, |
142 size_t output_buffer_size) override; | 147 size_t output_buffer_size) override; |
143 void BitstreamBufferReady(int32_t bitstream_buffer_id, | 148 void BitstreamBufferReady(int32_t bitstream_buffer_id, |
144 size_t payload_size, | 149 size_t payload_size, |
145 bool key_frame) override; | 150 bool key_frame) override; |
146 void NotifyError(media::VideoEncodeAccelerator::Error error) override; | 151 void NotifyError(media::VideoEncodeAccelerator::Error error) override; |
147 | 152 |
153 void RegisterEncodeCompleteCallback(base::WaitableEvent* async_waiter, | |
154 int32_t* async_retval, | |
155 webrtc::EncodedImageCallback* callback); | |
156 | |
148 private: | 157 private: |
149 friend class base::RefCountedThreadSafe<Impl>; | 158 friend class base::RefCountedThreadSafe<Impl>; |
150 | 159 |
151 enum { | 160 enum { |
152 kInputBufferExtraCount = 1, // The number of input buffers allocated, more | 161 kInputBufferExtraCount = 1, // The number of input buffers allocated, more |
153 // than what is requested by | 162 // than what is requested by |
154 // VEA::RequireBitstreamBuffers(). | 163 // VEA::RequireBitstreamBuffers(). |
155 kOutputBufferCount = 3, | 164 kOutputBufferCount = 3, |
156 }; | 165 }; |
157 | 166 |
(...skipping 15 matching lines...) | |
173 void RegisterAsyncWaiter(base::WaitableEvent* waiter, int32_t* retval); | 182 void RegisterAsyncWaiter(base::WaitableEvent* waiter, int32_t* retval); |
174 void SignalAsyncWaiter(int32_t retval); | 183 void SignalAsyncWaiter(int32_t retval); |
175 | 184 |
176 // Checks if the bitrate would overflow when passing from kbps to bps. | 185 // Checks if the bitrate would overflow when passing from kbps to bps. |
177 bool IsBitrateTooHigh(uint32_t bitrate); | 186 bool IsBitrateTooHigh(uint32_t bitrate); |
178 | 187 |
179 // Checks if the frame size is different than hardware accelerator | 188 // Checks if the frame size is different than hardware accelerator |
180 // requirements. | 189 // requirements. |
181 bool RequiresSizeChange(const scoped_refptr<media::VideoFrame>& frame) const; | 190 bool RequiresSizeChange(const scoped_refptr<media::VideoFrame>& frame) const; |
182 | 191 |
192 // Return an encoded output buffer to WebRTC. | |
193 void ReturnEncodedImage(const webrtc::EncodedImage& image, | |
194 int32_t bitstream_buffer_id, | |
195 uint16_t picture_id); | |
196 | |
183 base::ThreadChecker thread_checker_; | 197 base::ThreadChecker thread_checker_; |
184 | 198 |
185 // Weak pointer to the parent RTCVideoEncoder, for posting back VEA::Client | |
186 // notifications. | |
187 const base::WeakPtr<RTCVideoEncoder> weak_encoder_; | |
188 | |
189 // The message loop on which to post callbacks to |weak_encoder_|. | |
190 const scoped_refptr<base::SingleThreadTaskRunner> encoder_task_runner_; | |
191 | |
192 // Factory for creating VEAs, shared memory buffers, etc. | 199 // Factory for creating VEAs, shared memory buffers, etc. |
193 media::GpuVideoAcceleratorFactories* gpu_factories_; | 200 media::GpuVideoAcceleratorFactories* gpu_factories_; |
194 | 201 |
195 // webrtc::VideoEncoder expects InitEncode() and Encode() to be synchronous. | 202 // webrtc::VideoEncoder expects InitEncode() and Encode() to be synchronous. |
196 // Do this by waiting on the |async_waiter_| and returning the return value in | 203 // Do this by waiting on the |async_waiter_| and returning the return value in |
197 // |async_retval_| when initialization completes, encoding completes, or | 204 // |async_retval_| when initialization completes, encoding completes, or |
198 // an error occurs. | 205 // an error occurs. |
199 base::WaitableEvent* async_waiter_; | 206 base::WaitableEvent* async_waiter_; |
200 int32_t* async_retval_; | 207 int32_t* async_retval_; |
201 | 208 |
(...skipping 19 matching lines...) | |
221 // we don't care about ordering. | 228 // we don't care about ordering. |
222 std::vector<int> input_buffers_free_; | 229 std::vector<int> input_buffers_free_; |
223 | 230 |
224 // The number of output buffers ready to be filled with output from the | 231 // The number of output buffers ready to be filled with output from the |
225 // encoder. | 232 // encoder. |
226 int output_buffers_free_count_; | 233 int output_buffers_free_count_; |
227 | 234 |
228 // 15-bit running index of the VP8 frames. See VP8 RTP spec for details. | 235 // 15-bit running index of the VP8 frames. See VP8 RTP spec for details. |
229 uint16_t picture_id_; | 236 uint16_t picture_id_; |
230 | 237 |
238 // webrtc::VideoEncoder encode complete callback. | |
239 webrtc::EncodedImageCallback* encoded_image_callback_; | |
240 | |
241 // The video codec type, as reported to WebRTC. | |
242 const webrtc::VideoCodecType video_codec_type_; | |
243 | |
244 // We cannot immediately return error conditions to the WebRTC user of this | |
245 // class, as there is no error callback in the webrtc::VideoEncoder interface. | |
246 // Instead, we cache an error status here and return it the next time an | |
247 // interface entry point is called. | |
248 std::atomic<int32_t> status_; | |
249 | |
231 DISALLOW_COPY_AND_ASSIGN(Impl); | 250 DISALLOW_COPY_AND_ASSIGN(Impl); |
232 }; | 251 }; |
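The |async_waiter_| and |async_retval_| members above carry the synchronous bridging described in the class comments: the WebRTC thread posts work to the accelerator thread and blocks on a base::WaitableEvent until Impl signals a result. A minimal sketch of that pattern follows; CallImplSynchronously and DoWorkOnImplThread are hypothetical names, while base::WaitableEvent, base::Bind, and PostTask are the same calls used later in this file.

// Sketch only; DoWorkOnImplThread stands in for CreateAndInitializeVEA,
// Enqueue, or RegisterEncodeCompleteCallback.
int32_t CallImplSynchronously(
    const scoped_refptr<RTCVideoEncoder::Impl>& impl,
    const scoped_refptr<base::SingleThreadTaskRunner>& gpu_task_runner) {
  base::WaitableEvent waiter(true /* manual_reset */, false /* signaled */);
  int32_t retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  // Impl stores |&waiter| and |&retval| via RegisterAsyncWaiter() and later
  // calls SignalAsyncWaiter(result), which writes *retval and signals the
  // event from the accelerator thread.
  gpu_task_runner->PostTask(
      FROM_HERE, base::Bind(&RTCVideoEncoder::Impl::DoWorkOnImplThread, impl,
                            &waiter, &retval));
  waiter.Wait();  // Block the WebRTC thread until Impl reports a result.
  return retval;
}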
233 | 252 |
234 RTCVideoEncoder::Impl::Impl(const base::WeakPtr<RTCVideoEncoder>& weak_encoder, | 253 RTCVideoEncoder::Impl::Impl(media::GpuVideoAcceleratorFactories* gpu_factories, |
235 media::GpuVideoAcceleratorFactories* gpu_factories) | 254 webrtc::VideoCodecType video_codec_type) |
236 : weak_encoder_(weak_encoder), | 255 : gpu_factories_(gpu_factories), |
237 encoder_task_runner_(base::ThreadTaskRunnerHandle::Get()), | |
238 gpu_factories_(gpu_factories), | |
239 async_waiter_(NULL), | 256 async_waiter_(NULL), |
240 async_retval_(NULL), | 257 async_retval_(NULL), |
241 input_next_frame_(NULL), | 258 input_next_frame_(NULL), |
242 input_next_frame_keyframe_(false), | 259 input_next_frame_keyframe_(false), |
243 output_buffers_free_count_(0) { | 260 output_buffers_free_count_(0), |
261 encoded_image_callback_(nullptr), | |
262 video_codec_type_(video_codec_type), | |
263 status_(WEBRTC_VIDEO_CODEC_OK) { | |
244 thread_checker_.DetachFromThread(); | 264 thread_checker_.DetachFromThread(); |
245 // Picture ID should start on a random number. | 265 // Picture ID should start on a random number. |
246 picture_id_ = static_cast<uint16_t>(base::RandInt(0, 0x7FFF)); | 266 picture_id_ = static_cast<uint16_t>(base::RandInt(0, 0x7FFF)); |
247 } | 267 } |
248 | 268 |
249 void RTCVideoEncoder::Impl::CreateAndInitializeVEA( | 269 void RTCVideoEncoder::Impl::CreateAndInitializeVEA( |
250 const gfx::Size& input_visible_size, | 270 const gfx::Size& input_visible_size, |
251 uint32_t bitrate, | 271 uint32_t bitrate, |
252 media::VideoCodecProfile profile, | 272 media::VideoCodecProfile profile, |
253 base::WaitableEvent* async_waiter, | 273 base::WaitableEvent* async_waiter, |
254 int32_t* async_retval) { | 274 int32_t* async_retval) { |
255 DVLOG(3) << "Impl::CreateAndInitializeVEA()"; | 275 DVLOG(3) << "Impl::CreateAndInitializeVEA()"; |
256 DCHECK(thread_checker_.CalledOnValidThread()); | 276 DCHECK(thread_checker_.CalledOnValidThread()); |
257 | 277 |
278 status_.store(WEBRTC_VIDEO_CODEC_UNINITIALIZED); | |
258 RegisterAsyncWaiter(async_waiter, async_retval); | 279 RegisterAsyncWaiter(async_waiter, async_retval); |
259 | 280 |
260 // Check for overflow converting bitrate (kilobits/sec) to bits/sec. | 281 // Check for overflow converting bitrate (kilobits/sec) to bits/sec. |
261 if (IsBitrateTooHigh(bitrate)) | 282 if (IsBitrateTooHigh(bitrate)) |
262 return; | 283 return; |
263 | 284 |
264 video_encoder_ = gpu_factories_->CreateVideoEncodeAccelerator(); | 285 video_encoder_ = gpu_factories_->CreateVideoEncodeAccelerator(); |
265 if (!video_encoder_) { | 286 if (!video_encoder_) { |
266 LogAndNotifyError(FROM_HERE, "Error creating VideoEncodeAccelerator", | 287 LogAndNotifyError(FROM_HERE, "Error creating VideoEncodeAccelerator", |
267 media::VideoEncodeAccelerator::kPlatformFailureError); | 288 media::VideoEncodeAccelerator::kPlatformFailureError); |
268 return; | 289 return; |
269 } | 290 } |
270 input_visible_size_ = input_visible_size; | 291 input_visible_size_ = input_visible_size; |
271 if (!video_encoder_->Initialize(media::PIXEL_FORMAT_I420, input_visible_size_, | 292 if (!video_encoder_->Initialize(media::PIXEL_FORMAT_I420, input_visible_size_, |
272 profile, bitrate * 1000, this)) { | 293 profile, bitrate * 1000, this)) { |
273 LogAndNotifyError(FROM_HERE, "Error initializing video_encoder", | 294 LogAndNotifyError(FROM_HERE, "Error initializing video_encoder", |
274 media::VideoEncodeAccelerator::kInvalidArgumentError); | 295 media::VideoEncodeAccelerator::kInvalidArgumentError); |
275 return; | 296 return; |
276 } | 297 } |
298 // RequireBitstreamBuffers or NotifyError will be called and the waiter will | |
299 // be signaled. | |
277 } | 300 } |
278 | 301 |
279 void RTCVideoEncoder::Impl::Enqueue(const webrtc::VideoFrame* input_frame, | 302 void RTCVideoEncoder::Impl::Enqueue(const webrtc::VideoFrame* input_frame, |
280 bool force_keyframe, | 303 bool force_keyframe, |
281 base::WaitableEvent* async_waiter, | 304 base::WaitableEvent* async_waiter, |
282 int32_t* async_retval) { | 305 int32_t* async_retval) { |
283 DVLOG(3) << "Impl::Enqueue()"; | 306 DVLOG(3) << "Impl::Enqueue()"; |
284 DCHECK(thread_checker_.CalledOnValidThread()); | 307 DCHECK(thread_checker_.CalledOnValidThread()); |
285 DCHECK(!input_next_frame_); | 308 DCHECK(!input_next_frame_); |
286 | 309 |
287 RegisterAsyncWaiter(async_waiter, async_retval); | 310 RegisterAsyncWaiter(async_waiter, async_retval); |
311 if (status_.load() != WEBRTC_VIDEO_CODEC_OK) { | |
pbos
2016/04/04 09:00:31
Store this status_.load() instead of reading it twice.
wuchengli
2016/04/06 07:51:58
Done. Actually this is fine because |status_| is o
| |
312 SignalAsyncWaiter(status_.load()); | |
313 return; | |
314 } | |
315 | |
288 // If there are no free input and output buffers, drop the frame to avoid a | 316 // If there are no free input and output buffers, drop the frame to avoid a |
289 // deadlock. If there is a free input buffer, EncodeOneFrame will run and | 317 // deadlock. If there is a free input buffer, EncodeOneFrame will run and |
290 // unblock Encode(). If there are no free input buffers but there is a free | 318 // unblock Encode(). If there are no free input buffers but there is a free |
291 // output buffer, EncodeFrameFinished will be called later to unblock | 319 // output buffer, EncodeFrameFinished will be called later to unblock |
292 // Encode(). | 320 // Encode(). |
293 // | 321 // |
294 // The caller of Encode() holds a webrtc lock. The deadlock happens when: | 322 // The caller of Encode() holds a webrtc lock. The deadlock happens when: |
295 // (1) Encode() is waiting for the frame to be encoded in EncodeOneFrame(). | 323 // (1) Encode() is waiting for the frame to be encoded in EncodeOneFrame(). |
296 // (2) There are no free input buffers and they cannot be freed because | 324 // (2) There are no free input buffers and they cannot be freed because |
297 // the encoder has no output buffers. | 325 // the encoder has no output buffers. |
(...skipping 39 matching lines...) | |
337 DCHECK(thread_checker_.CalledOnValidThread()); | 365 DCHECK(thread_checker_.CalledOnValidThread()); |
338 | 366 |
339 // Check for overflow converting bitrate (kilobits/sec) to bits/sec. | 367 // Check for overflow converting bitrate (kilobits/sec) to bits/sec. |
340 if (IsBitrateTooHigh(bitrate)) | 368 if (IsBitrateTooHigh(bitrate)) |
341 return; | 369 return; |
342 | 370 |
343 if (video_encoder_) | 371 if (video_encoder_) |
344 video_encoder_->RequestEncodingParametersChange(bitrate * 1000, framerate); | 372 video_encoder_->RequestEncodingParametersChange(bitrate * 1000, framerate); |
345 } | 373 } |
346 | 374 |
347 void RTCVideoEncoder::Impl::Destroy() { | 375 void RTCVideoEncoder::Impl::Destroy(base::WaitableEvent* waiter) { |
348 DVLOG(3) << "Impl::Destroy()"; | 376 DVLOG(3) << "Impl::Destroy()"; |
349 DCHECK(thread_checker_.CalledOnValidThread()); | 377 DCHECK(thread_checker_.CalledOnValidThread()); |
350 video_encoder_.reset(); | 378 if (video_encoder_) { |
379 video_encoder_.reset(); | |
380 status_.store(WEBRTC_VIDEO_CODEC_UNINITIALIZED); | |
381 } | |
382 waiter->Signal(); | |
383 } | |
384 | |
385 int32_t RTCVideoEncoder::Impl::GetStatus() { | |
386 return status_.load(); | |
351 } | 387 } |
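GetStatus() and the status_ stores above implement the error caching noted in the class comments: webrtc::VideoEncoder offers no error callback, so a failure observed on the accelerator thread is recorded atomically and reported from whichever entry point WebRTC calls next. A condensed sketch with illustrative names (OnAcceleratorError, NextWebrtcEntryPoint):

std::atomic<int32_t> status_{WEBRTC_VIDEO_CODEC_OK};

void OnAcceleratorError() {                 // accelerator thread
  status_.store(WEBRTC_VIDEO_CODEC_ERROR);  // remember the failure
}

int32_t NextWebrtcEntryPoint() {            // WebRTC thread
  const int32_t current = status_.load();
  if (current != WEBRTC_VIDEO_CODEC_OK)
    return current;                         // surface the cached error
  // ... normal path ...
  return WEBRTC_VIDEO_CODEC_OK;
}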
352 | 388 |
353 void RTCVideoEncoder::Impl::RequireBitstreamBuffers( | 389 void RTCVideoEncoder::Impl::RequireBitstreamBuffers( |
354 unsigned int input_count, | 390 unsigned int input_count, |
355 const gfx::Size& input_coded_size, | 391 const gfx::Size& input_coded_size, |
356 size_t output_buffer_size) { | 392 size_t output_buffer_size) { |
357 DVLOG(3) << "Impl::RequireBitstreamBuffers(): input_count=" << input_count | 393 DVLOG(3) << "Impl::RequireBitstreamBuffers(): input_count=" << input_count |
358 << ", input_coded_size=" << input_coded_size.ToString() | 394 << ", input_coded_size=" << input_coded_size.ToString() |
359 << ", output_buffer_size=" << output_buffer_size; | 395 << ", output_buffer_size=" << output_buffer_size; |
360 DCHECK(thread_checker_.CalledOnValidThread()); | 396 DCHECK(thread_checker_.CalledOnValidThread()); |
(...skipping 26 matching lines...) | |
387 } | 423 } |
388 output_buffers_.push_back(shm.release()); | 424 output_buffers_.push_back(shm.release()); |
389 } | 425 } |
390 | 426 |
391 // Immediately provide all output buffers to the VEA. | 427 // Immediately provide all output buffers to the VEA. |
392 for (size_t i = 0; i < output_buffers_.size(); ++i) { | 428 for (size_t i = 0; i < output_buffers_.size(); ++i) { |
393 video_encoder_->UseOutputBitstreamBuffer(media::BitstreamBuffer( | 429 video_encoder_->UseOutputBitstreamBuffer(media::BitstreamBuffer( |
394 i, output_buffers_[i]->handle(), output_buffers_[i]->mapped_size())); | 430 i, output_buffers_[i]->handle(), output_buffers_[i]->mapped_size())); |
395 output_buffers_free_count_++; | 431 output_buffers_free_count_++; |
396 } | 432 } |
433 DCHECK_EQ(status_.load(), WEBRTC_VIDEO_CODEC_UNINITIALIZED); | |
434 status_.store(WEBRTC_VIDEO_CODEC_OK); | |
397 SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK); | 435 SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK); |
398 } | 436 } |
399 | 437 |
400 void RTCVideoEncoder::Impl::BitstreamBufferReady(int32_t bitstream_buffer_id, | 438 void RTCVideoEncoder::Impl::BitstreamBufferReady(int32_t bitstream_buffer_id, |
401 size_t payload_size, | 439 size_t payload_size, |
402 bool key_frame) { | 440 bool key_frame) { |
403 DVLOG(3) << "Impl::BitstreamBufferReady(): " | 441 DVLOG(3) << "Impl::BitstreamBufferReady(): " |
404 "bitstream_buffer_id=" << bitstream_buffer_id | 442 "bitstream_buffer_id=" << bitstream_buffer_id |
405 << ", payload_size=" << payload_size | 443 << ", payload_size=" << payload_size |
406 << ", key_frame=" << key_frame; | 444 << ", key_frame=" << key_frame; |
(...skipping 15 matching lines...) | |
422 | 460 |
423 // Use webrtc timestamps to ensure correct RTP sender behavior. | 461 // Use webrtc timestamps to ensure correct RTP sender behavior. |
424 // TODO(hshi): obtain timestamp from the capturer, see crbug.com/350106. | 462 // TODO(hshi): obtain timestamp from the capturer, see crbug.com/350106. |
425 const int64_t capture_time_us = webrtc::TickTime::MicrosecondTimestamp(); | 463 const int64_t capture_time_us = webrtc::TickTime::MicrosecondTimestamp(); |
426 | 464 |
427 // Derive the capture time (in ms) and RTP timestamp (in 90KHz ticks). | 465 // Derive the capture time (in ms) and RTP timestamp (in 90KHz ticks). |
428 const int64_t capture_time_ms = capture_time_us / 1000; | 466 const int64_t capture_time_ms = capture_time_us / 1000; |
429 const uint32_t rtp_timestamp = | 467 const uint32_t rtp_timestamp = |
430 static_cast<uint32_t>(capture_time_us * 90 / 1000); | 468 static_cast<uint32_t>(capture_time_us * 90 / 1000); |
431 | 469 |
432 scoped_ptr<webrtc::EncodedImage> image(new webrtc::EncodedImage( | 470 webrtc::EncodedImage image( |
433 reinterpret_cast<uint8_t*>(output_buffer->memory()), | 471 reinterpret_cast<uint8_t*>(output_buffer->memory()), |
434 payload_size, | 472 payload_size, |
435 output_buffer->mapped_size())); | 473 output_buffer->mapped_size()); |
436 image->_encodedWidth = input_visible_size_.width(); | 474 image._encodedWidth = input_visible_size_.width(); |
437 image->_encodedHeight = input_visible_size_.height(); | 475 image._encodedHeight = input_visible_size_.height(); |
438 image->_timeStamp = rtp_timestamp; | 476 image._timeStamp = rtp_timestamp; |
439 image->capture_time_ms_ = capture_time_ms; | 477 image.capture_time_ms_ = capture_time_ms; |
440 image->_frameType = | 478 image._frameType = |
441 (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta); | 479 (key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta); |
442 image->_completeFrame = true; | 480 image._completeFrame = true; |
443 | 481 |
444 encoder_task_runner_->PostTask( | 482 ReturnEncodedImage(image, bitstream_buffer_id, picture_id_); |
445 FROM_HERE, | |
446 base::Bind(&RTCVideoEncoder::ReturnEncodedImage, weak_encoder_, | |
447 base::Passed(&image), bitstream_buffer_id, picture_id_)); | |
448 // Picture ID must wrap after reaching the maximum. | 483 // Picture ID must wrap after reaching the maximum. |
449 picture_id_ = (picture_id_ + 1) & 0x7FFF; | 484 picture_id_ = (picture_id_ + 1) & 0x7FFF; |
450 } | 485 } |
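The RTP timestamp above is simply the capture time expressed in 90 kHz ticks, and the picture ID is a 15-bit counter that wraps; a small worked example with made-up values:

const int64_t capture_time_us = 2000000;                  // 2 s of capture time
const int64_t capture_time_ms = capture_time_us / 1000;   // 2000 ms
const uint32_t rtp_timestamp =
    static_cast<uint32_t>(capture_time_us * 90 / 1000);   // 180000 ticks at 90 kHz

uint16_t picture_id = 0x7FFF;                             // 15-bit maximum
picture_id = (picture_id + 1) & 0x7FFF;                   // wraps back to 0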
451 | 486 |
452 void RTCVideoEncoder::Impl::NotifyError( | 487 void RTCVideoEncoder::Impl::NotifyError( |
453 media::VideoEncodeAccelerator::Error error) { | 488 media::VideoEncodeAccelerator::Error error) { |
454 DCHECK(thread_checker_.CalledOnValidThread()); | 489 DCHECK(thread_checker_.CalledOnValidThread()); |
455 int32_t retval; | 490 int32_t retval; |
456 switch (error) { | 491 switch (error) { |
457 case media::VideoEncodeAccelerator::kInvalidArgumentError: | 492 case media::VideoEncodeAccelerator::kInvalidArgumentError: |
458 retval = WEBRTC_VIDEO_CODEC_ERR_PARAMETER; | 493 retval = WEBRTC_VIDEO_CODEC_ERR_PARAMETER; |
459 break; | 494 break; |
460 default: | 495 default: |
461 retval = WEBRTC_VIDEO_CODEC_ERROR; | 496 retval = WEBRTC_VIDEO_CODEC_ERROR; |
462 } | 497 } |
463 | 498 |
464 video_encoder_.reset(); | 499 video_encoder_.reset(); |
465 | 500 |
466 if (async_waiter_) { | 501 status_.store(retval); |
502 if (async_waiter_) | |
467 SignalAsyncWaiter(retval); | 503 SignalAsyncWaiter(retval); |
468 } else { | |
469 encoder_task_runner_->PostTask( | |
470 FROM_HERE, | |
471 base::Bind(&RTCVideoEncoder::NotifyError, weak_encoder_, retval)); | |
472 } | |
473 } | 504 } |
474 | 505 |
475 RTCVideoEncoder::Impl::~Impl() { DCHECK(!video_encoder_); } | 506 RTCVideoEncoder::Impl::~Impl() { DCHECK(!video_encoder_); } |
476 | 507 |
477 void RTCVideoEncoder::Impl::LogAndNotifyError( | 508 void RTCVideoEncoder::Impl::LogAndNotifyError( |
478 const tracked_objects::Location& location, | 509 const tracked_objects::Location& location, |
479 const std::string& str, | 510 const std::string& str, |
480 media::VideoEncodeAccelerator::Error error) { | 511 media::VideoEncodeAccelerator::Error error) { |
481 static const char* kErrorNames[] = { | 512 static const char* kErrorNames[] = { |
482 "kIllegalStateError", "kInvalidArgumentError", "kPlatformFailureError"}; | 513 "kIllegalStateError", "kInvalidArgumentError", "kPlatformFailureError"}; |
(...skipping 109 matching lines...) | |
592 media::VideoEncodeAccelerator::kInvalidArgumentError); | 623 media::VideoEncodeAccelerator::kInvalidArgumentError); |
593 return true; | 624 return true; |
594 } | 625 } |
595 | 626 |
596 bool RTCVideoEncoder::Impl::RequiresSizeChange( | 627 bool RTCVideoEncoder::Impl::RequiresSizeChange( |
597 const scoped_refptr<media::VideoFrame>& frame) const { | 628 const scoped_refptr<media::VideoFrame>& frame) const { |
598 return (frame->coded_size() != input_frame_coded_size_ || | 629 return (frame->coded_size() != input_frame_coded_size_ || |
599 frame->visible_rect() != gfx::Rect(input_visible_size_)); | 630 frame->visible_rect() != gfx::Rect(input_visible_size_)); |
600 } | 631 } |
601 | 632 |
633 void RTCVideoEncoder::Impl::RegisterEncodeCompleteCallback( | |
634 base::WaitableEvent* async_waiter, | |
635 int32_t* async_retval, | |
636 webrtc::EncodedImageCallback* callback) { | |
637 DCHECK(thread_checker_.CalledOnValidThread()); | |
638 DVLOG(3) << "RegisterEncodeCompleteCallback()"; | |
639 RegisterAsyncWaiter(async_waiter, async_retval); | |
640 if (status_.load() == WEBRTC_VIDEO_CODEC_OK) { | |
641 encoded_image_callback_ = callback; | |
642 } | |
643 SignalAsyncWaiter(status_.load()); | |
644 } | |
645 | |
646 void RTCVideoEncoder::Impl::ReturnEncodedImage( | |
647 const webrtc::EncodedImage& image, | |
648 int32_t bitstream_buffer_id, | |
649 uint16_t picture_id) { | |
650 DCHECK(thread_checker_.CalledOnValidThread()); | |
651 DVLOG(3) << "ReturnEncodedImage(): " | |
652 << "bitstream_buffer_id=" << bitstream_buffer_id | |
653 << ", picture_id=" << picture_id; | |
654 | |
655 if (!encoded_image_callback_) | |
656 return; | |
657 | |
658 webrtc::RTPFragmentationHeader header; | |
659 memset(&header, 0, sizeof(header)); | |
660 switch (video_codec_type_) { | |
661 case webrtc::kVideoCodecVP8: | |
662 // Generate a header describing a single fragment. | |
663 header.VerifyAndAllocateFragmentationHeader(1); | |
664 header.fragmentationOffset[0] = 0; | |
665 header.fragmentationLength[0] = image._length; | |
666 header.fragmentationPlType[0] = 0; | |
667 header.fragmentationTimeDiff[0] = 0; | |
668 break; | |
669 case webrtc::kVideoCodecH264: | |
670 if (!GetRTPFragmentationHeaderH264(&header, image._buffer, | |
671 image._length)) { | |
672 DLOG(ERROR) << "Failed to get RTP fragmentation header for H264"; | |
673 NotifyError( | |
674 (media::VideoEncodeAccelerator::Error)WEBRTC_VIDEO_CODEC_ERROR); | |
675 return; | |
676 } | |
677 break; | |
678 default: | |
679 NOTREACHED() << "Invalid video codec type"; | |
680 return; | |
681 } | |
682 | |
683 webrtc::CodecSpecificInfo info; | |
684 memset(&info, 0, sizeof(info)); | |
685 info.codecType = video_codec_type_; | |
686 if (video_codec_type_ == webrtc::kVideoCodecVP8) { | |
687 info.codecSpecific.VP8.pictureId = picture_id; | |
688 info.codecSpecific.VP8.tl0PicIdx = -1; | |
689 info.codecSpecific.VP8.keyIdx = -1; | |
690 } | |
691 | |
692 int32_t retval = encoded_image_callback_->Encoded(image, &info, &header); | |
693 if (retval < 0) { | |
694 DVLOG(2) << "ReturnEncodedImage(): encoded_image_callback_ returned " | |
695 << retval; | |
696 } | |
697 | |
698 UseOutputBitstreamBufferId(bitstream_buffer_id); | |
699 } | |
700 | |
602 RTCVideoEncoder::RTCVideoEncoder( | 701 RTCVideoEncoder::RTCVideoEncoder( |
603 webrtc::VideoCodecType type, | 702 webrtc::VideoCodecType type, |
604 media::GpuVideoAcceleratorFactories* gpu_factories) | 703 media::GpuVideoAcceleratorFactories* gpu_factories) |
605 : video_codec_type_(type), | 704 : video_codec_type_(type), |
kcwu
2016/04/03 18:53:46
no longer used.
wuchengli
2016/04/06 07:51:58
Done.
| |
606 gpu_factories_(gpu_factories), | 705 gpu_factories_(gpu_factories), |
607 gpu_task_runner_(gpu_factories->GetTaskRunner()), | 706 gpu_task_runner_(gpu_factories->GetTaskRunner()), |
608 encoded_image_callback_(NULL), | 707 impl_(new Impl(gpu_factories_, video_codec_type_)) { |
609 impl_status_(WEBRTC_VIDEO_CODEC_UNINITIALIZED), | |
610 weak_factory_(this) { | |
611 DVLOG(1) << "RTCVideoEncoder(): codec type=" << type; | 708 DVLOG(1) << "RTCVideoEncoder(): codec type=" << type; |
612 } | 709 } |
613 | 710 |
614 RTCVideoEncoder::~RTCVideoEncoder() { | 711 RTCVideoEncoder::~RTCVideoEncoder() { |
615 DVLOG(3) << "~RTCVideoEncoder"; | 712 DVLOG(3) << "~RTCVideoEncoder"; |
616 DCHECK(thread_checker_.CalledOnValidThread()); | |
617 Release(); | 713 Release(); |
618 DCHECK(!impl_.get()); | |
619 } | 714 } |
620 | 715 |
621 int32_t RTCVideoEncoder::InitEncode(const webrtc::VideoCodec* codec_settings, | 716 int32_t RTCVideoEncoder::InitEncode(const webrtc::VideoCodec* codec_settings, |
622 int32_t number_of_cores, | 717 int32_t number_of_cores, |
623 size_t max_payload_size) { | 718 size_t max_payload_size) { |
624 DVLOG(1) << "InitEncode(): codecType=" << codec_settings->codecType | 719 DVLOG(1) << "InitEncode(): codecType=" << codec_settings->codecType |
625 << ", width=" << codec_settings->width | 720 << ", width=" << codec_settings->width |
626 << ", height=" << codec_settings->height | 721 << ", height=" << codec_settings->height |
627 << ", startBitrate=" << codec_settings->startBitrate; | 722 << ", startBitrate=" << codec_settings->startBitrate; |
628 DCHECK(thread_checker_.CalledOnValidThread()); | |
629 DCHECK(!impl_.get()); | |
630 | 723 |
631 const media::VideoCodecProfile profile = | 724 const media::VideoCodecProfile profile = |
632 WebRTCVideoCodecToVideoCodecProfile(video_codec_type_, codec_settings); | 725 WebRTCVideoCodecToVideoCodecProfile(impl_->video_codec_type(), codec_settings); |
pbos
2016/04/04 09:00:31
line length (run git cl format)
wuchengli
2016/04/06 07:51:58
Done.
| |
633 | 726 |
634 weak_factory_.InvalidateWeakPtrs(); | |
635 impl_ = new Impl(weak_factory_.GetWeakPtr(), gpu_factories_); | |
636 base::WaitableEvent initialization_waiter(true, false); | 727 base::WaitableEvent initialization_waiter(true, false); |
637 int32_t initialization_retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED; | 728 int32_t initialization_retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
638 gpu_task_runner_->PostTask( | 729 gpu_task_runner_->PostTask( |
639 FROM_HERE, | 730 FROM_HERE, |
640 base::Bind(&RTCVideoEncoder::Impl::CreateAndInitializeVEA, | 731 base::Bind(&RTCVideoEncoder::Impl::CreateAndInitializeVEA, |
641 impl_, | 732 impl_, |
642 gfx::Size(codec_settings->width, codec_settings->height), | 733 gfx::Size(codec_settings->width, codec_settings->height), |
643 codec_settings->startBitrate, | 734 codec_settings->startBitrate, |
644 profile, | 735 profile, |
645 &initialization_waiter, | 736 &initialization_waiter, |
646 &initialization_retval)); | 737 &initialization_retval)); |
647 | 738 |
648 // webrtc::VideoEncoder expects this call to be synchronous. | 739 // webrtc::VideoEncoder expects this call to be synchronous. |
649 initialization_waiter.Wait(); | 740 initialization_waiter.Wait(); |
650 RecordInitEncodeUMA(initialization_retval, profile); | 741 RecordInitEncodeUMA(initialization_retval, profile); |
651 return initialization_retval; | 742 return initialization_retval; |
652 } | 743 } |
653 | 744 |
654 int32_t RTCVideoEncoder::Encode( | 745 int32_t RTCVideoEncoder::Encode( |
655 const webrtc::VideoFrame& input_image, | 746 const webrtc::VideoFrame& input_image, |
656 const webrtc::CodecSpecificInfo* codec_specific_info, | 747 const webrtc::CodecSpecificInfo* codec_specific_info, |
657 const std::vector<webrtc::FrameType>* frame_types) { | 748 const std::vector<webrtc::FrameType>* frame_types) { |
658 DVLOG(3) << "Encode()"; | 749 DVLOG(3) << "Encode()"; |
659 if (!impl_.get()) { | |
660 DVLOG(3) << "Encode(): returning impl_status_=" << impl_status_; | |
661 return impl_status_; | |
662 } | |
663 | 750 |
664 const bool want_key_frame = frame_types && frame_types->size() && | 751 const bool want_key_frame = frame_types && frame_types->size() && |
665 frame_types->front() == webrtc::kVideoFrameKey; | 752 frame_types->front() == webrtc::kVideoFrameKey; |
666 base::WaitableEvent encode_waiter(true, false); | 753 base::WaitableEvent encode_waiter(true, false); |
667 int32_t encode_retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED; | 754 int32_t encode_retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
668 gpu_task_runner_->PostTask( | 755 gpu_task_runner_->PostTask( |
669 FROM_HERE, | 756 FROM_HERE, |
670 base::Bind(&RTCVideoEncoder::Impl::Enqueue, | 757 base::Bind(&RTCVideoEncoder::Impl::Enqueue, |
671 impl_, | 758 impl_, |
672 &input_image, | 759 &input_image, |
673 want_key_frame, | 760 want_key_frame, |
674 &encode_waiter, | 761 &encode_waiter, |
675 &encode_retval)); | 762 &encode_retval)); |
676 | 763 |
677 // webrtc::VideoEncoder expects this call to be synchronous. | 764 // webrtc::VideoEncoder expects this call to be synchronous. |
678 encode_waiter.Wait(); | 765 encode_waiter.Wait(); |
679 DVLOG(3) << "Encode(): returning encode_retval=" << encode_retval; | 766 DVLOG(3) << "Encode(): returning encode_retval=" << encode_retval; |
680 return encode_retval; | 767 return encode_retval; |
681 } | 768 } |
682 | 769 |
683 int32_t RTCVideoEncoder::RegisterEncodeCompleteCallback( | 770 int32_t RTCVideoEncoder::RegisterEncodeCompleteCallback( |
684 webrtc::EncodedImageCallback* callback) { | 771 webrtc::EncodedImageCallback* callback) { |
685 DVLOG(3) << "RegisterEncodeCompleteCallback()"; | 772 DVLOG(3) << "RegisterEncodeCompleteCallback()"; |
686 DCHECK(thread_checker_.CalledOnValidThread()); | 773 base::WaitableEvent encode_waiter(true, false); |
687 if (!impl_.get()) { | 774 int32_t encode_retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
688 DVLOG(3) << "RegisterEncodeCompleteCallback(): returning " << impl_status_; | 775 gpu_task_runner_->PostTask( |
689 return impl_status_; | 776 FROM_HERE, |
690 } | 777 base::Bind(&RTCVideoEncoder::Impl::RegisterEncodeCompleteCallback, impl_, |
691 | 778 &encode_waiter, &encode_retval, callback)); |
692 encoded_image_callback_ = callback; | 779 encode_waiter.Wait(); |
693 return WEBRTC_VIDEO_CODEC_OK; | 780 return encode_retval;; |
pbos
2016/04/04 09:00:31
remove ;
wuchengli
2016/04/06 07:51:58
Done.
| |
694 } | 781 } |
695 | 782 |
696 int32_t RTCVideoEncoder::Release() { | 783 int32_t RTCVideoEncoder::Release() { |
697 DVLOG(3) << "Release()"; | 784 DVLOG(3) << "Release()"; |
698 DCHECK(thread_checker_.CalledOnValidThread()); | |
699 | 785 |
700 if (impl_.get()) { | 786 base::WaitableEvent encode_waiter(true, false); |
701 gpu_task_runner_->PostTask(FROM_HERE, | 787 gpu_task_runner_->PostTask( |
702 base::Bind(&RTCVideoEncoder::Impl::Destroy, impl_)); | 788 FROM_HERE, |
703 impl_ = NULL; | 789 base::Bind(&RTCVideoEncoder::Impl::Destroy, impl_, &encode_waiter)); |
704 weak_factory_.InvalidateWeakPtrs(); | 790 encode_waiter.Wait(); |
705 impl_status_ = WEBRTC_VIDEO_CODEC_UNINITIALIZED; | |
706 } | |
707 return WEBRTC_VIDEO_CODEC_OK; | 791 return WEBRTC_VIDEO_CODEC_OK; |
708 } | 792 } |
709 | 793 |
710 int32_t RTCVideoEncoder::SetChannelParameters(uint32_t packet_loss, | 794 int32_t RTCVideoEncoder::SetChannelParameters(uint32_t packet_loss, |
711 int64_t rtt) { | 795 int64_t rtt) { |
712 DVLOG(3) << "SetChannelParameters(): packet_loss=" << packet_loss | 796 DVLOG(3) << "SetChannelParameters(): packet_loss=" << packet_loss |
713 << ", rtt=" << rtt; | 797 << ", rtt=" << rtt; |
714 // Ignored. | 798 // Ignored. |
715 return WEBRTC_VIDEO_CODEC_OK; | 799 return impl_->GetStatus(); |
pbos
2016/04/04 09:00:31
I think this should still return WEBRTC_VIDEO_CODEC_OK.
wuchengli
2016/04/06 07:51:58
Done.
| |
716 } | 800 } |
717 | 801 |
718 int32_t RTCVideoEncoder::SetRates(uint32_t new_bit_rate, uint32_t frame_rate) { | 802 int32_t RTCVideoEncoder::SetRates(uint32_t new_bit_rate, uint32_t frame_rate) { |
719 DVLOG(3) << "SetRates(): new_bit_rate=" << new_bit_rate | 803 DVLOG(3) << "SetRates(): new_bit_rate=" << new_bit_rate |
720 << ", frame_rate=" << frame_rate; | 804 << ", frame_rate=" << frame_rate; |
721 if (!impl_.get()) { | 805 int32_t retval = impl_->GetStatus(); |
722 DVLOG(3) << "SetRates(): returning " << impl_status_; | 806 if (retval != WEBRTC_VIDEO_CODEC_OK) { |
723 return impl_status_; | 807 DVLOG(3) << "SetRates(): returning " << retval; |
808 return retval; | |
724 } | 809 } |
725 | 810 |
726 gpu_task_runner_->PostTask( | 811 gpu_task_runner_->PostTask( |
727 FROM_HERE, | 812 FROM_HERE, |
728 base::Bind(&RTCVideoEncoder::Impl::RequestEncodingParametersChange, | 813 base::Bind(&RTCVideoEncoder::Impl::RequestEncodingParametersChange, |
729 impl_, | 814 impl_, |
730 new_bit_rate, | 815 new_bit_rate, |
731 frame_rate)); | 816 frame_rate)); |
732 return WEBRTC_VIDEO_CODEC_OK; | 817 return WEBRTC_VIDEO_CODEC_OK; |
733 } | 818 } |
734 | 819 |
735 void RTCVideoEncoder::ReturnEncodedImage(scoped_ptr<webrtc::EncodedImage> image, | |
736 int32_t bitstream_buffer_id, | |
737 uint16_t picture_id) { | |
738 DCHECK(thread_checker_.CalledOnValidThread()); | |
739 DVLOG(3) << "ReturnEncodedImage(): " | |
740 << "bitstream_buffer_id=" << bitstream_buffer_id | |
741 << ", picture_id=" << picture_id; | |
742 | |
743 if (!encoded_image_callback_) | |
744 return; | |
745 | |
746 webrtc::RTPFragmentationHeader header; | |
747 memset(&header, 0, sizeof(header)); | |
748 switch (video_codec_type_) { | |
749 case webrtc::kVideoCodecVP8: | |
750 // Generate a header describing a single fragment. | |
751 header.VerifyAndAllocateFragmentationHeader(1); | |
752 header.fragmentationOffset[0] = 0; | |
753 header.fragmentationLength[0] = image->_length; | |
754 header.fragmentationPlType[0] = 0; | |
755 header.fragmentationTimeDiff[0] = 0; | |
756 break; | |
757 case webrtc::kVideoCodecH264: | |
758 if (!GetRTPFragmentationHeaderH264( | |
759 &header, image->_buffer, image->_length)) { | |
760 DLOG(ERROR) << "Failed to get RTP fragmentation header for H264"; | |
761 NotifyError(WEBRTC_VIDEO_CODEC_ERROR); | |
762 return; | |
763 } | |
764 break; | |
765 default: | |
766 NOTREACHED() << "Invalid video codec type"; | |
767 return; | |
768 } | |
769 | |
770 webrtc::CodecSpecificInfo info; | |
771 memset(&info, 0, sizeof(info)); | |
772 info.codecType = video_codec_type_; | |
773 if (video_codec_type_ == webrtc::kVideoCodecVP8) { | |
774 info.codecSpecific.VP8.pictureId = picture_id; | |
775 info.codecSpecific.VP8.tl0PicIdx = -1; | |
776 info.codecSpecific.VP8.keyIdx = -1; | |
777 } | |
778 | |
779 int32_t retval = encoded_image_callback_->Encoded(*image, &info, &header); | |
780 if (retval < 0) { | |
781 DVLOG(2) << "ReturnEncodedImage(): encoded_image_callback_ returned " | |
782 << retval; | |
783 } | |
784 | |
785 // The call through webrtc::EncodedImageCallback is synchronous, so we can | |
786 // immediately recycle the output buffer back to the Impl. | |
787 gpu_task_runner_->PostTask( | |
788 FROM_HERE, | |
789 base::Bind(&RTCVideoEncoder::Impl::UseOutputBitstreamBufferId, | |
790 impl_, | |
791 bitstream_buffer_id)); | |
792 } | |
793 | |
794 void RTCVideoEncoder::NotifyError(int32_t error) { | |
795 DCHECK(thread_checker_.CalledOnValidThread()); | |
796 DVLOG(1) << "NotifyError(): error=" << error; | |
797 | |
798 impl_status_ = error; | |
799 gpu_task_runner_->PostTask(FROM_HERE, | |
800 base::Bind(&RTCVideoEncoder::Impl::Destroy, impl_)); | |
801 impl_ = NULL; | |
802 } | |
803 | |
804 void RTCVideoEncoder::RecordInitEncodeUMA( | 820 void RTCVideoEncoder::RecordInitEncodeUMA( |
805 int32_t init_retval, media::VideoCodecProfile profile) { | 821 int32_t init_retval, media::VideoCodecProfile profile) { |
806 UMA_HISTOGRAM_BOOLEAN("Media.RTCVideoEncoderInitEncodeSuccess", | 822 UMA_HISTOGRAM_BOOLEAN("Media.RTCVideoEncoderInitEncodeSuccess", |
807 init_retval == WEBRTC_VIDEO_CODEC_OK); | 823 init_retval == WEBRTC_VIDEO_CODEC_OK); |
808 if (init_retval == WEBRTC_VIDEO_CODEC_OK) { | 824 if (init_retval == WEBRTC_VIDEO_CODEC_OK) { |
809 UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoEncoderProfile", | 825 UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoEncoderProfile", |
810 profile, | 826 profile, |
811 media::VIDEO_CODEC_PROFILE_MAX + 1); | 827 media::VIDEO_CODEC_PROFILE_MAX + 1); |
812 } | 828 } |
813 } | 829 } |
814 | 830 |
815 } // namespace content | 831 } // namespace content |