// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/renderer/media/rtc_video_encoder.h"

#include "base/bind.h"
#include "base/location.h"
#include "base/logging.h"
#include "base/memory/scoped_vector.h"
#include "base/message_loop/message_loop_proxy.h"
#include "base/synchronization/waitable_event.h"
#include "content/renderer/media/renderer_gpu_video_accelerator_factories.h"
#include "media/base/bitstream_buffer.h"
#include "media/base/video_frame.h"
#include "media/filters/gpu_video_accelerator_factories.h"
#include "media/video/video_encode_accelerator.h"

#define NOTIFY_ERROR(x)                              \
  do {                                               \
    DLOG(ERROR) << "calling NotifyError(): " << x;   \
    NotifyError(x);                                  \
  } while (0)

namespace content {

// This private class of RTCVideoEncoder does the actual work of communicating
// with a media::VideoEncodeAccelerator for handling video encoding. It can
// be created on any thread, but should subsequently be posted to (and Destroy()
// called on) a single thread. Callbacks to RTCVideoEncoder are posted to the
// thread on which the instance was constructed.
//
// This class separates state related to the thread that RTCVideoEncoder
// operates on (presently the libjingle worker thread) from the thread that
// |gpu_factories_| provides for accelerator operations (presently the media
// thread). The RTCVideoEncoder class can be deleted directly by WebRTC, while
// RTCVideoEncoder::Impl stays around long enough to properly shut down the VEA.
class RTCVideoEncoder::Impl
    : public media::VideoEncodeAccelerator::Client,
      public base::RefCountedThreadSafe<RTCVideoEncoder::Impl> {
 public:
  Impl(
      const base::WeakPtr<RTCVideoEncoder>& weak_encoder,
      const scoped_refptr<RendererGpuVideoAcceleratorFactories>& gpu_factories);

  // Create the VEA and call Initialize() on it. Called once per instantiation,
  // and then the instance is bound forevermore to whichever thread made the
  // call.
  // RTCVideoEncoder expects to be able to call this function synchronously from
  // its own thread, hence the |async_waiter| and |async_retval| arguments.
  void CreateAndInitializeVEA(const gfx::Size& input_visible_size,
                              uint32 bitrate,
                              media::VideoCodecProfile profile,
                              base::WaitableEvent* async_waiter,
                              int32_t* async_retval);
  // Enqueue a frame from WebRTC for encoding.
  // RTCVideoEncoder expects to be able to call this function synchronously from
  // its own thread, hence the |async_waiter| and |async_retval| arguments.
  void Enqueue(const webrtc::I420VideoFrame* input_frame,
               bool force_keyframe,
               base::WaitableEvent* async_waiter,
               int32_t* async_retval);

  // RTCVideoEncoder is given a buffer to be passed to WebRTC through the
  // RTCVideoEncoder::ReturnEncodedImage() function. When that is complete,
  // the buffer is returned to Impl by its index using this function.
  void UseOutputBitstreamBufferId(int32 bitstream_buffer_id);

  // Request encoding parameter change for the underlying encoder.
  void RequestEncodingParametersChange(uint32 bitrate, uint32 framerate);

  // Destroy this Impl's encoder. The destructor is not explicitly called, as
  // Impl is a base::RefCountedThreadSafe.
  void Destroy();

  // media::VideoEncodeAccelerator::Client implementation.
  virtual void NotifyInitializeDone() OVERRIDE;
  virtual void RequireBitstreamBuffers(unsigned int input_count,
                                       const gfx::Size& input_coded_size,
                                       size_t output_buffer_size) OVERRIDE;
  virtual void BitstreamBufferReady(int32 bitstream_buffer_id,
                                    size_t payload_size,
                                    bool key_frame) OVERRIDE;
  virtual void NotifyError(media::VideoEncodeAccelerator::Error error) OVERRIDE;

 private:
  friend class base::RefCountedThreadSafe<Impl>;

  enum {
    kInputBufferExtraCount = 1,  // The number of input buffers allocated, more
                                 // than what is requested by
                                 // VEA::RequireBitstreamBuffers().
    kOutputBufferCount = 3,
  };

  virtual ~Impl();

  // Perform encoding on an input frame from the input queue.
  void EncodeOneFrame();

  // Notify that an input frame is finished for encoding. |index| is the index
  // of the completed frame in |input_buffers_|.
  void EncodeFrameFinished(int index);

  // Set up/signal |async_waiter_| and |async_retval_|; see declarations below.
  void RegisterAsyncWaiter(base::WaitableEvent* waiter, int32_t* retval);
  void SignalAsyncWaiter(int32_t retval);

  base::ThreadChecker thread_checker_;

  // Weak pointer to the parent RTCVideoEncoder, for posting back VEA::Client
  // notifications.
  const base::WeakPtr<RTCVideoEncoder> weak_encoder_;

  // The message loop on which to post callbacks to |weak_encoder_|.
  const scoped_refptr<base::MessageLoopProxy> encoder_message_loop_proxy_;

  // Factory for creating VEAs, shared memory buffers, etc.
  const scoped_refptr<RendererGpuVideoAcceleratorFactories> gpu_factories_;

  // webrtc::VideoEncoder expects InitEncode() and Encode() to be synchronous.
  // Do this by waiting on the |async_waiter_| and returning the return value in
  // |async_retval_| when initialization completes, encoding completes, or
  // an error occurs.
  base::WaitableEvent* async_waiter_;
  int32_t* async_retval_;

  // The underlying VEA to perform encoding on.
  scoped_ptr<media::VideoEncodeAccelerator> video_encoder_;

  // Next input frame. Since there is at most one next frame, a single-element
  // queue is sufficient.
  const webrtc::I420VideoFrame* input_next_frame_;

  // Whether to encode a keyframe next.
  bool input_next_frame_keyframe_;

  // Frame sizes.
  gfx::Size input_frame_coded_size_;
  gfx::Size input_visible_size_;

  // Shared memory buffers for input/output with the VEA.
  ScopedVector<base::SharedMemory> input_buffers_;
  ScopedVector<base::SharedMemory> output_buffers_;

  // Input buffers ready to be filled with input from Encode(). As a LIFO since
  // we don't care about ordering.
  std::vector<int> input_buffers_free_;

  // Timestamp of first frame returned from encoder. We calculate subsequent
  // capture times as deltas from this base.
  base::Time time_base_;

  DISALLOW_COPY_AND_ASSIGN(Impl);
};

RTCVideoEncoder::Impl::Impl(
    const base::WeakPtr<RTCVideoEncoder>& weak_encoder,
    const scoped_refptr<RendererGpuVideoAcceleratorFactories>& gpu_factories)
    : weak_encoder_(weak_encoder),
      encoder_message_loop_proxy_(base::MessageLoopProxy::current()),
      gpu_factories_(gpu_factories),
      async_waiter_(NULL),
      async_retval_(NULL),
      input_next_frame_(NULL),
      input_next_frame_keyframe_(false) {
  thread_checker_.DetachFromThread();
}

void RTCVideoEncoder::Impl::CreateAndInitializeVEA(
    const gfx::Size& input_visible_size,
    uint32 bitrate,
    media::VideoCodecProfile profile,
    base::WaitableEvent* async_waiter,
    int32_t* async_retval) {
  DVLOG(3) << "Impl::CreateAndInitializeVEA()";
  DCHECK(thread_checker_.CalledOnValidThread());

  RegisterAsyncWaiter(async_waiter, async_retval);

  // Check for overflow converting bitrate (kilobits/sec) to bits/sec.
  if (bitrate > kuint32max / 1000) {
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kInvalidArgumentError);
    return;
  }

  video_encoder_ = gpu_factories_->CreateVideoEncodeAccelerator(this).Pass();
  if (!video_encoder_) {
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
    return;
  }
  input_visible_size_ = input_visible_size;
  video_encoder_->Initialize(
      media::VideoFrame::I420, input_visible_size_, profile, bitrate * 1000);
}

void RTCVideoEncoder::Impl::Enqueue(const webrtc::I420VideoFrame* input_frame,
                                    bool force_keyframe,
                                    base::WaitableEvent* async_waiter,
                                    int32_t* async_retval) {
  DVLOG(3) << "Impl::Enqueue()";
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(!input_next_frame_);

  RegisterAsyncWaiter(async_waiter, async_retval);
  input_next_frame_ = input_frame;
  input_next_frame_keyframe_ = force_keyframe;

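  // If no input buffer is currently free, leave the frame queued;
  // EncodeFrameFinished() will call EncodeOneFrame() once the VEA returns a
  // buffer to the free list.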
  if (!input_buffers_free_.empty())
    EncodeOneFrame();
}

void RTCVideoEncoder::Impl::UseOutputBitstreamBufferId(
    int32 bitstream_buffer_id) {
  DVLOG(3) << "Impl::UseOutputBitstreamBufferId(): "
              "bitstream_buffer_id=" << bitstream_buffer_id;
  DCHECK(thread_checker_.CalledOnValidThread());
  if (video_encoder_) {
    video_encoder_->UseOutputBitstreamBuffer(media::BitstreamBuffer(
        bitstream_buffer_id,
        output_buffers_[bitstream_buffer_id]->handle(),
        output_buffers_[bitstream_buffer_id]->mapped_size()));
  }
}

void RTCVideoEncoder::Impl::RequestEncodingParametersChange(uint32 bitrate,
                                                            uint32 framerate) {
  DVLOG(3) << "Impl::RequestEncodingParametersChange(): bitrate=" << bitrate
           << ", framerate=" << framerate;
  DCHECK(thread_checker_.CalledOnValidThread());

  // Check for overflow converting bitrate (kilobits/sec) to bits/sec.
  if (bitrate > kuint32max / 1000) {
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kInvalidArgumentError);
    return;
  }

  if (video_encoder_)
    video_encoder_->RequestEncodingParametersChange(bitrate * 1000, framerate);
}

void RTCVideoEncoder::Impl::Destroy() {
  DVLOG(3) << "Impl::Destroy()";
  DCHECK(thread_checker_.CalledOnValidThread());
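  // VEA::Destroy() deletes the encoder itself, so release scoped_ptr ownership
  // here rather than letting the scoped_ptr delete it a second time.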
  if (video_encoder_)
    video_encoder_.release()->Destroy();
}

void RTCVideoEncoder::Impl::NotifyInitializeDone() {
  DVLOG(3) << "Impl::NotifyInitializeDone()";
  DCHECK(thread_checker_.CalledOnValidThread());
  SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK);
}

void RTCVideoEncoder::Impl::RequireBitstreamBuffers(
    unsigned int input_count,
    const gfx::Size& input_coded_size,
    size_t output_buffer_size) {
  DVLOG(3) << "Impl::RequireBitstreamBuffers(): input_count=" << input_count
           << ", input_coded_size=" << input_coded_size.ToString()
           << ", output_buffer_size=" << output_buffer_size;
  DCHECK(thread_checker_.CalledOnValidThread());

  if (!video_encoder_)
    return;

  input_frame_coded_size_ = input_coded_size;

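  // Allocate the input buffers the VEA asked for, plus kInputBufferExtraCount
  // spares. Each buffer holds one I420 frame at the coded size, i.e. 3/2 bytes
  // per pixel (full-resolution Y plus quarter-resolution U and V).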
  for (unsigned int i = 0; i < input_count + kInputBufferExtraCount; ++i) {
    base::SharedMemory* shm =
        gpu_factories_->CreateSharedMemory(input_coded_size.GetArea() * 3 / 2);
    if (!shm) {
      DLOG(ERROR) << "Impl::RequireBitstreamBuffers(): "
                     "failed to create input buffer " << i;
      NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
      return;
    }
    input_buffers_.push_back(shm);
    input_buffers_free_.push_back(i);
  }

  for (int i = 0; i < kOutputBufferCount; ++i) {
    base::SharedMemory* shm =
        gpu_factories_->CreateSharedMemory(output_buffer_size);
    if (!shm) {
      DLOG(ERROR) << "Impl::RequireBitstreamBuffers(): "
                     "failed to create output buffer " << i;
      NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
      return;
    }
    output_buffers_.push_back(shm);
  }

  // Immediately provide all output buffers to the VEA.
  for (size_t i = 0; i < output_buffers_.size(); ++i) {
    video_encoder_->UseOutputBitstreamBuffer(media::BitstreamBuffer(
        i, output_buffers_[i]->handle(), output_buffers_[i]->mapped_size()));
  }
}

void RTCVideoEncoder::Impl::BitstreamBufferReady(int32 bitstream_buffer_id,
                                                 size_t payload_size,
                                                 bool key_frame) {
  DVLOG(3) << "Impl::BitstreamBufferReady(): "
              "bitstream_buffer_id=" << bitstream_buffer_id
           << ", payload_size=" << payload_size
           << ", key_frame=" << key_frame;
  DCHECK(thread_checker_.CalledOnValidThread());

  if (bitstream_buffer_id < 0 ||
      bitstream_buffer_id >= static_cast<int>(output_buffers_.size())) {
    DLOG(ERROR) << "Impl::BitstreamBufferReady(): invalid bitstream_buffer_id="
                << bitstream_buffer_id;
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
    return;
  }
  base::SharedMemory* output_buffer = output_buffers_[bitstream_buffer_id];
  if (payload_size > output_buffer->mapped_size()) {
    DLOG(ERROR) << "Impl::BitstreamBufferReady(): invalid payload_size="
                << payload_size;
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
    return;
  }

  const base::Time now = base::Time::Now();
  if (time_base_.is_null())
    time_base_ = now;
  const base::TimeDelta delta = now - time_base_;

  scoped_ptr<webrtc::EncodedImage> image(new webrtc::EncodedImage(
      reinterpret_cast<uint8_t*>(output_buffer->memory()),
      payload_size,
      output_buffer->mapped_size()));
  image->_encodedWidth = input_visible_size_.width();
  image->_encodedHeight = input_visible_size_.height();
  // Convert capture time to 90 kHz RTP timestamp.
  image->_timeStamp = (delta * 90000).InSeconds();
  image->capture_time_ms_ = delta.InMilliseconds();
  image->_frameType = (key_frame ? webrtc::kKeyFrame : webrtc::kDeltaFrame);
  image->_completeFrame = true;

  encoder_message_loop_proxy_->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::ReturnEncodedImage,
                 weak_encoder_,
                 base::Passed(&image),
                 bitstream_buffer_id));
}

void RTCVideoEncoder::Impl::NotifyError(
    media::VideoEncodeAccelerator::Error error) {
  DVLOG(3) << "Impl::NotifyError(): error=" << error;
  DCHECK(thread_checker_.CalledOnValidThread());
  int32_t retval;
  switch (error) {
    case media::VideoEncodeAccelerator::kInvalidArgumentError:
      retval = WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
      break;
    default:
      retval = WEBRTC_VIDEO_CODEC_ERROR;
  }

  if (video_encoder_)
    video_encoder_.release()->Destroy();

  if (async_waiter_) {
    SignalAsyncWaiter(retval);
  } else {
    encoder_message_loop_proxy_->PostTask(
        FROM_HERE,
        base::Bind(&RTCVideoEncoder::NotifyError, weak_encoder_, retval));
  }
}

RTCVideoEncoder::Impl::~Impl() { DCHECK(!video_encoder_); }

void RTCVideoEncoder::Impl::EncodeOneFrame() {
  DVLOG(3) << "Impl::EncodeOneFrame()";
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(input_next_frame_);
  DCHECK(!input_buffers_free_.empty());

  // EncodeOneFrame() may re-enter EncodeFrameFinished() if VEA::Encode() fails,
  // we receive a VEA::NotifyError(), and the media::VideoFrame we pass to
  // Encode() gets destroyed early. Handle this by resetting our
  // input_next_frame_* state before we hand off the VideoFrame to the VEA.
  const webrtc::I420VideoFrame* next_frame = input_next_frame_;
  bool next_frame_keyframe = input_next_frame_keyframe_;
  input_next_frame_ = NULL;
  input_next_frame_keyframe_ = false;

  if (!video_encoder_) {
    SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_ERROR);
    return;
  }

  const int index = input_buffers_free_.back();
  base::SharedMemory* input_buffer = input_buffers_[index];

  // Do a strided copy of the input frame to match the input requirements for
  // the encoder.
  // TODO(sheu): support zero-copy from WebRTC. http://crbug.com/269312
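  // Y plane: copy the luma rows one at a time, packing the destination rows at
  // the coded width the encoder expects.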
  const uint8_t* src = next_frame->buffer(webrtc::kYPlane);
  uint8* dst = reinterpret_cast<uint8*>(input_buffer->memory());
  uint8* const y_dst = dst;
  int width = input_frame_coded_size_.width();
  int stride = next_frame->stride(webrtc::kYPlane);
  for (int i = 0; i < next_frame->height(); ++i) {
    memcpy(dst, src, width);
    src += stride;
    dst += width;
  }
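  // U plane: chroma is subsampled 2x2 in I420, so the rows are half the width
  // and there are half as many of them.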
  src = next_frame->buffer(webrtc::kUPlane);
  width = input_frame_coded_size_.width() / 2;
  stride = next_frame->stride(webrtc::kUPlane);
  for (int i = 0; i < next_frame->height() / 2; ++i) {
    memcpy(dst, src, width);
    src += stride;
    dst += width;
  }
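  // V plane: same half-resolution copy as the U plane.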
  src = next_frame->buffer(webrtc::kVPlane);
  width = input_frame_coded_size_.width() / 2;
  stride = next_frame->stride(webrtc::kVPlane);
  for (int i = 0; i < next_frame->height() / 2; ++i) {
    memcpy(dst, src, width);
    src += stride;
    dst += width;
  }

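  // Wrap the filled shared memory as a media::VideoFrame for the VEA. The
  // bound EncodeFrameFinished() callback runs once the frame is destroyed,
  // returning |index| to the input-buffer free list.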
  scoped_refptr<media::VideoFrame> frame =
      media::VideoFrame::WrapExternalSharedMemory(
          media::VideoFrame::I420,
          input_frame_coded_size_,
          gfx::Rect(input_visible_size_),
          input_visible_size_,
          y_dst,
          input_buffer->handle(),
          base::TimeDelta(),
          base::Bind(&RTCVideoEncoder::Impl::EncodeFrameFinished, this, index));

  video_encoder_->Encode(frame, next_frame_keyframe);
  input_buffers_free_.pop_back();
  SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK);
}

void RTCVideoEncoder::Impl::EncodeFrameFinished(int index) {
  DVLOG(3) << "Impl::EncodeFrameFinished(): index=" << index;
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK_GE(index, 0);
  DCHECK_LT(index, static_cast<int>(input_buffers_.size()));
  input_buffers_free_.push_back(index);
  if (input_next_frame_)
    EncodeOneFrame();
}

void RTCVideoEncoder::Impl::RegisterAsyncWaiter(base::WaitableEvent* waiter,
                                                int32_t* retval) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(!async_waiter_);
  DCHECK(!async_retval_);
  async_waiter_ = waiter;
  async_retval_ = retval;
}

void RTCVideoEncoder::Impl::SignalAsyncWaiter(int32_t retval) {
  DCHECK(thread_checker_.CalledOnValidThread());
  *async_retval_ = retval;
  async_waiter_->Signal();
  async_retval_ = NULL;
  async_waiter_ = NULL;
}

#undef NOTIFY_ERROR

////////////////////////////////////////////////////////////////////////////////
//
// RTCVideoEncoder
//
////////////////////////////////////////////////////////////////////////////////

RTCVideoEncoder::RTCVideoEncoder(
    webrtc::VideoCodecType type,
    media::VideoCodecProfile profile,
    const scoped_refptr<RendererGpuVideoAcceleratorFactories>& gpu_factories)
    : video_codec_type_(type),
      video_codec_profile_(profile),
      gpu_factories_(gpu_factories),
      encoded_image_callback_(NULL),
      impl_status_(WEBRTC_VIDEO_CODEC_UNINITIALIZED) {
  DVLOG(1) << "RTCVideoEncoder(): profile=" << profile;
}

RTCVideoEncoder::~RTCVideoEncoder() {
  DCHECK(thread_checker_.CalledOnValidThread());
  Release();
  DCHECK(!impl_);
}

int32_t RTCVideoEncoder::InitEncode(const webrtc::VideoCodec* codec_settings,
                                    int32_t number_of_cores,
                                    uint32_t max_payload_size) {
  DVLOG(1) << "InitEncode(): codecType=" << codec_settings->codecType
           << ", width=" << codec_settings->width
           << ", height=" << codec_settings->height
           << ", startBitrate=" << codec_settings->startBitrate;
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(!impl_);

  weak_this_factory_.reset(new base::WeakPtrFactory<RTCVideoEncoder>(this));
  impl_ = new Impl(weak_this_factory_->GetWeakPtr(), gpu_factories_);
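  // Manual-reset, initially-unsignaled event; Impl signals it and fills in
  // |initialization_retval| when VEA initialization succeeds or fails.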
  base::WaitableEvent initialization_waiter(true, false);
  int32_t initialization_retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  gpu_factories_->GetMessageLoop()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::Impl::CreateAndInitializeVEA,
                 impl_,
                 gfx::Size(codec_settings->width, codec_settings->height),
                 codec_settings->startBitrate,
                 video_codec_profile_,
                 &initialization_waiter,
                 &initialization_retval));

  // webrtc::VideoEncoder expects this call to be synchronous.
  initialization_waiter.Wait();
  return initialization_retval;
}

int32_t RTCVideoEncoder::Encode(
    const webrtc::I420VideoFrame& input_image,
    const webrtc::CodecSpecificInfo* codec_specific_info,
    const std::vector<webrtc::VideoFrameType>* frame_types) {
  DVLOG(3) << "Encode()";
  // TODO(sheu): figure out why this check fails.
  // DCHECK(thread_checker_.CalledOnValidThread());
  if (!impl_) {
    DVLOG(3) << "Encode(): returning impl_status_=" << impl_status_;
    return impl_status_;
  }

  base::WaitableEvent encode_waiter(true, false);
  int32_t encode_retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  gpu_factories_->GetMessageLoop()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::Impl::Enqueue,
                 impl_,
                 &input_image,
                 (frame_types->front() == webrtc::kKeyFrame),
                 &encode_waiter,
                 &encode_retval));

  // webrtc::VideoEncoder expects this call to be synchronous.
  encode_waiter.Wait();
  DVLOG(3) << "Encode(): returning encode_retval=" << encode_retval;
  return encode_retval;
}

int32_t RTCVideoEncoder::RegisterEncodeCompleteCallback(
    webrtc::EncodedImageCallback* callback) {
  DVLOG(3) << "RegisterEncodeCompleteCallback()";
  DCHECK(thread_checker_.CalledOnValidThread());
  if (!impl_) {
    DVLOG(3) << "RegisterEncodeCompleteCallback(): returning " << impl_status_;
    return impl_status_;
  }

  encoded_image_callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t RTCVideoEncoder::Release() {
  DVLOG(3) << "Release()";
  DCHECK(thread_checker_.CalledOnValidThread());

  // Reset the gpu_factories_, in case we reuse this encoder.
  gpu_factories_->Abort();
  gpu_factories_ = gpu_factories_->Clone();
  if (impl_) {
    gpu_factories_->GetMessageLoop()->PostTask(
        FROM_HERE, base::Bind(&RTCVideoEncoder::Impl::Destroy, impl_));
    impl_ = NULL;
    weak_this_factory_.reset();
    impl_status_ = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t RTCVideoEncoder::SetChannelParameters(uint32_t packet_loss, int rtt) {
  DVLOG(3) << "SetChannelParameters(): packet_loss=" << packet_loss
           << ", rtt=" << rtt;
  DCHECK(thread_checker_.CalledOnValidThread());
  // Ignored.
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t RTCVideoEncoder::SetRates(uint32_t new_bit_rate, uint32_t frame_rate) {
  DVLOG(3) << "SetRates(): new_bit_rate=" << new_bit_rate
           << ", frame_rate=" << frame_rate;
  DCHECK(thread_checker_.CalledOnValidThread());
  if (!impl_) {
    DVLOG(3) << "SetRates(): returning " << impl_status_;
    return impl_status_;
  }

  gpu_factories_->GetMessageLoop()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::Impl::RequestEncodingParametersChange,
                 impl_,
                 new_bit_rate,
                 frame_rate));
  return WEBRTC_VIDEO_CODEC_OK;
}

void RTCVideoEncoder::ReturnEncodedImage(scoped_ptr<webrtc::EncodedImage> image,
                                         int32 bitstream_buffer_id) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DVLOG(3) << "ReturnEncodedImage(): "
              "bitstream_buffer_id=" << bitstream_buffer_id;

  if (!encoded_image_callback_)
    return;

  webrtc::CodecSpecificInfo info;
  info.codecType = video_codec_type_;

  // Generate a header describing a single fragment.
  webrtc::RTPFragmentationHeader header;
  header.VerifyAndAllocateFragmentationHeader(1);
  header.fragmentationOffset[0] = 0;
  header.fragmentationLength[0] = image->_length;
  header.fragmentationPlType[0] = 0;
  header.fragmentationTimeDiff[0] = 0;

  int32_t retval = encoded_image_callback_->Encoded(*image, &info, &header);
  if (retval < 0) {
    DVLOG(2) << "ReturnEncodedImage(): encoded_image_callback_ returned "
             << retval;
  }

  // The call through webrtc::EncodedImageCallback is synchronous, so we can
  // immediately recycle the output buffer back to the Impl.
  gpu_factories_->GetMessageLoop()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::Impl::UseOutputBitstreamBufferId,
                 impl_,
                 bitstream_buffer_id));
}

void RTCVideoEncoder::NotifyError(int32_t error) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DVLOG(1) << "NotifyError(): error=" << error;

  impl_status_ = error;
  gpu_factories_->GetMessageLoop()->PostTask(
      FROM_HERE, base::Bind(&RTCVideoEncoder::Impl::Destroy, impl_));
  impl_ = NULL;
}

}  // namespace content