Chromium Code Reviews

Side by Side Diff: content/renderer/media/rtc_video_encoder.cc

Issue 20632002: Add media::VideoEncodeAccelerator with WebRTC integration (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@git-svn
Patch Set: 9830db80 (Missing changes from last patchset), created 7 years, 4 months ago
1 // Copyright 2013 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "content/renderer/media/rtc_video_encoder.h"
6
7 #include "base/bind.h"
8 #include "base/location.h"
9 #include "base/logging.h"
10 #include "base/memory/scoped_vector.h"
11 #include "base/message_loop/message_loop_proxy.h"
12 #include "base/synchronization/waitable_event.h"
13 #include "content/renderer/media/renderer_gpu_video_accelerator_factories.h"
14 #include "media/base/bitstream_buffer.h"
15 #include "media/base/video_frame.h"
16 #include "media/filters/gpu_video_accelerator_factories.h"
17 #include "media/video/video_encode_accelerator.h"
18
19 #define NOTIFY_ERROR(x) \
20 do { \
21 DLOG(ERROR) << "calling NotifyError(): " << x; \
22 NotifyError(x); \
23 } while (0)
24
25 namespace content {
26
27 // This private class of RTCVideoEncoder does the actual work of communicating
28 // with a media::VideoEncodeAccelerator for handling video encoding. It can
29 // be created on any thread, but should subsequently be posted to (and Destroy()
30 // called on) a single thread. Callbacks to RTCVideoEncoder are posted to the
31 // thread on which the instance was constructed.
32 //
33 // This class separates state related to the thread that RTCVideoEncoder
34 // operates on (presently the libjingle worker thread) from the thread that
35 // |gpu_factories_| provides for accelerator operations (presently the media
36 // thread). The RTCVideoEncoder class can be deleted directly by WebRTC, while
37 // RTCVideoEncoder::Impl stays around long enough to properly shut down the VEA.
38 class RTCVideoEncoder::Impl
39 : public media::VideoEncodeAccelerator::Client,
40 public base::RefCountedThreadSafe<RTCVideoEncoder::Impl> {
41 public:
42 Impl(
43 const base::WeakPtr<RTCVideoEncoder>& weak_encoder,
44 const scoped_refptr<RendererGpuVideoAcceleratorFactories>& gpu_factories);
45
46 // Create the VEA and call Initialize() on it. Called once per instantiation,
47 // and then the instance is bound forevermore to whichever thread made the
48 // call.
49 // RTCVideoEncoder expects to be able to call this function synchronously from
50 // its own thread, hence the |async_waiter| and |async_retval| arguments.
51 void CreateAndInitializeVEA(const gfx::Size& input_visible_size,
52 uint32 bitrate,
53 media::VideoCodecProfile profile,
54 base::WaitableEvent* async_waiter,
55 int32_t* async_retval);
56 // Enqueue a frame from WebRTC for encoding.
57 // RTCVideoEncoder expects to be able to call this function synchronously from
58 // its own thread, hence the |async_waiter| and |async_retval| arguments.
59 void Enqueue(const webrtc::I420VideoFrame* input_frame,
60 bool force_keyframe,
61 base::WaitableEvent* async_waiter,
62 int32_t* async_retval);
63
64 // RTCVideoEncoder is given a buffer to be passed to WebRTC through the
65 // RTCVideoEncoder::ReturnEncodedImage() function. When that is complete,
66 // the buffer is returned to Impl by its index using this function.
67 void UseOutputBitstreamBufferId(int32 bitstream_buffer_id);
68
69 // Request encoding parameter change for the underlying encoder.
70 void RequestEncodingParametersChange(uint32 bitrate, uint32 framerate);
71
72 // Destroy this Impl's encoder. The destructor is not explicitly called, as
73 // Impl is a base::RefCountedThreadSafe.
74 void Destroy();
75
76 // media::VideoEncodeAccelerator::Client implementation.
77 virtual void NotifyInitializeDone() OVERRIDE;
78 virtual void RequireBitstreamBuffers(int input_count,
79 const gfx::Size& input_coded_size,
80 size_t output_buffer_size) OVERRIDE;
81 virtual void BitstreamBufferReady(int32 bitstream_buffer_id,
82 size_t payload_size,
83 bool key_frame) OVERRIDE;
84 virtual void NotifyError(media::VideoEncodeAccelerator::Error error) OVERRIDE;
85
86 private:
87 friend class base::RefCountedThreadSafe<Impl>;
88
89 enum {
90 kInputBufferExtraCount = 1, // The number of extra input buffers to allocate
91 // beyond what is requested by
92 // VEA::RequireBitstreamBuffers().
93 kOutputBufferCount = 3,
94 };
95
96 virtual ~Impl();
97
98 // Perform encoding on an input frame from the input queue.
99 void EncodeOneFrame();
100
101 // Notify that an input frame is finished for encoding. |index| is the index
102 // of the completed frame in |input_buffers_|.
103 void EncodeFrameFinished(int index);
104
105 // Set up/signal |async_waiter_| and |async_retval_|; see declarations below.
106 void RegisterAsyncWaiter(base::WaitableEvent* waiter, int32_t* retval);
107 void SignalAsyncWaiter(int32_t retval);
108
109 base::ThreadChecker thread_checker_;
110
111 // Weak pointer to the parent RTCVideoEncoder, for posting back VEA::Client
112 // notifications.
113 const base::WeakPtr<RTCVideoEncoder> weak_encoder_;
114
115 // The message loop on which to post callbacks to |weak_encoder_|.
116 const scoped_refptr<base::MessageLoopProxy> encoder_message_loop_proxy_;
117
118 // Factory for creating VEAs, shared memory buffers, etc.
119 const scoped_refptr<RendererGpuVideoAcceleratorFactories> gpu_factories_;
120
121 // webrtc::VideoEncoder expects InitEncode() and Encode() to be synchronous.
122 // Do this by waiting on the |async_waiter_| and returning the return value in
123 // |async_retval_| when initialization completes, encoding completes, or
124 // an error occurs.
125 base::WaitableEvent* async_waiter_;
126 int32_t* async_retval_;
127
128 // The underlying VEA to perform encoding on.
129 scoped_ptr<media::VideoEncodeAccelerator> video_encoder_;
130
131 // Next input frame. Since there is at most one next frame, a single-element
132 // queue is sufficient.
133 const webrtc::I420VideoFrame* input_next_frame_;
134
135 // Whether to encode a keyframe next.
136 bool input_next_frame_keyframe_;
137
138 // Frame sizes.
139 gfx::Size input_frame_coded_size_;
140 gfx::Size input_visible_size_;
141
142 // Shared memory buffers for input/output with the VEA.
143 ScopedVector<base::SharedMemory> input_buffers_;
144 ScopedVector<base::SharedMemory> output_buffers_;
145
146 // Input buffers ready to be filled with input from Encode(). Used as a LIFO
147 // since we don't care about ordering.
148 std::vector<int> input_buffers_free_;
149
150 // Timestamp of first frame returned from encoder. We calculate subsequent
151 // capture times as deltas from this base.
152 base::Time time_base_;
153
154 DISALLOW_COPY_AND_ASSIGN(Impl);
155 };
156
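The comments above, together with the InitEncode()/Encode() implementations further down, all rely on one pattern: the calling thread posts work to the media thread and blocks on a WaitableEvent until the result has been written through a registered pointer. Below is a minimal, self-contained analogue of that bridge using only the C++ standard library (std::promise/std::thread in place of Chromium's base::WaitableEvent and message loops); all names in it are illustrative and not part of this change.

#include <cstdint>
#include <future>
#include <iostream>
#include <thread>

// Stand-in for the work Impl performs on the media thread, e.g.
// CreateAndInitializeVEA() or Enqueue().
int32_t DoWorkOnMediaThread() {
  return 0;  // e.g. WEBRTC_VIDEO_CODEC_OK
}

// Blocks the calling thread (the libjingle worker thread in this file) until
// the worker thread publishes a result, mirroring how InitEncode()/Encode()
// wait on |async_waiter_| and read back |async_retval_|.
int32_t CallSynchronously() {
  std::promise<int32_t> retval;                  // plays the role of |async_retval_|
  std::future<int32_t> result = retval.get_future();
  std::thread media_thread([&retval] {
    retval.set_value(DoWorkOnMediaThread());     // SignalAsyncWaiter() equivalent
  });
  int32_t value = result.get();                  // async_waiter_->Wait() equivalent
  media_thread.join();
  return value;
}

int main() { std::cout << CallSynchronously() << "\n"; }

The real code keeps a raw pointer pair (|async_waiter_|, |async_retval_|) instead of a one-shot promise so the same Impl can service repeated Encode() calls.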
157 RTCVideoEncoder::Impl::Impl(
158 const base::WeakPtr<RTCVideoEncoder>& weak_encoder,
159 const scoped_refptr<RendererGpuVideoAcceleratorFactories>& gpu_factories)
160 : weak_encoder_(weak_encoder),
161 encoder_message_loop_proxy_(base::MessageLoopProxy::current()),
162 gpu_factories_(gpu_factories),
163 async_waiter_(NULL),
164 async_retval_(NULL),
165 input_next_frame_(NULL),
166 input_next_frame_keyframe_(false) {
167 thread_checker_.DetachFromThread();
168 }
169
170 void RTCVideoEncoder::Impl::CreateAndInitializeVEA(
171 const gfx::Size& input_visible_size,
172 uint32 bitrate,
173 media::VideoCodecProfile profile,
174 base::WaitableEvent* async_waiter,
175 int32_t* async_retval) {
176 DVLOG(3) << "Impl::CreateAndInitializeVEA()";
177 DCHECK(thread_checker_.CalledOnValidThread());
178
179 RegisterAsyncWaiter(async_waiter, async_retval);
180
181 // Check for overflow converting bitrate (kilobits/sec) to bits/sec.
182 if (bitrate > kuint32max / 1000) {
183 NOTIFY_ERROR(media::VideoEncodeAccelerator::kInvalidArgumentError);
184 return;
185 }
186
187 video_encoder_ = gpu_factories_->CreateVideoEncodeAccelerator(this).Pass();
188 if (!video_encoder_) {
189 NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
190 return;
191 }
192 input_visible_size_ = input_visible_size;
193 video_encoder_->Initialize(
194 media::VideoFrame::I420, input_visible_size_, profile, bitrate * 1000);
195 }
196
197 void RTCVideoEncoder::Impl::Enqueue(const webrtc::I420VideoFrame* input_frame,
198 bool force_keyframe,
199 base::WaitableEvent* async_waiter,
200 int32_t* async_retval) {
201 DVLOG(3) << "Impl::Enqueue()";
202 DCHECK(thread_checker_.CalledOnValidThread());
203 DCHECK(!input_next_frame_);
204
205 RegisterAsyncWaiter(async_waiter, async_retval);
206 input_next_frame_ = input_frame;
207 input_next_frame_keyframe_ = force_keyframe;
208
209 if (!input_buffers_free_.empty())
210 EncodeOneFrame();
211 }
212
213 void RTCVideoEncoder::Impl::UseOutputBitstreamBufferId(
214 int32 bitstream_buffer_id) {
215 DVLOG(3) << "Impl::UseOutputBitstreamBufferIndex(): "
216 "bitstream_buffer_id=" << bitstream_buffer_id;
217 DCHECK(thread_checker_.CalledOnValidThread());
218 if (video_encoder_) {
219 video_encoder_->UseOutputBitstreamBuffer(media::BitstreamBuffer(
220 bitstream_buffer_id,
221 output_buffers_[bitstream_buffer_id]->handle(),
222 output_buffers_[bitstream_buffer_id]->mapped_size()));
223 }
224 }
225
226 void RTCVideoEncoder::Impl::RequestEncodingParametersChange(uint32 bitrate,
227 uint32 framerate) {
228 DVLOG(3) << "Impl::RequestEncodingParametersChange(): bitrate=" << bitrate
229 << ", framerate=" << framerate;
230 DCHECK(thread_checker_.CalledOnValidThread());
231
232 // Check for overflow converting bitrate (kilobits/sec) to bits/sec.
233 if (bitrate > kuint32max / 1000) {
234 NOTIFY_ERROR(media::VideoEncodeAccelerator::kInvalidArgumentError);
235 return;
236 }
237
238 if (video_encoder_)
239 video_encoder_->RequestEncodingParametersChange(bitrate * 1000, framerate);
240 }
241
242 void RTCVideoEncoder::Impl::Destroy() {
243 DVLOG(3) << "Impl::Destroy()";
244 DCHECK(thread_checker_.CalledOnValidThread());
245 if (video_encoder_)
246 video_encoder_.release()->Destroy();
247 }
248
249 void RTCVideoEncoder::Impl::NotifyInitializeDone() {
250 DVLOG(3) << "Impl::NotifyInitializeDone()";
251 DCHECK(thread_checker_.CalledOnValidThread());
252 SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK);
253 }
254
255 void RTCVideoEncoder::Impl::RequireBitstreamBuffers(
256 int input_count,
Cris Neckar 2013/08/12 23:29:02 Why is this signed?
257 const gfx::Size& input_coded_size,
258 size_t output_buffer_size) {
259 DVLOG(3) << "Impl::RequireBitstreamBuffers(): input_count=" << input_count
260 << ", input_coded_size=" << input_coded_size.ToString()
261 << ", output_buffer_size=" << output_buffer_size;
262 DCHECK(thread_checker_.CalledOnValidThread());
263
264 if (!video_encoder_)
265 return;
266
267 input_frame_coded_size_ = input_coded_size;
268
269 for (int i = 0; i < input_count + kInputBufferExtraCount; ++i) {
Cris Neckar 2013/08/12 23:29:02 I don't see anything that would prevent input_coun
270 base::SharedMemory* shm =
271 gpu_factories_->CreateSharedMemory(input_coded_size.GetArea() * 3 / 2);
272 if (!shm) {
273 DLOG(ERROR) << "Impl::RequireBitstreamBuffers(): "
274 "failed to create input buffer " << i;
275 NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
276 return;
277 }
278 input_buffers_.push_back(shm);
279 input_buffers_free_.push_back(i);
280 }
281
282 for (int i = 0; i < kOutputBufferCount; ++i) {
283 base::SharedMemory* shm =
284 gpu_factories_->CreateSharedMemory(output_buffer_size);
285 if (!shm) {
286 DLOG(ERROR) << "Impl::RequireBitstreamBuffers(): "
287 "failed to create output buffer " << i;
288 NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
289 return;
290 }
291 output_buffers_.push_back(shm);
292 }
293
294 // Immediately provide all output buffers to the VEA.
295 for (size_t i = 0; i < output_buffers_.size(); ++i) {
296 video_encoder_->UseOutputBitstreamBuffer(media::BitstreamBuffer(
297 i, output_buffers_[i]->handle(), output_buffers_[i]->mapped_size()));
298 }
299 }
300
301 void RTCVideoEncoder::Impl::BitstreamBufferReady(int32 bitstream_buffer_id,
302 size_t payload_size,
303 bool key_frame) {
304 DVLOG(3) << "Impl::BitstreamBufferReady(): "
305 "bitstream_buffer_id=" << bitstream_buffer_id
306 << ", payload_size=" << payload_size
307 << ", key_frame=" << key_frame;
308 DCHECK(thread_checker_.CalledOnValidThread());
309
310 if (bitstream_buffer_id < 0 ||
311 bitstream_buffer_id >= static_cast<int>(output_buffers_.size())) {
312 DLOG(ERROR) << "Impl::BitstreamBufferReady(): invalid bitstream_buffer_id="
313 << bitstream_buffer_id;
314 NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
315 return;
316 }
317 base::SharedMemory* output_buffer = output_buffers_[bitstream_buffer_id];
318 if (payload_size > output_buffer->mapped_size()) {
319 DLOG(ERROR) << "Impl::BitstreamBufferReady(): invalid payload_size="
320 << payload_size;
321 NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
322 return;
323 }
324
325 const base::Time now = base::Time::Now();
326 if (time_base_.is_null())
327 time_base_ = now;
328 const base::TimeDelta delta = now - time_base_;
329
330 scoped_ptr<webrtc::EncodedImage> image(new webrtc::EncodedImage(
331 reinterpret_cast<uint8_t*>(output_buffer->memory()),
332 payload_size,
333 output_buffer->mapped_size()));
334 image->_encodedWidth = input_visible_size_.width();
335 image->_encodedHeight = input_visible_size_.height();
336 // Convert capture time to 90 kHz RTP timestamp.
337 image->_timeStamp = (delta * 90000).InSeconds();
338 image->capture_time_ms_ = delta.InMilliseconds();
339 image->_frameType = (key_frame ? webrtc::kKeyFrame : webrtc::kDeltaFrame);
340 image->_completeFrame = true;
341
342 encoder_message_loop_proxy_->PostTask(
343 FROM_HERE,
344 base::Bind(&RTCVideoEncoder::ReturnEncodedImage,
345 weak_encoder_,
346 base::Passed(&image),
347 bitstream_buffer_id));
348 }
349
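The timestamp math in BitstreamBufferReady() above converts a capture-time delta into a 90 kHz RTP timestamp: multiplying the delta by 90000 and taking whole seconds yields the tick count, so a 33 ms delta becomes roughly 2970 ticks. A small standalone illustration with std::chrono in place of base::TimeDelta (names are illustrative only):

#include <chrono>
#include <cstdint>
#include <iostream>

// Mirrors image->_timeStamp = (delta * 90000).InSeconds(): the RTP video
// clock runs at 90 kHz, so ticks = seconds * 90000.
uint32_t ToRtp90kHz(std::chrono::microseconds delta) {
  // Fine for session-length deltas; a very long-running session would need
  // wider intermediate arithmetic.
  return static_cast<uint32_t>(delta.count() * 90000 / 1000000);
}

int main() {
  std::cout << ToRtp90kHz(std::chrono::microseconds(33000)) << "\n";  // prints 2970
}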
350 void RTCVideoEncoder::Impl::NotifyError(
351 media::VideoEncodeAccelerator::Error error) {
352 DVLOG(3) << "Impl::NotifyError(): error=" << error;
353 DCHECK(thread_checker_.CalledOnValidThread());
354 int32_t retval;
355 switch (error) {
356 case media::VideoEncodeAccelerator::kInvalidArgumentError:
357 retval = WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
358 break;
359 default:
360 retval = WEBRTC_VIDEO_CODEC_ERROR;
361 }
362
363 if (video_encoder_)
364 video_encoder_.release()->Destroy();
365
366 if (async_waiter_) {
367 SignalAsyncWaiter(retval);
368 } else {
369 encoder_message_loop_proxy_->PostTask(
370 FROM_HERE,
371 base::Bind(&RTCVideoEncoder::NotifyError, weak_encoder_, retval));
372 }
373 }
374
375 RTCVideoEncoder::Impl::~Impl() { DCHECK(!video_encoder_); }
376
377 void RTCVideoEncoder::Impl::EncodeOneFrame() {
378 DVLOG(3) << "Impl::EncodeOneFrame()";
379 DCHECK(thread_checker_.CalledOnValidThread());
380 DCHECK(input_next_frame_);
381 DCHECK(!input_buffers_free_.empty());
382
383 // EncodeOneFrame() may re-enter EncodeFrameFinished() if VEA::Encode() fails,
384 // we receive a VEA::NotifyError(), and the media::VideoFrame we pass to
385 // Encode() gets destroyed early. Handle this by resetting our
386 // input_next_frame_* state before we hand off the VideoFrame to the VEA.
387 const webrtc::I420VideoFrame* next_frame = input_next_frame_;
388 bool next_frame_keyframe = input_next_frame_keyframe_;
389 input_next_frame_ = NULL;
390 input_next_frame_keyframe_ = false;
391
392 if (!video_encoder_) {
393 SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_ERROR);
394 return;
395 }
396
397 const int index = input_buffers_free_.back();
398 base::SharedMemory* input_buffer = input_buffers_[index];
399
400 // Do a strided copy of the input frame to match the input requirements for
401 // the encoder.
402 // TODO(sheu): support zero-copy from WebRTC. http://crbug.com/269312
403 const uint8_t* src = next_frame->buffer(webrtc::kYPlane);
404 uint8* dst = reinterpret_cast<uint8*>(input_buffer->memory());
405 uint8* const y_dst = dst;
406 int width = input_frame_coded_size_.width();
407 int stride = next_frame->stride(webrtc::kYPlane);
Cris Neckar 2013/08/12 23:29:02 passing signed ints to memcpy scares me. Why is th
408 for (int i = 0; i < next_frame->height(); ++i) {
409 memcpy(dst, src, width);
410 src += stride;
411 dst += width;
412 }
413 src = next_frame->buffer(webrtc::kUPlane);
414 width = input_frame_coded_size_.width() / 2;
415 stride = next_frame->stride(webrtc::kUPlane);
416 uint8* const u_dst = dst;
417 for (int i = 0; i < next_frame->height() / 2; ++i) {
418 memcpy(dst, src, width);
419 src += stride;
420 dst += width;
421 }
422 src = next_frame->buffer(webrtc::kVPlane);
423 width = input_frame_coded_size_.width() / 2;
424 stride = next_frame->stride(webrtc::kVPlane);
425 uint8* const v_dst = dst;
426 for (int i = 0; i < next_frame->height() / 2; ++i) {
427 memcpy(dst, src, width);
428 src += stride;
429 dst += width;
430 }
431
432 scoped_refptr<media::VideoFrame> frame =
433 media::VideoFrame::WrapExternalYuvData(
434 media::VideoFrame::I420,
435 input_frame_coded_size_,
436 gfx::Rect(input_visible_size_),
437 input_visible_size_,
438 input_frame_coded_size_.width(),
439 input_frame_coded_size_.width() / 2,
440 input_frame_coded_size_.width() / 2,
441 y_dst,
442 u_dst,
443 v_dst,
444 base::TimeDelta(),
445 input_buffer->handle(),
446 base::Bind(&RTCVideoEncoder::Impl::EncodeFrameFinished, this, index));
447
448 video_encoder_->Encode(frame, next_frame_keyframe);
449 input_buffers_free_.pop_back();
450 SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK);
451 }
452
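The three copy loops in EncodeOneFrame() above are instances of one operation: a strided row copy in which source rows advance by the WebRTC frame's plane stride and destination rows are packed at the coded width (full width for Y, half width for U and V). A generic helper showing the idea in isolation (illustrative only, not part of this change):

#include <cstdint>
#include <cstring>

// Copy |rows| rows of |row_bytes| bytes from a plane with stride |src_stride|
// to a plane with stride |dst_stride|. Strides may exceed |row_bytes| when a
// plane carries padding.
void CopyPlane(const uint8_t* src, int src_stride,
               uint8_t* dst, int dst_stride,
               int row_bytes, int rows) {
  for (int i = 0; i < rows; ++i) {
    memcpy(dst, src, row_bytes);
    src += src_stride;
    dst += dst_stride;
  }
}

In EncodeOneFrame() the destination is packed, so |dst_stride| equals |row_bytes|, and the three planes are laid out back to back in the shared-memory input buffer.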
453 void RTCVideoEncoder::Impl::EncodeFrameFinished(int index) {
454 DVLOG(3) << "Impl::EncodeFrameFinished(): index=" << index;
455 DCHECK(thread_checker_.CalledOnValidThread());
456 DCHECK_GE(index, 0);
457 DCHECK_LT(index, static_cast<int>(input_buffers_.size()));
458 input_buffers_free_.push_back(index);
459 if (input_next_frame_)
460 EncodeOneFrame();
461 }
462
463 void RTCVideoEncoder::Impl::RegisterAsyncWaiter(base::WaitableEvent* waiter,
464 int32_t* retval) {
465 DCHECK(thread_checker_.CalledOnValidThread());
466 DCHECK(!async_waiter_);
467 DCHECK(!async_retval_);
468 async_waiter_ = waiter;
469 async_retval_ = retval;
470 }
471
472 void RTCVideoEncoder::Impl::SignalAsyncWaiter(int32_t retval) {
473 DCHECK(thread_checker_.CalledOnValidThread());
474 *async_retval_ = retval;
475 async_waiter_->Signal();
476 async_retval_ = NULL;
477 async_waiter_ = NULL;
478 }
479
480 #undef NOTIFY_ERROR
481
482 ////////////////////////////////////////////////////////////////////////////////
483 //
484 // RTCVideoEncoder
485 //
486 ////////////////////////////////////////////////////////////////////////////////
487
488 RTCVideoEncoder::RTCVideoEncoder(
489 webrtc::VideoCodecType type,
490 media::VideoCodecProfile profile,
491 const scoped_refptr<RendererGpuVideoAcceleratorFactories>& gpu_factories)
492 : video_codec_type_(type),
493 video_codec_profile_(profile),
494 gpu_factories_(gpu_factories),
495 encoded_image_callback_(NULL),
496 impl_status_(WEBRTC_VIDEO_CODEC_UNINITIALIZED) {
497 DVLOG(1) << "RTCVideoEncoder(): profile=" << profile;
498 }
499
500 RTCVideoEncoder::~RTCVideoEncoder() {
501 DCHECK(thread_checker_.CalledOnValidThread());
502 Release();
503 DCHECK(!impl_);
504 }
505
506 int32_t RTCVideoEncoder::InitEncode(const webrtc::VideoCodec* codec_settings,
507 int32_t number_of_cores,
508 uint32_t max_payload_size) {
509 DVLOG(1) << "InitEncode(): codecType=" << codec_settings->codecType
510 << ", width=" << codec_settings->width
511 << ", height=" << codec_settings->height
512 << ", startBitrate=" << codec_settings->startBitrate;
513 DCHECK(thread_checker_.CalledOnValidThread());
514 DCHECK(!impl_);
515
516 weak_this_factory_.reset(new base::WeakPtrFactory<RTCVideoEncoder>(this));
517 impl_ = new Impl(weak_this_factory_->GetWeakPtr(), gpu_factories_);
518 base::WaitableEvent initialization_waiter(true, false);
519 int32_t initialization_retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
520 gpu_factories_->GetMessageLoop()->PostTask(
521 FROM_HERE,
522 base::Bind(&RTCVideoEncoder::Impl::CreateAndInitializeVEA,
523 impl_,
524 gfx::Size(codec_settings->width, codec_settings->height),
525 codec_settings->startBitrate,
526 video_codec_profile_,
527 &initialization_waiter,
528 &initialization_retval));
529
530 // webrtc::VideoEncoder expects this call to be synchronous.
531 initialization_waiter.Wait();
532 return initialization_retval;
533 }
534
535 int32_t RTCVideoEncoder::Encode(
536 const webrtc::I420VideoFrame& input_image,
537 const webrtc::CodecSpecificInfo* codec_specific_info,
538 const std::vector<webrtc::VideoFrameType>* frame_types) {
539 DVLOG(3) << "Encode()";
540 // TODO(sheu): figure out why this check fails.
541 // DCHECK(thread_checker_.CalledOnValidThread());
542 if (!impl_) {
543 DVLOG(3) << "Encode(): returning impl_status_=" << impl_status_;
544 return impl_status_;
545 }
546
547 base::WaitableEvent encode_waiter(true, false);
548 int32_t encode_retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
549 gpu_factories_->GetMessageLoop()->PostTask(
550 FROM_HERE,
551 base::Bind(&RTCVideoEncoder::Impl::Enqueue,
552 impl_,
553 &input_image,
554 (frame_types->front() == webrtc::kKeyFrame),
555 &encode_waiter,
556 &encode_retval));
557
558 // webrtc::VideoEncoder expects this call to be synchronous.
559 encode_waiter.Wait();
560 DVLOG(3) << "Encode(): returning encode_retval=" << encode_retval;
561 return encode_retval;
562 }
563
564 int32_t RTCVideoEncoder::RegisterEncodeCompleteCallback(
565 webrtc::EncodedImageCallback* callback) {
566 DVLOG(3) << "RegisterEncodeCompleteCallback()";
567 DCHECK(thread_checker_.CalledOnValidThread());
568 if (!impl_) {
569 DVLOG(3) << "RegisterEncodeCompleteCallback(): returning " << impl_status_;
570 return impl_status_;
571 }
572
573 encoded_image_callback_ = callback;
574 return WEBRTC_VIDEO_CODEC_OK;
575 }
576
577 int32_t RTCVideoEncoder::Release() {
578 DVLOG(3) << "Release()";
579 DCHECK(thread_checker_.CalledOnValidThread());
580
581 // Reset |gpu_factories_|, in case we reuse this encoder.
582 gpu_factories_->Abort();
583 gpu_factories_ = gpu_factories_->Clone();
584 if (impl_) {
585 gpu_factories_->GetMessageLoop()->PostTask(
586 FROM_HERE, base::Bind(&RTCVideoEncoder::Impl::Destroy, impl_));
587 impl_ = NULL;
588 weak_this_factory_.reset();
589 impl_status_ = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
590 }
591 return WEBRTC_VIDEO_CODEC_OK;
592 }
593
594 int32_t RTCVideoEncoder::SetChannelParameters(uint32_t packet_loss, int rtt) {
595 DVLOG(3) << "SetChannelParameters(): packet_loss=" << packet_loss
596 << ", rtt=" << rtt;
597 DCHECK(thread_checker_.CalledOnValidThread());
598 // Ignored.
599 return WEBRTC_VIDEO_CODEC_OK;
600 }
601
602 int32_t RTCVideoEncoder::SetRates(uint32_t new_bit_rate, uint32_t frame_rate) {
603 DVLOG(3) << "SetRates(): new_bit_rate=" << new_bit_rate
604 << ", frame_rate=" << frame_rate;
605 DCHECK(thread_checker_.CalledOnValidThread());
606 if (!impl_) {
607 DVLOG(3) << "SetRates(): returning " << impl_status_;
608 return impl_status_;
609 }
610
611 gpu_factories_->GetMessageLoop()->PostTask(
612 FROM_HERE,
613 base::Bind(&RTCVideoEncoder::Impl::RequestEncodingParametersChange,
614 impl_,
615 new_bit_rate,
616 frame_rate));
617 return WEBRTC_VIDEO_CODEC_OK;
618 }
619
620 void RTCVideoEncoder::ReturnEncodedImage(scoped_ptr<webrtc::EncodedImage> image,
621 int32 bitstream_buffer_id) {
622 DCHECK(thread_checker_.CalledOnValidThread());
623 DVLOG(3) << "ReturnEncodedImage(): "
624 "bitstream_buffer_id=" << bitstream_buffer_id;
625
626 if (!encoded_image_callback_)
627 return;
628
629 webrtc::CodecSpecificInfo info;
630 info.codecType = video_codec_type_;
631
632 // Generate a header describing a single fragment.
633 webrtc::RTPFragmentationHeader header;
634 header.VerifyAndAllocateFragmentationHeader(1);
635 header.fragmentationOffset[0] = 0;
636 header.fragmentationLength[0] = image->_length;
637 header.fragmentationPlType[0] = 0;
638 header.fragmentationTimeDiff[0] = 0;
639
640 int32_t retval = encoded_image_callback_->Encoded(*image, &info, &header);
641 if (retval < 0) {
642 DVLOG(2) << "ReturnEncodedImage(): encoded_image_callback_ returned "
643 << retval;
644 }
645
646 // The call through webrtc::EncodedImageCallback is synchronous, so we can
647 // immediately recycle the output buffer back to the Impl.
648 gpu_factories_->GetMessageLoop()->PostTask(
649 FROM_HERE,
650 base::Bind(&RTCVideoEncoder::Impl::UseOutputBitstreamBufferId,
651 impl_,
652 bitstream_buffer_id));
653 }
654
655 void RTCVideoEncoder::NotifyError(int32_t error) {
656 DCHECK(thread_checker_.CalledOnValidThread());
657 DVLOG(1) << "NotifyError(): error=" << error;
658
659 impl_status_ = error;
660 gpu_factories_->GetMessageLoop()->PostTask(
661 FROM_HERE, base::Bind(&RTCVideoEncoder::Impl::Destroy, impl_));
662 impl_ = NULL;
663 }
664
665 } // namespace content
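For orientation, here is the call sequence a WebRTC embedder would make against the webrtc::VideoEncoder interface that RTCVideoEncoder implements, restricted to the methods this file exercises. The encoder, the input frame, the callback object, and the concrete settings values are placeholders assumed to come from the surrounding WebRTC stack, and the necessary WebRTC headers are assumed to be on the embedder's include path.

#include <vector>

// Hypothetical driver; error handling and frame acquisition are elided.
void RunEncoderOnce(webrtc::VideoEncoder* encoder,
                    const webrtc::I420VideoFrame& frame,
                    webrtc::EncodedImageCallback* callback) {
  webrtc::VideoCodec codec_settings = {};             // fields as read by InitEncode()
  codec_settings.codecType = webrtc::kVideoCodecVP8;  // e.g. VP8; must match the type
                                                      // the encoder was created with
  codec_settings.width = 640;
  codec_settings.height = 480;
  codec_settings.startBitrate = 1000;                 // kilobits/sec

  encoder->RegisterEncodeCompleteCallback(callback);
  encoder->InitEncode(&codec_settings, 1 /* number_of_cores */,
                      1200 /* max_payload_size */);

  std::vector<webrtc::VideoFrameType> frame_types(1, webrtc::kKeyFrame);
  encoder->Encode(frame, NULL /* codec_specific_info */, &frame_types);

  encoder->SetRates(800 /* new_bit_rate */, 30 /* frame_rate */);
  encoder->Release();
}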