Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(171)

Unified Diff: content/renderer/media/rtc_video_encoder.cc

Issue 20632002: Add media::VideoEncodeAccelerator with WebRTC integration (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@git-svn
Patch Set: d3982027 CQ nits. Created 7 years, 4 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View side-by-side diff with in-line comments
Download patch
« no previous file with comments | « content/renderer/media/rtc_video_encoder.h ('k') | content/renderer/media/rtc_video_encoder_factory.h » ('j') | no next file with comments »
Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
Index: content/renderer/media/rtc_video_encoder.cc
diff --git a/content/renderer/media/rtc_video_encoder.cc b/content/renderer/media/rtc_video_encoder.cc
new file mode 100644
index 0000000000000000000000000000000000000000..416317d36354f531bca3797711529fc9cc5b5f3f
--- /dev/null
+++ b/content/renderer/media/rtc_video_encoder.cc
@@ -0,0 +1,658 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "content/renderer/media/rtc_video_encoder.h"
+
+#include "base/bind.h"
+#include "base/location.h"
+#include "base/logging.h"
+#include "base/memory/scoped_vector.h"
+#include "base/message_loop/message_loop_proxy.h"
+#include "base/synchronization/waitable_event.h"
+#include "content/renderer/media/renderer_gpu_video_accelerator_factories.h"
+#include "media/base/bitstream_buffer.h"
+#include "media/base/video_frame.h"
+#include "media/filters/gpu_video_accelerator_factories.h"
+#include "media/video/video_encode_accelerator.h"
+
// Logs the error value, then forwards it to this object's NotifyError()
// (the VEA::Client handler). Intended for use only inside
// RTCVideoEncoder::Impl member functions; it is #undef'd after the Impl
// definitions below so it cannot leak into other translation-unit code.
#define NOTIFY_ERROR(x) \
 do { \
 DLOG(ERROR) << "calling NotifyError(): " << x; \
 NotifyError(x); \
 } while (0)
+
+namespace content {
+
+// This private class of RTCVideoEncoder does the actual work of communicating
+// with a media::VideoEncodeAccelerator for handling video encoding. It can
+// be created on any thread, but should subsequently be posted to (and Destroy()
+// called on) a single thread. Callbacks to RTCVideoEncoder are posted to the
+// thread on which the instance was constructed.
+//
+// This class separates state related to the thread that RTCVideoEncoder
+// operates on (presently the libjingle worker thread) from the thread that
+// |gpu_factories_| provides for accelerator operations (presently the media
+// thread). The RTCVideoEncoder class can be deleted directly by WebRTC, while
+// RTCVideoEncoder::Impl stays around long enough to properly shut down the VEA.
class RTCVideoEncoder::Impl
    : public media::VideoEncodeAccelerator::Client,
      public base::RefCountedThreadSafe<RTCVideoEncoder::Impl> {
 public:
  Impl(
      const base::WeakPtr<RTCVideoEncoder>& weak_encoder,
      const scoped_refptr<RendererGpuVideoAcceleratorFactories>& gpu_factories);

  // Create the VEA and call Initialize() on it. Called once per instantiation,
  // and then the instance is bound forevermore to whichever thread made the
  // call.
  // RTCVideoEncoder expects to be able to call this function synchronously from
  // its own thread, hence the |async_waiter| and |async_retval| arguments.
  void CreateAndInitializeVEA(const gfx::Size& input_visible_size,
                              uint32 bitrate,
                              media::VideoCodecProfile profile,
                              base::WaitableEvent* async_waiter,
                              int32_t* async_retval);
  // Enqueue a frame from WebRTC for encoding.
  // RTCVideoEncoder expects to be able to call this function synchronously from
  // its own thread, hence the |async_waiter| and |async_retval| arguments.
  void Enqueue(const webrtc::I420VideoFrame* input_frame,
               bool force_keyframe,
               base::WaitableEvent* async_waiter,
               int32_t* async_retval);

  // RTCVideoEncoder is given a buffer to be passed to WebRTC through the
  // RTCVideoEncoder::ReturnEncodedImage() function.  When that is complete,
  // the buffer is returned to Impl by its index using this function.
  void UseOutputBitstreamBufferId(int32 bitstream_buffer_id);

  // Request encoding parameter change for the underlying encoder.
  void RequestEncodingParametersChange(uint32 bitrate, uint32 framerate);

  // Destroy this Impl's encoder.  The destructor is not explicitly called, as
  // Impl is a base::RefCountedThreadSafe.
  void Destroy();

  // media::VideoEncodeAccelerator::Client implementation.
  virtual void NotifyInitializeDone() OVERRIDE;
  virtual void RequireBitstreamBuffers(unsigned int input_count,
                                       const gfx::Size& input_coded_size,
                                       size_t output_buffer_size) OVERRIDE;
  virtual void BitstreamBufferReady(int32 bitstream_buffer_id,
                                    size_t payload_size,
                                    bool key_frame) OVERRIDE;
  virtual void NotifyError(media::VideoEncodeAccelerator::Error error) OVERRIDE;

 private:
  friend class base::RefCountedThreadSafe<Impl>;

  enum {
    kInputBufferExtraCount = 1,  // The number of input buffers allocated, more
                                 // than what is requested by
                                 // VEA::RequireBitstreamBuffers().
    kOutputBufferCount = 3,
  };

  virtual ~Impl();

  // Perform encoding on an input frame from the input queue.
  void EncodeOneFrame();

  // Notify that an input frame is finished for encoding.  |index| is the index
  // of the completed frame in |input_buffers_|.
  void EncodeFrameFinished(int index);

  // Set up/signal |async_waiter_| and |async_retval_|; see declarations below.
  void RegisterAsyncWaiter(base::WaitableEvent* waiter, int32_t* retval);
  void SignalAsyncWaiter(int32_t retval);

  // Detached in the constructor; binds to the first thread that subsequently
  // calls into this object (the VEA/media thread).
  base::ThreadChecker thread_checker_;

  // Weak pointer to the parent RTCVideoEncoder, for posting back VEA::Client
  // notifications.
  const base::WeakPtr<RTCVideoEncoder> weak_encoder_;

  // The message loop on which to post callbacks to |weak_encoder_|.
  const scoped_refptr<base::MessageLoopProxy> encoder_message_loop_proxy_;

  // Factory for creating VEAs, shared memory buffers, etc.
  const scoped_refptr<RendererGpuVideoAcceleratorFactories> gpu_factories_;

  // webrtc::VideoEncoder expects InitEncode() and Encode() to be synchronous.
  // Do this by waiting on the |async_waiter_| and returning the return value in
  // |async_retval_| when initialization completes, encoding completes, or
  // an error occurs.
  base::WaitableEvent* async_waiter_;
  int32_t* async_retval_;

  // The underlying VEA to perform encoding on.
  scoped_ptr<media::VideoEncodeAccelerator> video_encoder_;

  // Next input frame.  Since there is at most one next frame, a single-element
  // queue is sufficient.
  const webrtc::I420VideoFrame* input_next_frame_;

  // Whether to encode a keyframe next.
  bool input_next_frame_keyframe_;

  // Frame sizes.
  gfx::Size input_frame_coded_size_;
  gfx::Size input_visible_size_;

  // Shared memory buffers for input/output with the VEA.
  ScopedVector<base::SharedMemory> input_buffers_;
  ScopedVector<base::SharedMemory> output_buffers_;

  // Input buffers ready to be filled with input from Encode().  As a LIFO since
  // we don't care about ordering.
  std::vector<int> input_buffers_free_;

  // Timestamp of first frame returned from encoder. We calculate subsequent
  // capture times as deltas from this base.
  base::Time time_base_;

  DISALLOW_COPY_AND_ASSIGN(Impl);
};
+
// Constructed on the RTCVideoEncoder (libjingle worker) thread;
// |encoder_message_loop_proxy_| captures that thread's loop so VEA::Client
// notifications can be posted back to it.
RTCVideoEncoder::Impl::Impl(
    const base::WeakPtr<RTCVideoEncoder>& weak_encoder,
    const scoped_refptr<RendererGpuVideoAcceleratorFactories>& gpu_factories)
    : weak_encoder_(weak_encoder),
      encoder_message_loop_proxy_(base::MessageLoopProxy::current()),
      gpu_factories_(gpu_factories),
      async_waiter_(NULL),
      async_retval_(NULL),
      input_next_frame_(NULL),
      input_next_frame_keyframe_(false) {
  // Detach so the checker re-binds to whichever thread first calls into this
  // Impl (expected to be the media thread via CreateAndInitializeVEA()).
  thread_checker_.DetachFromThread();
}
+
// Creates the hardware encoder and starts its (asynchronous) initialization.
// |async_waiter|/|async_retval| are signaled later — on success from
// NotifyInitializeDone(), on failure from NotifyError() — which is why they
// are registered before any early-error path can fire NOTIFY_ERROR.
void RTCVideoEncoder::Impl::CreateAndInitializeVEA(
    const gfx::Size& input_visible_size,
    uint32 bitrate,
    media::VideoCodecProfile profile,
    base::WaitableEvent* async_waiter,
    int32_t* async_retval) {
  DVLOG(3) << "Impl::CreateAndInitializeVEA()";
  DCHECK(thread_checker_.CalledOnValidThread());

  RegisterAsyncWaiter(async_waiter, async_retval);

  // Check for overflow converting bitrate (kilobits/sec) to bits/sec.
  if (bitrate > kuint32max / 1000) {
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kInvalidArgumentError);
    return;
  }

  video_encoder_ = gpu_factories_->CreateVideoEncodeAccelerator(this).Pass();
  if (!video_encoder_) {
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
    return;
  }
  input_visible_size_ = input_visible_size;
  // Initialize() completes asynchronously via NotifyInitializeDone().
  video_encoder_->Initialize(
      media::VideoFrame::I420, input_visible_size_, profile, bitrate * 1000);
}
+
// Queues a single frame for encoding.  At most one frame may be pending
// (DCHECK below); the waiter is signaled either immediately by
// EncodeOneFrame() or later from EncodeFrameFinished() once an input buffer
// frees up.
void RTCVideoEncoder::Impl::Enqueue(const webrtc::I420VideoFrame* input_frame,
                                    bool force_keyframe,
                                    base::WaitableEvent* async_waiter,
                                    int32_t* async_retval) {
  DVLOG(3) << "Impl::Enqueue()";
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(!input_next_frame_);

  RegisterAsyncWaiter(async_waiter, async_retval);
  input_next_frame_ = input_frame;
  input_next_frame_keyframe_ = force_keyframe;

  // If no input buffer is free we simply leave the frame queued;
  // EncodeFrameFinished() will pick it up when a buffer is recycled.
  if (!input_buffers_free_.empty())
    EncodeOneFrame();
}
+
+void RTCVideoEncoder::Impl::UseOutputBitstreamBufferId(
+ int32 bitstream_buffer_id) {
+ DVLOG(3) << "Impl::UseOutputBitstreamBufferIndex(): "
+ "bitstream_buffer_id=" << bitstream_buffer_id;
+ DCHECK(thread_checker_.CalledOnValidThread());
+ if (video_encoder_) {
+ video_encoder_->UseOutputBitstreamBuffer(media::BitstreamBuffer(
+ bitstream_buffer_id,
+ output_buffers_[bitstream_buffer_id]->handle(),
+ output_buffers_[bitstream_buffer_id]->mapped_size()));
+ }
+}
+
// Forwards a bitrate/framerate change to the VEA.  |bitrate| arrives in
// kilobits/sec (WebRTC convention) and is converted to bits/sec here.
void RTCVideoEncoder::Impl::RequestEncodingParametersChange(uint32 bitrate,
                                                            uint32 framerate) {
  DVLOG(3) << "Impl::RequestEncodingParametersChange(): bitrate=" << bitrate
           << ", framerate=" << framerate;
  DCHECK(thread_checker_.CalledOnValidThread());

  // Check for overflow converting bitrate (kilobits/sec) to bits/sec.
  if (bitrate > kuint32max / 1000) {
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kInvalidArgumentError);
    return;
  }

  if (video_encoder_)
    video_encoder_->RequestEncodingParametersChange(bitrate * 1000, framerate);
}
+
// Tears down the VEA.  release() hands ownership to the VEA's own Destroy(),
// which is the VEA contract for shutdown (it deletes itself); the scoped_ptr
// must not also delete it.
void RTCVideoEncoder::Impl::Destroy() {
  DVLOG(3) << "Impl::Destroy()";
  DCHECK(thread_checker_.CalledOnValidThread());
  if (video_encoder_)
    video_encoder_.release()->Destroy();
}
+
// VEA::Client callback: initialization succeeded.  Unblocks the
// InitEncode() call waiting in CreateAndInitializeVEA()'s registered waiter.
void RTCVideoEncoder::Impl::NotifyInitializeDone() {
  DVLOG(3) << "Impl::NotifyInitializeDone()";
  DCHECK(thread_checker_.CalledOnValidThread());
  SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK);
}
+
// VEA::Client callback: allocate the shared-memory input/output buffers the
// encoder asked for.  Input buffers are sized for an I420 frame at the coded
// size (area * 3/2 bytes = 12 bits/pixel); one extra is allocated beyond the
// VEA's request (kInputBufferExtraCount) so Enqueue() can usually proceed
// without waiting.  All output buffers are handed to the VEA immediately.
void RTCVideoEncoder::Impl::RequireBitstreamBuffers(
    unsigned int input_count,
    const gfx::Size& input_coded_size,
    size_t output_buffer_size) {
  DVLOG(3) << "Impl::RequireBitstreamBuffers(): input_count=" << input_count
           << ", input_coded_size=" << input_coded_size.ToString()
           << ", output_buffer_size=" << output_buffer_size;
  DCHECK(thread_checker_.CalledOnValidThread());

  // The encoder may already have been torn down by an earlier error.
  if (!video_encoder_)
    return;

  input_frame_coded_size_ = input_coded_size;

  for (unsigned int i = 0; i < input_count + kInputBufferExtraCount; ++i) {
    base::SharedMemory* shm =
        gpu_factories_->CreateSharedMemory(input_coded_size.GetArea() * 3 / 2);
    if (!shm) {
      DLOG(ERROR) << "Impl::RequireBitstreamBuffers(): "
                     "failed to create input buffer " << i;
      NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
      return;
    }
    input_buffers_.push_back(shm);
    input_buffers_free_.push_back(i);
  }

  for (int i = 0; i < kOutputBufferCount; ++i) {
    base::SharedMemory* shm =
        gpu_factories_->CreateSharedMemory(output_buffer_size);
    if (!shm) {
      DLOG(ERROR) << "Impl::RequireBitstreamBuffers(): "
                     "failed to create output buffer " << i;
      NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
      return;
    }
    output_buffers_.push_back(shm);
  }

  // Immediately provide all output buffers to the VEA.
  for (size_t i = 0; i < output_buffers_.size(); ++i) {
    video_encoder_->UseOutputBitstreamBuffer(media::BitstreamBuffer(
        i, output_buffers_[i]->handle(), output_buffers_[i]->mapped_size()));
  }
}
+
// VEA::Client callback: an encoded bitstream is available in
// |output_buffers_[bitstream_buffer_id]|.  Wraps the payload (no copy) in a
// webrtc::EncodedImage and posts it to the encoder thread; the buffer is only
// recycled once ReturnEncodedImage() posts back UseOutputBitstreamBufferId().
void RTCVideoEncoder::Impl::BitstreamBufferReady(int32 bitstream_buffer_id,
                                                 size_t payload_size,
                                                 bool key_frame) {
  DVLOG(3) << "Impl::BitstreamBufferReady(): "
              "bitstream_buffer_id=" << bitstream_buffer_id
           << ", payload_size=" << payload_size
           << ", key_frame=" << key_frame;
  DCHECK(thread_checker_.CalledOnValidThread());

  // Validate the id/size against our own bookkeeping before trusting them.
  if (bitstream_buffer_id < 0 ||
      bitstream_buffer_id >= static_cast<int>(output_buffers_.size())) {
    DLOG(ERROR) << "Impl::BitstreamBufferReady(): invalid bitstream_buffer_id="
                << bitstream_buffer_id;
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
    return;
  }
  base::SharedMemory* output_buffer = output_buffers_[bitstream_buffer_id];
  if (payload_size > output_buffer->mapped_size()) {
    DLOG(ERROR) << "Impl::BitstreamBufferReady(): invalid payload_size="
                << payload_size;
    NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
    return;
  }

  // Capture time is synthesized as the wall-clock delta since the first
  // encoded frame (see |time_base_| declaration).
  const base::Time now = base::Time::Now();
  if (time_base_.is_null())
    time_base_ = now;
  const base::TimeDelta delta = now - time_base_;

  // The EncodedImage points directly into the shared-memory output buffer;
  // it stays valid until the buffer is recycled (see comment at top).
  scoped_ptr<webrtc::EncodedImage> image(new webrtc::EncodedImage(
      reinterpret_cast<uint8_t*>(output_buffer->memory()),
      payload_size,
      output_buffer->mapped_size()));
  image->_encodedWidth = input_visible_size_.width();
  image->_encodedHeight = input_visible_size_.height();
  // Convert capture time to 90 kHz RTP timestamp: (delta * 90000).InSeconds()
  // yields the number of whole 90 kHz ticks in |delta|.
  image->_timeStamp = (delta * 90000).InSeconds();
  image->capture_time_ms_ = delta.InMilliseconds();
  image->_frameType = (key_frame ? webrtc::kKeyFrame : webrtc::kDeltaFrame);
  image->_completeFrame = true;

  encoder_message_loop_proxy_->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::ReturnEncodedImage,
                 weak_encoder_,
                 base::Passed(&image),
                 bitstream_buffer_id));
}
+
// VEA::Client callback (also invoked locally via NOTIFY_ERROR): map the VEA
// error to a WebRTC status code and tear down the encoder.  If a synchronous
// caller is currently blocked (InitEncode()/Encode()), unblock it with the
// error code; otherwise report the error asynchronously to RTCVideoEncoder.
void RTCVideoEncoder::Impl::NotifyError(
    media::VideoEncodeAccelerator::Error error) {
  DVLOG(3) << "Impl::NotifyError(): error=" << error;
  DCHECK(thread_checker_.CalledOnValidThread());
  int32_t retval;
  switch (error) {
    case media::VideoEncodeAccelerator::kInvalidArgumentError:
      retval = WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
      break;
    default:
      retval = WEBRTC_VIDEO_CODEC_ERROR;
  }

  // Hand ownership to the VEA's Destroy(); see Impl::Destroy() for why
  // release() is used rather than letting the scoped_ptr delete it.
  if (video_encoder_)
    video_encoder_.release()->Destroy();

  if (async_waiter_) {
    SignalAsyncWaiter(retval);
  } else {
    encoder_message_loop_proxy_->PostTask(
        FROM_HERE,
        base::Bind(&RTCVideoEncoder::NotifyError, weak_encoder_, retval));
  }
}
+
// Destroy() must have run (and released the VEA) before the last reference
// to this Impl goes away.
RTCVideoEncoder::Impl::~Impl() { DCHECK(!video_encoder_); }
+
+void RTCVideoEncoder::Impl::EncodeOneFrame() {
+ DVLOG(3) << "Impl::EncodeOneFrame()";
+ DCHECK(thread_checker_.CalledOnValidThread());
+ DCHECK(input_next_frame_);
+ DCHECK(!input_buffers_free_.empty());
+
+ // EncodeOneFrame() may re-enter EncodeFrameFinished() if VEA::Encode() fails,
+ // we receive a VEA::NotifyError(), and the media::VideoFrame we pass to
+ // Encode() gets destroyed early. Handle this by resetting our
+ // input_next_frame_* state before we hand off the VideoFrame to the VEA.
+ const webrtc::I420VideoFrame* next_frame = input_next_frame_;
+ bool next_frame_keyframe = input_next_frame_keyframe_;
+ input_next_frame_ = NULL;
+ input_next_frame_keyframe_ = false;
+
+ if (!video_encoder_) {
+ SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_ERROR);
+ return;
+ }
+
+ const int index = input_buffers_free_.back();
+ base::SharedMemory* input_buffer = input_buffers_[index];
+
+ // Do a strided copy of the input frame to match the input requirements for
+ // the encoder.
+ // TODO(sheu): support zero-copy from WebRTC. http://crbug.com/269312
+ const uint8_t* src = next_frame->buffer(webrtc::kYPlane);
+ uint8* dst = reinterpret_cast<uint8*>(input_buffer->memory());
+ uint8* const y_dst = dst;
+ int width = input_frame_coded_size_.width();
+ int stride = next_frame->stride(webrtc::kYPlane);
+ for (int i = 0; i < next_frame->height(); ++i) {
+ memcpy(dst, src, width);
+ src += stride;
+ dst += width;
+ }
+ src = next_frame->buffer(webrtc::kUPlane);
+ width = input_frame_coded_size_.width() / 2;
+ stride = next_frame->stride(webrtc::kUPlane);
+ for (int i = 0; i < next_frame->height() / 2; ++i) {
+ memcpy(dst, src, width);
+ src += stride;
+ dst += width;
+ }
+ src = next_frame->buffer(webrtc::kVPlane);
+ width = input_frame_coded_size_.width() / 2;
+ stride = next_frame->stride(webrtc::kVPlane);
+ for (int i = 0; i < next_frame->height() / 2; ++i) {
+ memcpy(dst, src, width);
+ src += stride;
+ dst += width;
+ }
+
+ scoped_refptr<media::VideoFrame> frame =
+ media::VideoFrame::WrapExternalSharedMemory(
+ media::VideoFrame::I420,
+ input_frame_coded_size_,
+ gfx::Rect(input_visible_size_),
+ input_visible_size_,
+ y_dst,
+ input_buffer->handle(),
+ base::TimeDelta(),
+ base::Bind(&RTCVideoEncoder::Impl::EncodeFrameFinished, this, index));
+
+ video_encoder_->Encode(frame, next_frame_keyframe);
+ input_buffers_free_.pop_back();
+ SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK);
+}
+
// Release callback for the media::VideoFrame handed to the VEA: the input
// buffer at |index| is free again.  If a frame is queued waiting for a
// buffer, encode it now (this unblocks the Encode() caller).
void RTCVideoEncoder::Impl::EncodeFrameFinished(int index) {
  DVLOG(3) << "Impl::EncodeFrameFinished(): index=" << index;
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK_GE(index, 0);
  DCHECK_LT(index, static_cast<int>(input_buffers_.size()));
  input_buffers_free_.push_back(index);
  if (input_next_frame_)
    EncodeOneFrame();
}
+
// Stashes the waiter/retval pair for a pending synchronous call.  At most one
// synchronous call may be outstanding at a time (DCHECKs below); the pair is
// consumed and cleared by SignalAsyncWaiter().
void RTCVideoEncoder::Impl::RegisterAsyncWaiter(base::WaitableEvent* waiter,
                                                int32_t* retval) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(!async_waiter_);
  DCHECK(!async_retval_);
  async_waiter_ = waiter;
  async_retval_ = retval;
}
+
// Writes the result and wakes the blocked caller.  The retval must be stored
// BEFORE Signal(): once signaled, the waiting thread may resume and read it
// (and the pointed-to storage lives on that thread's stack).
void RTCVideoEncoder::Impl::SignalAsyncWaiter(int32_t retval) {
  DCHECK(thread_checker_.CalledOnValidThread());
  *async_retval_ = retval;
  async_waiter_->Signal();
  async_retval_ = NULL;
  async_waiter_ = NULL;
}
+
+#undef NOTIFY_ERROR
+
+////////////////////////////////////////////////////////////////////////////////
+//
+// RTCVideoEncoder
+//
+////////////////////////////////////////////////////////////////////////////////
+
// The encoder starts uninitialized; InitEncode() creates the Impl and the
// underlying VEA.
RTCVideoEncoder::RTCVideoEncoder(
    webrtc::VideoCodecType type,
    media::VideoCodecProfile profile,
    const scoped_refptr<RendererGpuVideoAcceleratorFactories>& gpu_factories)
    : video_codec_type_(type),
      video_codec_profile_(profile),
      gpu_factories_(gpu_factories),
      encoded_image_callback_(NULL),
      impl_status_(WEBRTC_VIDEO_CODEC_UNINITIALIZED) {
  DVLOG(1) << "RTCVideoEncoder(): profile=" << profile;
}
+
// Release() posts Impl::Destroy() to the VEA thread and drops |impl_|; the
// ref-counted Impl outlives this object long enough to shut down the VEA.
RTCVideoEncoder::~RTCVideoEncoder() {
  DCHECK(thread_checker_.CalledOnValidThread());
  Release();
  DCHECK(!impl_);
}
+
// webrtc::VideoEncoder implementation.  Creates the Impl, posts VEA
// creation/initialization to the media thread, and blocks until the Impl
// signals completion (the webrtc::VideoEncoder API is synchronous).
// Returns a WEBRTC_VIDEO_CODEC_* status.
int32_t RTCVideoEncoder::InitEncode(const webrtc::VideoCodec* codec_settings,
                                    int32_t number_of_cores,
                                    uint32_t max_payload_size) {
  DVLOG(1) << "InitEncode(): codecType=" << codec_settings->codecType
           << ", width=" << codec_settings->width
           << ", height=" << codec_settings->height
           << ", startBitrate=" << codec_settings->startBitrate;
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(!impl_);

  // A fresh WeakPtrFactory per Impl, so callbacks from a previous Impl
  // (invalidated in Release()) can never reach this object.
  weak_this_factory_.reset(new base::WeakPtrFactory<RTCVideoEncoder>(this));
  impl_ = new Impl(weak_this_factory_->GetWeakPtr(), gpu_factories_);
  base::WaitableEvent initialization_waiter(true, false);
  int32_t initialization_retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  gpu_factories_->GetMessageLoop()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::Impl::CreateAndInitializeVEA,
                 impl_,
                 gfx::Size(codec_settings->width, codec_settings->height),
                 codec_settings->startBitrate,
                 video_codec_profile_,
                 &initialization_waiter,
                 &initialization_retval));

  // webrtc::VideoEncoder expects this call to be synchronous.
  initialization_waiter.Wait();
  return initialization_retval;
}
+
+int32_t RTCVideoEncoder::Encode(
+ const webrtc::I420VideoFrame& input_image,
+ const webrtc::CodecSpecificInfo* codec_specific_info,
+ const std::vector<webrtc::VideoFrameType>* frame_types) {
+ DVLOG(3) << "Encode()";
+ // TODO(sheu): figure out why this check fails.
+ // DCHECK(thread_checker_.CalledOnValidThread());
+ if (!impl_) {
+ DVLOG(3) << "Encode(): returning impl_status_=" << impl_status_;
+ return impl_status_;
+ }
+
+ base::WaitableEvent encode_waiter(true, false);
+ int32_t encode_retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+ gpu_factories_->GetMessageLoop()->PostTask(
+ FROM_HERE,
+ base::Bind(&RTCVideoEncoder::Impl::Enqueue,
+ impl_,
+ &input_image,
+ (frame_types->front() == webrtc::kKeyFrame),
+ &encode_waiter,
+ &encode_retval));
+
+ // webrtc::VideoEncoder expects this call to be synchronous.
+ encode_waiter.Wait();
+ DVLOG(3) << "Encode(): returning encode_retval=" << encode_retval;
+ return encode_retval;
+}
+
+int32_t RTCVideoEncoder::RegisterEncodeCompleteCallback(
+ webrtc::EncodedImageCallback* callback) {
+ DVLOG(3) << "RegisterEncodeCompleteCallback()";
+ DCHECK(thread_checker_.CalledOnValidThread());
+ if (!impl_) {
+ DVLOG(3) << "RegisterEncodeCompleteCallback(): returning " << impl_status_;
+ return impl_status_;
+ }
+
+ encoded_image_callback_ = callback;
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
// webrtc::VideoEncoder implementation.  Shuts down the Impl (asynchronously,
// on the media thread) and resets this object so it can be re-initialized.
int32_t RTCVideoEncoder::Release() {
  DVLOG(3) << "Release()";
  DCHECK(thread_checker_.CalledOnValidThread());

  // Reset the gpu_factory_, in case we reuse this encoder.
  gpu_factories_->Abort();
  gpu_factories_ = gpu_factories_->Clone();
  if (impl_) {
    gpu_factories_->GetMessageLoop()->PostTask(
        FROM_HERE, base::Bind(&RTCVideoEncoder::Impl::Destroy, impl_));
    impl_ = NULL;
    // Invalidating the weak pointers guarantees no stale Impl callback can
    // reach this object after this point.
    weak_this_factory_.reset();
    impl_status_ = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  return WEBRTC_VIDEO_CODEC_OK;
}
+
// webrtc::VideoEncoder implementation.  Channel feedback (packet loss, RTT)
// is intentionally not forwarded to the hardware encoder; always succeeds.
int32_t RTCVideoEncoder::SetChannelParameters(uint32_t packet_loss, int rtt) {
  DVLOG(3) << "SetChannelParameters(): packet_loss=" << packet_loss
           << ", rtt=" << rtt;
  DCHECK(thread_checker_.CalledOnValidThread());
  // Ignored.
  return WEBRTC_VIDEO_CODEC_OK;
}
+
// webrtc::VideoEncoder implementation.  Forwards a rate change (fire-and-
// forget; no completion wait is needed) to Impl on the media thread.
// |new_bit_rate| is in kilobits/sec; Impl converts to bits/sec.
int32_t RTCVideoEncoder::SetRates(uint32_t new_bit_rate, uint32_t frame_rate) {
  DVLOG(3) << "SetRates(): new_bit_rate=" << new_bit_rate
           << ", frame_rate=" << frame_rate;
  DCHECK(thread_checker_.CalledOnValidThread());
  if (!impl_) {
    DVLOG(3) << "SetRates(): returning " << impl_status_;
    return impl_status_;
  }

  gpu_factories_->GetMessageLoop()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::Impl::RequestEncodingParametersChange,
                 impl_,
                 new_bit_rate,
                 frame_rate));
  return WEBRTC_VIDEO_CODEC_OK;
}
+
// Runs on the encoder thread (posted from Impl::BitstreamBufferReady()).
// Delivers the encoded image to WebRTC via |encoded_image_callback_|, then
// recycles the output buffer back to Impl.  |image| points into the shared-
// memory buffer identified by |bitstream_buffer_id|, which is why recycling
// must not happen until after the callback returns.
void RTCVideoEncoder::ReturnEncodedImage(scoped_ptr<webrtc::EncodedImage> image,
                                         int32 bitstream_buffer_id) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DVLOG(3) << "ReturnEncodedImage(): "
              "bitstream_buffer_id=" << bitstream_buffer_id;

  // If WebRTC never registered a callback, the encoded data (and the output
  // buffer) is silently dropped; the buffer is NOT recycled in this case.
  if (!encoded_image_callback_)
    return;

  webrtc::CodecSpecificInfo info;
  info.codecType = video_codec_type_;

  // Generate a header describing a single fragment.
  webrtc::RTPFragmentationHeader header;
  header.VerifyAndAllocateFragmentationHeader(1);
  header.fragmentationOffset[0] = 0;
  header.fragmentationLength[0] = image->_length;
  header.fragmentationPlType[0] = 0;
  header.fragmentationTimeDiff[0] = 0;

  int32_t retval = encoded_image_callback_->Encoded(*image, &info, &header);
  if (retval < 0) {
    DVLOG(2) << "ReturnEncodedImage(): encoded_image_callback_ returned "
             << retval;
  }

  // The call through webrtc::EncodedImageCallback is synchronous, so we can
  // immediately recycle the output buffer back to the Impl.
  gpu_factories_->GetMessageLoop()->PostTask(
      FROM_HERE,
      base::Bind(&RTCVideoEncoder::Impl::UseOutputBitstreamBufferId,
                 impl_,
                 bitstream_buffer_id));
}
+
// Runs on the encoder thread (posted from Impl::NotifyError()).  Records the
// error as the status future calls will return, and tears down the Impl.
// NOTE(review): unlike Release(), this assumes |impl_| is still non-NULL
// when the posted error arrives — confirm Release() can't race this task.
void RTCVideoEncoder::NotifyError(int32_t error) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DVLOG(1) << "NotifyError(): error=" << error;

  impl_status_ = error;
  gpu_factories_->GetMessageLoop()->PostTask(
      FROM_HERE, base::Bind(&RTCVideoEncoder::Impl::Destroy, impl_));
  impl_ = NULL;
}
+
+} // namespace content
« no previous file with comments | « content/renderer/media/rtc_video_encoder.h ('k') | content/renderer/media/rtc_video_encoder_factory.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698