Chromium Code Reviews

| Index: content/renderer/media/rtc_video_encoder.cc |
| diff --git a/content/renderer/media/rtc_video_encoder.cc b/content/renderer/media/rtc_video_encoder.cc |
| new file mode 100644 |
| index 0000000000000000000000000000000000000000..54079c46b640f6fc009725ce98a70f33c27ea77c |
| --- /dev/null |
| +++ b/content/renderer/media/rtc_video_encoder.cc |
| @@ -0,0 +1,585 @@ |
| +// Copyright 2013 The Chromium Authors. All rights reserved. |
| +// Use of this source code is governed by a BSD-style license that can be |
| +// found in the LICENSE file. |
| + |
| +#include "content/renderer/media/rtc_video_encoder.h" |
| + |
| +#include "base/bind.h" |
| +#include "base/location.h" |
| +#include "base/logging.h" |
| +#include "base/memory/scoped_vector.h" |
| +#include "base/message_loop/message_loop_proxy.h" |
| +#include "base/synchronization/waitable_event.h" |
| +#include "media/base/bitstream_buffer.h" |
| +#include "media/filters/gpu_video_accelerator_factories.h" |
| +#include "media/video/video_encode_accelerator.h" |
| + |
| +#define NOTIFY_ERROR(x) \ |
| + do { \ |
| + DLOG(ERROR) << "calling NotifyError(): " << x; \ |
| + NotifyError(x); \ |
| + } while (0) |
| + |
| +namespace content { |
| + |
| +// This private class of RTCVideoEncoder does the actual work of communicating |
| +// with a media::VideoEncodeAccelerator for handling video encoding. It can |
| +// be created on any thread, but all subsequent calls (including Destroy()) |
| +// should be made on the thread that calls CreateAndInitializeVEA(). |
| +// Notifications from the encoder are posted back to the thread on which the |
| +// instance was constructed. |
| +// |
| +// This class is separated from the RTCVideoEncoder class to allow |
| +// RTCVideoEncoder to be deleted directly by WebRTC, while RTCVideoEncoder::Impl |
| +// stays long enough to properly shut down the VEA. |
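| +// |
| +// RTCVideoEncoder itself satisfies the synchronous webrtc::VideoEncoder |
| +// contract by posting work to Impl on the GPU factories message loop and |
| +// blocking on a base::WaitableEvent until Impl signals completion. |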
|
Ami GONE FROM CHROMIUM
2013/08/02 22:00:49
You seem to have missed my super-insight into why
sheu
2013/08/03 01:31:03
I moved state around and so pimpl is making more s
|
| +class RTCVideoEncoder::Impl |
| + : public media::VideoEncodeAccelerator::Client, |
| + public base::RefCountedThreadSafe<RTCVideoEncoder::Impl> { |
| + public: |
| + explicit Impl(const base::WeakPtr<RTCVideoEncoder>& weak_encoder); |
| + |
| + // Create the VEA and call Initialize() on it. This instance of Impl is bound |
| + // to whichever thread makes this call. |
| + void CreateAndInitializeVEA( |
| + const webrtc::VideoCodec& codecSettings, |
| + media::VideoCodecProfile profile, |
| + base::WaitableEvent* async_waiter, |
| + int32_t* async_retval, |
| + const scoped_refptr<media::GpuVideoAcceleratorFactories>& gpu_factories); |
| + void Enqueue(const webrtc::I420VideoFrame* input_frame, bool force_keyframe, |
| + base::WaitableEvent* async_waiter, int32_t* async_retval); |
| + void UseOutputBitstreamBufferId(int32 bitstream_buffer_id); |
| + void RequestEncodingParameterChange(int32 bitrate); |
| + void Destroy(); |
| + |
| + // media::VideoEncodeAccelerator::Client implementation. |
| + virtual void NotifyInitializeDone() OVERRIDE; |
| + virtual void RequireBitstreamBuffers(int input_count, |
| + const gfx::Size& input_dimensions, |
| + size_t output_size) OVERRIDE; |
| + virtual void NotifyInputDone(int32 bitstream_buffer_id) OVERRIDE; |
| + virtual void BitstreamBufferReady(int32 bitstream_buffer_id, |
| + size_t payload_size, |
| + bool key_frame) OVERRIDE; |
| + virtual void NotifyError(media::VideoEncodeAccelerator::Error error) OVERRIDE; |
| + |
| + private: |
| + friend class base::RefCountedThreadSafe<Impl>; |
| + |
| + enum { |
| +    kInputBufferExtraCount = 1,  // The number of input buffers to allocate |
| +                                 // beyond the count requested by the VEA via |
| +                                 // RequireBitstreamBuffers(). |
| + kOutputBufferCount = 3, |
| + }; |
| + |
| + virtual ~Impl(); |
| + |
| + // Perform encoding on an input frame from the input queue. |
| + void EncodeOneFrame(); |
| + |
| + base::ThreadChecker thread_checker_; |
| + |
| + // Weak pointer to the parent RTCVideoEncoder, for posting back VEA::Client |
| + // notifications. |
| + const base::WeakPtr<RTCVideoEncoder> weak_encoder_; |
| + |
| + // Factory for creating VEAs, shared memory buffers, etc. |
| + scoped_refptr<media::GpuVideoAcceleratorFactories> gpu_factories_; |
| + |
| + // The message loop on which to post notifications. |
| + const scoped_refptr<base::MessageLoopProxy> encoder_message_loop_proxy_; |
| + |
| + // webrtc::VideoEncoder expects InitEncode() and Encode() to be synchronous. |
| +  // This is achieved by having the caller wait on |async_waiter_| and read |
| +  // the return value from |async_retval_| once initialization completes, |
| +  // encoding completes, or an error occurs. |
| + base::WaitableEvent* async_waiter_; |
| + int32_t* async_retval_; |
| + |
| +  // The underlying VEA used to perform encoding. |
| + scoped_ptr<media::VideoEncodeAccelerator> video_encoder_; |
| + |
| + // Next input frame. Since there is at most one next frame, a single-element |
| + // queue is sufficient. |
| + const webrtc::I420VideoFrame* input_next_frame_; |
| + |
| + // Whether to encode a keyframe next. |
| + bool input_next_frame_keyframe_; |
| + |
| + // Frame sizes. |
| + gfx::Size input_frame_dimensions_; |
| + gfx::Size output_frame_dimensions_; |
| + |
| + // Shared memory buffers for input/output with the VEA. |
| + ScopedVector<base::SharedMemory> input_buffers_; |
| + ScopedVector<base::SharedMemory> output_buffers_; |
| + |
| +  // Input buffers ready to be filled with input from Encode(). Maintained as |
| +  // a LIFO since ordering does not matter. |
| + std::vector<int> input_buffers_free_; |
| + |
| + DISALLOW_COPY_AND_ASSIGN(Impl); |
| +}; |
| + |
| +RTCVideoEncoder::Impl::Impl(const base::WeakPtr<RTCVideoEncoder>& weak_encoder) |
| + : weak_encoder_(weak_encoder), |
| + encoder_message_loop_proxy_(base::MessageLoopProxy::current()), |
| + async_waiter_(NULL), |
| + async_retval_(NULL), |
| + input_next_frame_(NULL), |
| + input_next_frame_keyframe_(false) { |
| + thread_checker_.DetachFromThread(); |
| +} |
| + |
| +void RTCVideoEncoder::Impl::CreateAndInitializeVEA( |
| + const webrtc::VideoCodec& codecSettings, |
| + media::VideoCodecProfile profile, |
| + base::WaitableEvent* async_waiter, |
| + int32_t* async_retval, |
| + const scoped_refptr<media::GpuVideoAcceleratorFactories>& gpu_factories) { |
| + DVLOG(3) << "Impl::CreateAndInitializeVEA()"; |
| + DCHECK(thread_checker_.CalledOnValidThread()); |
| + |
| + async_waiter_ = async_waiter; |
| + async_retval_ = async_retval; |
| + gpu_factories_ = gpu_factories; |
| + |
| + video_encoder_ = gpu_factories_->CreateVideoEncodeAccelerator(this).Pass(); |
| + if (!video_encoder_) { |
| + NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError); |
| + return; |
| + } |
| + output_frame_dimensions_.SetSize(codecSettings.width, codecSettings.height); |
| + video_encoder_->Initialize( |
| + media::VideoFrame::I420, |
| + output_frame_dimensions_, |
| + profile, |
| + codecSettings.startBitrate * 1000); // startBitrate is in kbits/sec. |
|
hshi1
2013/08/02 17:29:36
(copied from the EVS review comment by palmer@) sh
sheu
2013/08/03 01:31:03
Done.
|
| +} |
| + |
| +void RTCVideoEncoder::Impl::Enqueue(const webrtc::I420VideoFrame* input_frame, |
| + bool force_keyframe, |
| + base::WaitableEvent* async_waiter, |
| + int32_t* async_retval) { |
| + DVLOG(3) << "Impl::Enqueue()"; |
| + DCHECK(thread_checker_.CalledOnValidThread()); |
| + DCHECK(!input_next_frame_); |
| + |
| + async_waiter_ = async_waiter; |
| + async_retval_ = async_retval; |
| + input_next_frame_ = input_frame; |
| + input_next_frame_keyframe_ = force_keyframe; |
| + |
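| +  // If no input buffer is free, hold on to the frame; EncodeOneFrame() will |
| +  // run from NotifyInputDone() once a buffer is returned, and only then is |
| +  // the waiter signaled. |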
| + if (!input_buffers_free_.empty()) |
| + EncodeOneFrame(); |
| +} |
| + |
| +void RTCVideoEncoder::Impl::UseOutputBitstreamBufferId( |
| + int32 bitstream_buffer_id) { |
| + DVLOG(3) << "Impl::UseOutputBitstreamBufferIndex(): " |
| + "bitstream_buffer_id=" << bitstream_buffer_id; |
| + DCHECK(thread_checker_.CalledOnValidThread()); |
| + if (video_encoder_) { |
| + video_encoder_->UseOutputBitstreamBuffer(media::BitstreamBuffer( |
| + bitstream_buffer_id, |
| + output_buffers_[bitstream_buffer_id]->handle(), |
| + output_buffers_[bitstream_buffer_id]->mapped_size())); |
| + } |
| +} |
| + |
| +void RTCVideoEncoder::Impl::RequestEncodingParameterChange(int32 bitrate) { |
| + DVLOG(3) << "Impl::RequestEncodingParameterChange(): bitrate=" << bitrate; |
| + DCHECK(thread_checker_.CalledOnValidThread()); |
| + if (video_encoder_) |
| + video_encoder_->RequestEncodingParameterChange(bitrate); |
| +} |
| + |
| +void RTCVideoEncoder::Impl::Destroy() { |
| + DVLOG(3) << "Impl::Destroy()"; |
| + DCHECK(thread_checker_.CalledOnValidThread()); |
| + if (video_encoder_) |
| + video_encoder_.release()->Destroy(); |
| +} |
| + |
| +void RTCVideoEncoder::Impl::NotifyInitializeDone() { |
| + DVLOG(3) << "Impl::NotifyInitializeDone()"; |
| + DCHECK(thread_checker_.CalledOnValidThread()); |
| + *async_retval_ = WEBRTC_VIDEO_CODEC_OK; |
| + async_waiter_->Signal(); |
| + async_retval_ = NULL; |
| + async_waiter_ = NULL; |
| +} |
| + |
| +void RTCVideoEncoder::Impl::RequireBitstreamBuffers( |
| + int input_count, |
| + const gfx::Size& input_dimensions, |
| + size_t output_size) { |
| + DVLOG(3) << "Impl::RequireBitstreamBuffers(): input_count=" << input_count |
| + << ", input_dimensions=" << input_dimensions.ToString() |
| + << ", output_size=" << output_size; |
| + DCHECK(thread_checker_.CalledOnValidThread()); |
| + |
| + if (!video_encoder_) |
| + return; |
| + |
| + input_frame_dimensions_ = input_dimensions; |
| + |
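| +  // For I420 each frame occupies width * height bytes of Y plus half that |
| +  // again for the U and V planes together, i.e. GetArea() * 3 / 2 bytes. |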
| + for (int i = 0; i < input_count + kInputBufferExtraCount; ++i) { |
| + base::SharedMemory* shm = gpu_factories_->CreateSharedMemory( |
| + input_dimensions.GetArea() * 3 / 2); |
| + if (!shm) { |
| + DLOG(ERROR) << "Impl::RequireBitstreamBuffers(): " |
| + "failed to create input buffer " << i; |
| + NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError); |
| + return; |
| + } |
| + input_buffers_.push_back(shm); |
| + } |
| + for (size_t i = 0; i < input_buffers_.size(); ++i) |
| + input_buffers_free_.push_back(i); |
| + |
| + for (int i = 0; i < kOutputBufferCount; ++i) { |
| + base::SharedMemory* shm = gpu_factories_->CreateSharedMemory(output_size); |
| + if (!shm) { |
| + DLOG(ERROR) << "Impl::RequireBitstreamBuffers(): " |
| + "failed to create output buffer " << i; |
| + NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError); |
| + return; |
| + } |
| + output_buffers_.push_back(shm); |
| + } |
| + |
| + // Immediately provide all output buffers to the VEA. |
| + for (size_t i = 0; i < output_buffers_.size(); ++i) { |
| + video_encoder_->UseOutputBitstreamBuffer(media::BitstreamBuffer( |
| + i, output_buffers_[i]->handle(), output_buffers_[i]->mapped_size())); |
| + } |
| +} |
| + |
| +void RTCVideoEncoder::Impl::NotifyInputDone(int32 bitstream_buffer_id) { |
| + DVLOG(3) << "Impl::NotifyInputDone(): bitstream_buffer_id=" |
| + << bitstream_buffer_id; |
| + DCHECK(thread_checker_.CalledOnValidThread()); |
| + if (bitstream_buffer_id < 0 || |
| + bitstream_buffer_id >= static_cast<int>(input_buffers_.size())) { |
| + DLOG(ERROR) << "Impl::NotifyInputDone(): invalid bitstream_buffer_id=" |
| + << bitstream_buffer_id; |
| + NOTIFY_ERROR(media::VideoEncodeAccelerator::kInvalidArgumentError); |
| + return; |
| + } |
| + |
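| +  // Returning a buffer to the free list may unblock a frame held by |
| +  // Enqueue(). |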
| + input_buffers_free_.push_back(bitstream_buffer_id); |
| + if (input_next_frame_) |
| + EncodeOneFrame(); |
| +} |
| + |
| +void RTCVideoEncoder::Impl::BitstreamBufferReady(int32 bitstream_buffer_id, |
| + size_t payload_size, |
| + bool key_frame) { |
| + DVLOG(3) << "Impl::BitstreamBufferReady(): " |
| + "bitstream_buffer_id=" << bitstream_buffer_id |
| + << ", payload_size=" << payload_size |
| + << ", key_frame=" << key_frame; |
| + DCHECK(thread_checker_.CalledOnValidThread()); |
| + |
| + if (bitstream_buffer_id < 0 || |
| + bitstream_buffer_id >= static_cast<int>(output_buffers_.size())) { |
| + DLOG(ERROR) << "Impl::BitstreamBufferReady(): invalid bitstream_buffer_id=" |
| + << bitstream_buffer_id; |
| + NOTIFY_ERROR(media::VideoEncodeAccelerator::kInvalidArgumentError); |
| + return; |
| + } |
| + base::SharedMemory* output_buffer = output_buffers_[bitstream_buffer_id]; |
| + if (payload_size > output_buffer->mapped_size()) { |
| + DLOG(ERROR) << "Impl::BitstreamBufferReady(): invalid payload_size=" |
| + << payload_size; |
| + NOTIFY_ERROR(media::VideoEncodeAccelerator::kInvalidArgumentError); |
| + return; |
| + } |
| + |
| + scoped_ptr<webrtc::EncodedImage> image(new webrtc::EncodedImage( |
| + reinterpret_cast<uint8_t*>(output_buffer->memory()), |
| + payload_size, |
| + output_buffer->mapped_size())); |
| + image->_encodedWidth = output_frame_dimensions_.width(); |
| + image->_encodedHeight = output_frame_dimensions_.height(); |
| + image->_frameType = (key_frame ? webrtc::kKeyFrame : webrtc::kDeltaFrame); |
| + image->_completeFrame = true; |
| + |
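| +  // |image| refers directly into the shared memory output buffer; the data |
| +  // remains valid because the buffer is recycled only after |
| +  // RTCVideoEncoder::ReturnEncodedImage() has delivered it through the |
| +  // synchronous WebRTC callback. |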
| + encoder_message_loop_proxy_->PostTask( |
| + FROM_HERE, |
| + base::Bind(&RTCVideoEncoder::ReturnEncodedImage, |
| + weak_encoder_, |
| + make_scoped_refptr(this), |
| + base::Passed(&image), |
| + bitstream_buffer_id)); |
| +} |
| + |
| +void RTCVideoEncoder::Impl::NotifyError( |
| + media::VideoEncodeAccelerator::Error error) { |
| + DVLOG(3) << "Impl::NotifyError(): error=" << error; |
| + DCHECK(thread_checker_.CalledOnValidThread()); |
| + int32_t retval; |
| + switch (error) { |
| + default: |
| + retval = WEBRTC_VIDEO_CODEC_ERROR; |
| + } |
| + |
| + if (video_encoder_) |
| + video_encoder_.release()->Destroy(); |
| + |
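| +  // If a synchronous operation is in flight, report the error through its |
| +  // waiter; otherwise post the error to the parent RTCVideoEncoder. |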
| + if (async_waiter_) { |
| + *async_retval_ = retval; |
| + async_waiter_->Signal(); |
| + async_retval_ = NULL; |
| + async_waiter_ = NULL; |
| + } else { |
| + encoder_message_loop_proxy_->PostTask( |
| + FROM_HERE, |
| + base::Bind(&RTCVideoEncoder::NotifyError, |
| + weak_encoder_, |
| + make_scoped_refptr(this), |
| + retval)); |
| + } |
| +} |
| + |
| +RTCVideoEncoder::Impl::~Impl() { |
| + DCHECK(!video_encoder_); |
| +} |
| + |
| +void RTCVideoEncoder::Impl::EncodeOneFrame() { |
| + DVLOG(3) << "Impl::EncodeOneFrame()"; |
| + DCHECK(thread_checker_.CalledOnValidThread()); |
| + DCHECK(input_next_frame_); |
| + DCHECK(!input_buffers_free_.empty()); |
| + DCHECK(async_waiter_); |
| + DCHECK(async_retval_); |
| + |
| + const int index = input_buffers_free_.back(); |
| + base::SharedMemory* input_buffer = input_buffers_[index]; |
| + |
| + if (video_encoder_) { |
| + // Do a strided copy of the input frame to match the input requirements for |
| + // the encoder. |
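| +    // Source rows are |stride| bytes apart while the destination shared |
| +    // memory buffer is packed at |width| bytes per row, so rows are copied |
| +    // one at a time. |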
| + const uint8_t* src = input_next_frame_->buffer(webrtc::kYPlane); |
| + uint8* dst = reinterpret_cast<uint8*>(input_buffer->memory()); |
| + int width = input_frame_dimensions_.width(); |
| + int stride = input_next_frame_->stride(webrtc::kYPlane); |
| + for (int i = 0; i < input_next_frame_->height(); ++i) { |
| + memcpy(dst, src, width); |
| +      dst += width; |
| +      src += stride; |
| + } |
| + src = input_next_frame_->buffer(webrtc::kUPlane); |
| + width = input_frame_dimensions_.width() / 2; |
| + stride = input_next_frame_->stride(webrtc::kUPlane); |
| + for (int i = 0; i < input_next_frame_->height() / 2; ++i) { |
| + memcpy(dst, src, width); |
| +      dst += width; |
| +      src += stride; |
| + } |
| + src = input_next_frame_->buffer(webrtc::kVPlane); |
| + width = input_frame_dimensions_.width() / 2; |
| + stride = input_next_frame_->stride(webrtc::kVPlane); |
| + for (int i = 0; i < input_next_frame_->height() / 2; ++i) { |
| + memcpy(dst, src, width); |
| +      dst += width; |
| +      src += stride; |
| + } |
| + |
| + video_encoder_->Encode( |
| + media::BitstreamBuffer( |
| + index, |
| + input_buffer->handle(), |
| + input_frame_dimensions_.GetArea() * 3 / 2), |
| + input_next_frame_keyframe_); |
| + } |
| + |
| + input_next_frame_ = NULL; |
| + input_next_frame_keyframe_ = false; |
| + input_buffers_free_.pop_back(); |
| + |
| + *async_retval_ = WEBRTC_VIDEO_CODEC_OK; |
| + async_waiter_->Signal(); |
| + async_retval_ = NULL; |
| + async_waiter_ = NULL; |
| +} |
| + |
| +RTCVideoEncoder::RTCVideoEncoder( |
| + media::VideoCodecProfile profile, |
| + const scoped_refptr<media::GpuVideoAcceleratorFactories>& gpu_factories) |
| + : video_codec_profile_(profile), |
| + gpu_factories_(gpu_factories), |
| + impl_message_loop_proxy_(gpu_factories_->GetMessageLoop()), |
| + weak_this_factory_(this), |
| + weak_this_(weak_this_factory_.GetWeakPtr()), |
| + encoded_image_callback_(NULL), |
| + impl_status_(WEBRTC_VIDEO_CODEC_OK) { |
| + DVLOG(1) << "RTCVideoEncoder(): profile=" << profile; |
| +} |
| + |
| +RTCVideoEncoder::~RTCVideoEncoder() { |
| + DCHECK(thread_checker_.CalledOnValidThread()); |
| + Release(); |
| + DCHECK(!impl_); |
| +} |
| + |
| +int32_t RTCVideoEncoder::InitEncode(const webrtc::VideoCodec* codec_settings, |
| + int32_t number_of_cores, |
| + uint32_t max_payload_size) { |
| + DVLOG(1) << "InitEncode(): codecType=" << codec_settings->codecType |
| + << ", width=" << codec_settings->width |
| + << ", height=" << codec_settings->height |
| + << ", startBitrate=" << codec_settings->startBitrate; |
| + DCHECK(thread_checker_.CalledOnValidThread()); |
| + DCHECK(!impl_); |
| + |
| + impl_ = new Impl(weak_this_); |
| + base::WaitableEvent initialization_waiter(true, false); |
| + int32_t initialization_retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
| + impl_message_loop_proxy_->PostTask( |
| + FROM_HERE, |
| + base::Bind(&RTCVideoEncoder::Impl::CreateAndInitializeVEA, |
| + impl_, |
| + *codec_settings, |
| + video_codec_profile_, |
| + &initialization_waiter, |
| + &initialization_retval, |
| + gpu_factories_)); |
| + |
| + // webrtc::VideoEncoder expects this call to be synchronous. |
| + initialization_waiter.Wait(); |
| + return initialization_retval; |
| +} |
| + |
| +int32_t RTCVideoEncoder::Encode( |
| + const webrtc::I420VideoFrame& input_image, |
| + const webrtc::CodecSpecificInfo* codec_specific_info, |
| + const std::vector<webrtc::VideoFrameType>* frame_types) { |
| + DVLOG(3) << "Encode()"; |
| + //DCHECK(thread_checker_.CalledOnValidThread()); |
| + if (!impl_) { |
| + DVLOG(3) << "Encode(): returning impl_status_=" << impl_status_; |
| + return impl_status_; |
| + } |
| + |
| + base::WaitableEvent encode_waiter(true, false); |
| + int32_t encode_retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED; |
| + impl_message_loop_proxy_->PostTask( |
| + FROM_HERE, |
| + base::Bind(&RTCVideoEncoder::Impl::Enqueue, |
| + impl_, |
| + &input_image, |
| + (frame_types->front() == webrtc::kKeyFrame), |
| + &encode_waiter, |
| + &encode_retval)); |
| + |
| + // webrtc::VideoEncoder expects this call to be synchronous. |
| + encode_waiter.Wait(); |
| + DVLOG(3) << "Encode(): returning encode_retval=" << encode_retval; |
| + return encode_retval; |
| +} |
| + |
| +int32_t RTCVideoEncoder::RegisterEncodeCompleteCallback( |
| + webrtc::EncodedImageCallback* callback) { |
| + DVLOG(3) << "RegisterEncodeCompleteCallback()"; |
| + DCHECK(thread_checker_.CalledOnValidThread()); |
| + if (!impl_) { |
| + DVLOG(3) << "RegisterEncodeCompleteCallback(): returning " << impl_status_; |
| + return impl_status_; |
| + } |
| + |
| + encoded_image_callback_ = callback; |
| + return WEBRTC_VIDEO_CODEC_OK; |
| +} |
| + |
| +int32_t RTCVideoEncoder::Release() { |
| + DVLOG(3) << "Release()"; |
| + DCHECK(thread_checker_.CalledOnValidThread()); |
| + |
| +  // Reset |gpu_factories_| in case we reuse this encoder. |
| + gpu_factories_->Abort(); |
| + gpu_factories_ = gpu_factories_->Clone(); |
| + impl_message_loop_proxy_->PostTask( |
| + FROM_HERE, |
| + base::Bind(&RTCVideoEncoder::Impl::Destroy, impl_)); |
|
hshi1
2013/08/02 17:29:36
nit: the above 2 lines ("FROM_HERE, base::Bind(&RT
sheu
2013/08/03 01:31:03
Done.
|
| + impl_ = NULL; |
| + impl_status_ = WEBRTC_VIDEO_CODEC_OK; |
| + return WEBRTC_VIDEO_CODEC_OK; |
| +} |
| + |
| +int32_t RTCVideoEncoder::SetChannelParameters(uint32_t packet_loss, int rtt) { |
| + DVLOG(3) << "SetChannelParameters(): packet_loss=" << packet_loss |
| + << ", rtt=" << rtt; |
| + DCHECK(thread_checker_.CalledOnValidThread()); |
| + // Ignored. |
| + return WEBRTC_VIDEO_CODEC_OK; |
| +} |
| + |
| +int32_t RTCVideoEncoder::SetRates(uint32_t new_bit_rate, uint32_t frame_rate) { |
| + DVLOG(3) << "SetRates(): new_bit_rate=" << new_bit_rate |
| + << ", frame_rate=" << frame_rate; |
| + DCHECK(thread_checker_.CalledOnValidThread()); |
| + if (!impl_) { |
| + DVLOG(3) << "SetRates(): returning " << impl_status_; |
| + return impl_status_; |
| + } |
| + |
| + impl_message_loop_proxy_->PostTask( |
| + FROM_HERE, |
| + base::Bind(&RTCVideoEncoder::Impl::RequestEncodingParameterChange, |
| + impl_, |
| + new_bit_rate)); |
| + return WEBRTC_VIDEO_CODEC_OK; |
| +} |
| + |
| +void RTCVideoEncoder::ReturnEncodedImage(const scoped_refptr<Impl>& impl, |
| + scoped_ptr<webrtc::EncodedImage> image, |
| + int32 bitstream_buffer_id) { |
| + DCHECK(thread_checker_.CalledOnValidThread()); |
| + |
| + if (impl != impl_) |
| + return; |
| + |
| + DVLOG(3) << "ReturnEncodedImage(): " |
| + "bitstream_buffer_id=" << bitstream_buffer_id; |
| + |
| + if (!impl_) |
| + return; |
| + if (!encoded_image_callback_) |
| + return; |
| + |
| + int32_t retval = encoded_image_callback_->Encoded(*image, NULL, NULL); |
| + if (retval < 0) { |
| + NotifyError(impl_, retval); |
| + return; |
| + } |
| + |
| + // The call through webrtc::EncodedImageCallback is synchronous, so we can |
| + // immediately recycle the output buffer back to the Impl. |
| + impl_message_loop_proxy_->PostTask( |
| + FROM_HERE, |
| + base::Bind(&RTCVideoEncoder::Impl::UseOutputBitstreamBufferId, |
| + impl_, |
| + bitstream_buffer_id)); |
| +} |
| + |
| +void RTCVideoEncoder::NotifyError(const scoped_refptr<Impl>& impl, |
| + int32_t error) { |
| + DCHECK(thread_checker_.CalledOnValidThread()); |
| + |
| + if (impl != impl_) |
| + return; |
| + |
| + DVLOG(1) << "NotifyError(): error=" << error; |
| + |
| + impl_status_ = error; |
| + impl_message_loop_proxy_->PostTask( |
| + FROM_HERE, |
| + base::Bind(&RTCVideoEncoder::Impl::Destroy, impl_)); |
|
hshi1
2013/08/02 17:29:36
nit: ditto
sheu
2013/08/03 01:31:03
Done.
|
| + impl_ = NULL; |
| +} |
| + |
| +} // namespace content |