| Index: media/capture/service/video_capture_device_client_impl.cc
|
| diff --git a/media/capture/service/video_capture_device_client_impl.cc b/media/capture/service/video_capture_device_client_impl.cc
|
| new file mode 100644
|
| index 0000000000000000000000000000000000000000..4d28c6391f45c5124777ed6abff24fc90571740d
|
| --- /dev/null
|
| +++ b/media/capture/service/video_capture_device_client_impl.cc
|
| @@ -0,0 +1,353 @@
|
| +// Copyright 2016 The Chromium Authors. All rights reserved.
|
| +// Use of this source code is governed by a BSD-style license that can be
|
| +// found in the LICENSE file.
|
| +
|
| +#include "media/capture/service/video_capture_device_client_impl.h"
|
| +
|
| +#include "base/time/time.h"
|
| +#include "media/base/bind_to_current_loop.h"
|
| +#include "media/capture/service/mojo_video_frame.h"
|
| +#include "third_party/libyuv/include/libyuv.h"
|
| +#include "ui/gfx/geometry/rect.h"
|
| +#include "ui/gfx/geometry/size.h"
|
| +
|
| +namespace media {
|
| +
|
| +namespace {
|
| +
|
| +// This implements a VideoCaptureDevice::Client::Buffer as a lightweight wrapper
|
| +// around a MojoVideoFrame. This Buffer is not meant to be used beyond the
|
| +// VideoCaptureDevice and its Client implementation.
|
| +class BufferImpl : public media::VideoCaptureDevice::Client::Buffer {
|
| + public:
|
| +  explicit BufferImpl(scoped_refptr<MojoVideoFrame> video_frame)
|
| +      : video_frame_(std::move(video_frame)) {}
|
| +
|
| + ~BufferImpl() override {}
|
| +
|
| + // media::VideoCaptureDevice::Client::Buffer:
|
| + int id() const override { // Not needed.
|
| + NOTREACHED();
|
| + return 0;
|
| + }
|
| + gfx::Size dimensions() const override { return video_frame_->coded_size(); }
|
| + size_t mapped_size() const override { return video_frame_->mapped_size(); }
|
| + void* data(int plane) override { return video_frame_->data(plane); }
|
| + ClientBuffer AsClientBuffer(int plane) override {
|
| + NOTREACHED();
|
| + return nullptr;
|
| + }
|
| +#if defined(OS_POSIX) && !(defined(OS_MACOSX) && !defined(OS_IOS))
|
| + base::FileDescriptor AsPlatformFile() override {
|
| + NOTREACHED();
|
| + return base::FileDescriptor();
|
| + }
|
| +#endif
|
| +
|
| +  scoped_refptr<MojoVideoFrame> video_frame() const { return video_frame_; }
|
| +
|
| + private:
|
| + const scoped_refptr<MojoVideoFrame> video_frame_;
|
| +
|
| + DISALLOW_COPY_AND_ASSIGN(BufferImpl);
|
| +};
|
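| +// Note: OnIncomingCapturedBuffer() below casts a received Buffer back to
|
| +// BufferImpl to extract the wrapped MojoVideoFrame for the delegate; the
|
| +// concrete type never leaves this translation unit.
|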
| +
|
| +} // namespace
|
| +
|
| +// A pool of (possibly heterogeneous) MojoVideoFrames: hands out a pooled frame
|
| +// of the appropriate size when one is available and allocates a new one
|
| +// otherwise. Frames find their way back into the pool via a DestructionObserver
|
| +// registered on every frame handed out (see the lifecycle sketch below the
|
| +// class declaration). This class is used from the mojo component main thread
|
| +// and from a number of device threads.
|
| +class VideoCaptureDeviceClientImpl::VideoFramePool
|
| + : public base::RefCountedThreadSafe<
|
| + VideoCaptureDeviceClientImpl::VideoFramePool> {
|
| + public:
|
| + VideoFramePool();
|
| +
|
| + // Produces a frame matching the specified |dimensions|, either by grabbing it
|
| + // from the pool or creating a new frame if no suitable one is found.
|
| + scoped_refptr<MojoVideoFrame> CreateFrame(const gfx::Size& dimensions,
|
| + base::TimeDelta timestamp);
|
| +
|
| +  // Once this is called, frames will no longer be inserted into |frames_|.
|
| + void Shutdown();
|
| +
|
| + private:
|
| + // Can't use a WeakPtr since this class is used by multiple threads.
|
| + friend class base::RefCountedThreadSafe<
|
| + VideoCaptureDeviceClientImpl::VideoFramePool>;
|
| + ~VideoFramePool();
|
| +
|
| + // Called when the frame wrapper gets destroyed. |frame| is the actual frame
|
| +  // that was wrapped and is re-inserted into |frames_| by this function.
|
| + void FrameReleased(const scoped_refptr<MojoVideoFrame>& frame);
|
| +
|
| + // For mutual exclusion when accessing from multiple threads.
|
| + base::Lock lock_;
|
| + bool is_shutdown_;
|
| +
|
| + std::list<scoped_refptr<MojoVideoFrame>> frames_;
|
| +
|
| + DISALLOW_COPY_AND_ASSIGN(VideoFramePool);
|
| +};
|
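| +// Lifecycle sketch of the pool. Illustrative only: |pool| and the literal
|
| +// dimensions below are made up for this comment and are not part of the code.
|
| +//
|
| +//   scoped_refptr<MojoVideoFrame> frame =
|
| +//       pool->CreateFrame(gfx::Size(640, 480), base::TimeDelta());
|
| +//   ...fill |frame| and hand it to a consumer...
|
| +//   frame = nullptr;  // Dropping the last reference triggers the
|
| +//                     // DestructionObserver added in CreateFrame(), which
|
| +//                     // re-inserts the underlying frame into |frames_|.
|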
| +
|
| +VideoCaptureDeviceClientImpl::VideoCaptureDeviceClientImpl(Delegate* delegate)
|
| + : delegate_(delegate),
|
| + video_frame_pool_(new VideoFramePool()),
|
| + weak_factory_(this) {
|
| + DCHECK(delegate_);
|
| + // |this| is used exclusively by internal video capture device code which may,
|
| +  // and usually will, operate on a different thread from the constructor.
|
| + thread_checker_.DetachFromThread();
|
| +}
|
| +
|
| +VideoCaptureDeviceClientImpl::~VideoCaptureDeviceClientImpl() {
|
| + // This happens on an OS thread.
|
| + video_frame_pool_->Shutdown();
|
| +}
|
| +
|
| +void VideoCaptureDeviceClientImpl::OnIncomingCapturedData(
|
| + const uint8_t* data,
|
| + int length,
|
| + const media::VideoCaptureFormat& frame_format,
|
| + int clockwise_rotation,
|
| + const base::TimeTicks& timestamp) {
|
| + DVLOG(1) << __FUNCTION__;
|
| + // N.B.: All of the logic throughout this method is lifted directly from
|
| + // content::VideoCaptureDeviceClient and should generally continue to mirror
|
| + // that implementation until this one becomes the source of truth.
|
| + DCHECK(thread_checker_.CalledOnValidThread());
|
| + DCHECK_EQ(media::PIXEL_STORAGE_CPU, frame_format.pixel_storage);
|
| +
|
| + if (!frame_format.IsValid())
|
| + return;
|
| +
|
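| +  // I420 output subsamples chroma by two in each dimension, so the destination
|
| +  // size is rounded down to even values; the parities are used below to check
|
| +  // formats that already require even dimensions.
|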
| + const int width_parity = frame_format.frame_size.width() & 1;
|
| + const int height_parity = frame_format.frame_size.height() & 1;
|
| + const int even_width = frame_format.frame_size.width() & ~1;
|
| + const int even_height = frame_format.frame_size.height() & ~1;
|
| +
|
| + int destination_width = even_width;
|
| + int destination_height = even_height;
|
| +
|
| + if (clockwise_rotation == 90 || clockwise_rotation == 270)
|
| + std::swap(destination_width, destination_height);
|
| +
|
| + DCHECK_EQ(clockwise_rotation % 90, 0)
|
| + << "Rotation must be a multiple of 90, now: " << clockwise_rotation;
|
| + libyuv::RotationMode rotation_mode = libyuv::kRotate0;
|
| + if (clockwise_rotation == 90)
|
| + rotation_mode = libyuv::kRotate90;
|
| + else if (clockwise_rotation == 180)
|
| + rotation_mode = libyuv::kRotate180;
|
| + else if (clockwise_rotation == 270)
|
| + rotation_mode = libyuv::kRotate270;
|
| +
|
| + libyuv::FourCC colorspace = libyuv::FOURCC_ANY;
|
| + bool flip = false;
|
| + switch (frame_format.pixel_format) {
|
| + case media::PIXEL_FORMAT_UNKNOWN:
|
| + break;
|
| + case media::PIXEL_FORMAT_I420:
|
| + DCHECK(!width_parity && !height_parity);
|
| + colorspace = libyuv::FOURCC_I420;
|
| + break;
|
| + case media::PIXEL_FORMAT_YV12:
|
| + DCHECK(!width_parity && !height_parity);
|
| + colorspace = libyuv::FOURCC_YV12;
|
| + break;
|
| + case media::PIXEL_FORMAT_NV12:
|
| + DCHECK(!width_parity && !height_parity);
|
| + colorspace = libyuv::FOURCC_NV12;
|
| + break;
|
| + case media::PIXEL_FORMAT_NV21:
|
| + DCHECK(!width_parity && !height_parity);
|
| + colorspace = libyuv::FOURCC_NV21;
|
| + break;
|
| + case media::PIXEL_FORMAT_YUY2:
|
| + DCHECK(!width_parity && !height_parity);
|
| + colorspace = libyuv::FOURCC_YUY2;
|
| + break;
|
| + case media::PIXEL_FORMAT_UYVY:
|
| + DCHECK(!width_parity && !height_parity);
|
| + colorspace = libyuv::FOURCC_UYVY;
|
| + break;
|
| + case media::PIXEL_FORMAT_RGB24:
|
| +// Linux RGB24 defines red at lowest byte address,
|
| +// see http://linuxtv.org/downloads/v4l-dvb-apis/packed-rgb.html.
|
| +// Windows RGB24 defines blue at lowest byte,
|
| +// see https://msdn.microsoft.com/en-us/library/windows/desktop/dd407253
|
| +#if defined(OS_LINUX)
|
| + colorspace = libyuv::FOURCC_RAW;
|
| +#elif defined(OS_WIN)
|
| + colorspace = libyuv::FOURCC_24BG;
|
| +#else
|
| +      NOTREACHED() << "RGB24 is only available on Linux and Windows platforms";
|
| +#endif
|
| +#if defined(OS_WIN)
|
| + // TODO(wjia): Currently, for RGB24 on WIN, capture device always
|
| + // passes in positive src_width and src_height. Remove this hardcoded
|
| +      // value when negative src_height is supported. The negative src_height
|
| + // indicates that vertical flipping is needed.
|
| + flip = true;
|
| +#endif
|
| + break;
|
| + case media::PIXEL_FORMAT_RGB32:
|
| +// Fall through to PIXEL_FORMAT_ARGB, setting |flip| on Windows
|
| +// platforms.
|
| +#if defined(OS_WIN)
|
| + flip = true;
|
| +#endif
|
| + case media::PIXEL_FORMAT_ARGB:
|
| + colorspace = libyuv::FOURCC_ARGB;
|
| + break;
|
| + case media::PIXEL_FORMAT_MJPEG:
|
| + colorspace = libyuv::FOURCC_MJPG;
|
| + break;
|
| + default:
|
| + NOTREACHED() << media::VideoPixelFormatToString(
|
| + frame_format.pixel_format);
|
| + }
|
| +
|
| + const gfx::Size dimensions(destination_width, destination_height);
|
| + scoped_ptr<Buffer> buffer = ReserveOutputBuffer(
|
| + dimensions, media::PIXEL_FORMAT_I420, media::PIXEL_STORAGE_CPU);
|
| + if (!buffer)
|
| + return;
|
| +
|
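| +  // I420 plane layout: the Y plane stride equals the frame width and the
|
| +  // subsampled U and V plane strides are half of it. A negative source height
|
| +  // below tells libyuv to flip the image vertically.
|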
| + int y_stride = dimensions.width();
|
| + int uv_stride = y_stride / 2;
|
| + if (libyuv::ConvertToI420(
|
| + data, length,
|
| + static_cast<uint8_t*>(buffer->data(media::VideoFrame::kYPlane)),
|
| + y_stride,
|
| + static_cast<uint8_t*>(buffer->data(media::VideoFrame::kUPlane)),
|
| + uv_stride,
|
| + static_cast<uint8_t*>(buffer->data(media::VideoFrame::kVPlane)),
|
| + uv_stride, 0 /* crop_x */, 0 /* crop_y */,
|
| + frame_format.frame_size.width(),
|
| + (flip ? -1 : 1) * frame_format.frame_size.height(), even_width,
|
| + even_height, rotation_mode, colorspace)) {
|
| + DLOG(ERROR) << "Failed to convert frame pixels to I420";
|
| + return;
|
| + }
|
| +
|
| + OnIncomingCapturedBuffer(std::move(buffer), media::VideoCaptureFormat(
|
| + frame_format.frame_size, 0.0f,
|
| + media::PIXEL_FORMAT_I420),
|
| + timestamp);
|
| +}
|
| +
|
| +scoped_ptr<media::VideoCaptureDevice::Client::Buffer>
|
| +VideoCaptureDeviceClientImpl::ReserveOutputBuffer(
|
| + const gfx::Size& dimensions,
|
| + media::VideoPixelFormat pixel_format,
|
| + media::VideoPixelStorage /* pixel_storage */) {
|
| + DVLOG(1) << __FUNCTION__;
|
| + DCHECK(thread_checker_.CalledOnValidThread());
|
| + DCHECK_EQ(media::PIXEL_FORMAT_I420, pixel_format);
|
| + DCHECK(!dimensions.IsEmpty());
|
| +
|
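| +  // The timestamp handed to the pool is the time elapsed since the
|
| +  // base::TimeTicks origin, a platform-specific reference point.
|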
| +  scoped_refptr<MojoVideoFrame> frame = video_frame_pool_->CreateFrame(
|
| +      dimensions, base::TimeTicks::Now() - base::TimeTicks());
|
| +  if (!frame)
|
| +    return nullptr;
|
| +  return make_scoped_ptr<Buffer>(new BufferImpl(std::move(frame)));
|
| +}
|
| +
|
| +void VideoCaptureDeviceClientImpl::OnIncomingCapturedBuffer(
|
| + scoped_ptr<Buffer> buffer,
|
| + const media::VideoCaptureFormat& frame_format,
|
| + const base::TimeTicks& timestamp) {
|
| + DVLOG(1) << __FUNCTION__;
|
| + DCHECK(thread_checker_.CalledOnValidThread());
|
| + DCHECK(buffer);
|
| + DCHECK_EQ(frame_format.pixel_format, media::PIXEL_FORMAT_I420);
|
| + DCHECK_EQ(frame_format.pixel_storage, media::PIXEL_STORAGE_CPU);
|
| +
|
| +  // |buffer| can go out of scope after this: only the wrapped frame matters
|
| +  // and it is handed to the delegate below.
|
| + delegate_->OnFrame(static_cast<BufferImpl*>(buffer.get())->video_frame(),
|
| + timestamp);
|
| +}
|
| +
|
| +void VideoCaptureDeviceClientImpl::OnIncomingCapturedVideoFrame(
|
| + scoped_ptr<Buffer> buffer,
|
| + const scoped_refptr<media::VideoFrame>& frame,
|
| + const base::TimeTicks& timestamp) {
|
| + DCHECK(thread_checker_.CalledOnValidThread());
|
| + DCHECK(buffer);
|
| +
|
| + // TODO(mcasas): Contents capture calls here. Wire this path by wrapping
|
| + // |frame| in a MojoVideoFrame.
|
| + NOTIMPLEMENTED();
|
| +}
|
| +
|
| +void VideoCaptureDeviceClientImpl::OnError(
|
| + const tracked_objects::Location& from_here,
|
| + const std::string& reason) {
|
| + DVLOG(1) << __FUNCTION__ << " " << reason;
|
| + DCHECK(thread_checker_.CalledOnValidThread());
|
| + delegate_->OnError(reason);
|
| +}
|
| +
|
| +double VideoCaptureDeviceClientImpl::GetBufferPoolUtilization() const {
|
| + DCHECK(thread_checker_.CalledOnValidThread());
|
| + NOTIMPLEMENTED();
|
| +  // TODO(mcasas): In content/, the frame pool has a maximum size. Consider
|
| +  // doing the same in this VideoFramePool, and return here the fraction of
|
| +  // the pool that is in use.
|
| + return 0.0;
|
| +}
|
| +
|
| +VideoCaptureDeviceClientImpl::VideoFramePool::VideoFramePool()
|
| + : is_shutdown_(false) {}
|
| +
|
| +scoped_refptr<MojoVideoFrame>
|
| +VideoCaptureDeviceClientImpl::VideoFramePool::CreateFrame(
|
| + const gfx::Size& dimensions,
|
| + base::TimeDelta timestamp) {
|
| + DVLOG(1) << __FUNCTION__ << " " << dimensions.ToString();
|
| + base::AutoLock auto_lock(lock_);
|
| + DCHECK(!is_shutdown_);
|
| +
|
| + std::list<scoped_refptr<MojoVideoFrame>>::iterator suitable_frame =
|
| + std::find_if(frames_.begin(), frames_.end(),
|
| + [dimensions](const scoped_refptr<MojoVideoFrame>& frame) {
|
| + return frame->coded_size() == dimensions;
|
| + });
|
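| +  // Note: only |coded_size| is matched above; this assumes that every frame
|
| +  // created by MojoVideoFrame::CreateMojoVideoFrame() shares the same pixel
|
| +  // format.
|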
| +
|
| + scoped_refptr<MojoVideoFrame> frame;
|
| + if (suitable_frame != frames_.end()) {
|
| + frame = *suitable_frame;
|
| + frames_.erase(suitable_frame);
|
| + DCHECK(frame->handle().is_valid());
|
| + frame->set_timestamp(timestamp);
|
| + frame->metadata()->Clear();
|
| + } else {
|
| + frame = MojoVideoFrame::CreateMojoVideoFrame(dimensions, timestamp);
|
| + }
|
| +  if (!frame) {
|
| +    LOG(ERROR) << "Failed to get/create a video frame";
|
| +    return nullptr;
|
| +  }
|
| +
|
| +  // Make a copy of |frame| to register a DestructionObserver.
|
| + scoped_refptr<MojoVideoFrame> wrapped_frame = frame;
|
| + wrapped_frame->AddDestructionObserver(
|
| + base::Bind(&VideoFramePool::FrameReleased, this, frame));
|
| + return wrapped_frame;
|
| +}
|
| +
|
| +void VideoCaptureDeviceClientImpl::VideoFramePool::Shutdown() {
|
| + base::AutoLock auto_lock(lock_);
|
| + is_shutdown_ = true;
|
| + frames_.clear();
|
| +}
|
| +
|
| +VideoCaptureDeviceClientImpl::VideoFramePool::~VideoFramePool() {
|
| + DCHECK(is_shutdown_);
|
| +}
|
| +
|
| +void VideoCaptureDeviceClientImpl::VideoFramePool::FrameReleased(
|
| + const scoped_refptr<MojoVideoFrame>& frame) {
|
| +  DVLOG(1) << __FUNCTION__;
|
| +  base::AutoLock auto_lock(lock_);
|
| +  if (is_shutdown_)
|
| + return;
|
| + frames_.push_back(frame);
|
| +}
|
| +
|
| +} // namespace media
|