Chromium Code Reviews
Unified Diff: content/renderer/pepper/pepper_media_stream_video_track_host.cc

Issue 150403006: [PPAPI][MediaStream] Support configure for video input. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Update (created 6 years, 10 months ago)
Index: content/renderer/pepper/pepper_media_stream_video_track_host.cc
diff --git a/content/renderer/pepper/pepper_media_stream_video_track_host.cc b/content/renderer/pepper/pepper_media_stream_video_track_host.cc
index 7512c5a84ca8b2de5974e5662cc59d02a2bf218e..f260e457eaad9e6074ab98186497383abef981a8 100644
--- a/content/renderer/pepper/pepper_media_stream_video_track_host.cc
+++ b/content/renderer/pepper/pepper_media_stream_video_track_host.cc
@@ -5,35 +5,137 @@
#include "content/renderer/pepper/pepper_media_stream_video_track_host.h"
#include "base/logging.h"
+#include "media/base/yuv_convert.h"
#include "ppapi/c/pp_errors.h"
#include "ppapi/c/ppb_video_frame.h"
+#include "ppapi/host/dispatch_host_message.h"
+#include "ppapi/host/host_message_context.h"
+#include "ppapi/proxy/ppapi_messages.h"
#include "ppapi/shared_impl/media_stream_buffer.h"
+#include "third_party/libyuv/include/libyuv/scale.h"
using media::VideoFrame;
+using ppapi::host::HostMessageContext;
+using ppapi::MediaStreamVideoTrackShared;
+using ppapi::proxy::SerializedHandle;
Ronghua Wu (Left Chromium) 2014/02/12 19:54:15 order
Peng 2014/02/13 17:19:11 Sorry. Which one is not in order?
namespace {
-// TODO(penghuang): make it configurable.
-const int32_t kNumberOfFrames = 4;
+const int32_t kNumberOfBuffers = 4;
+const int32_t kMaxNumberOfBuffers = 8;
PP_VideoFrame_Format ToPpapiFormat(VideoFrame::Format format) {
switch (format) {
case VideoFrame::YV12:
return PP_VIDEOFRAME_FORMAT_YV12;
- case VideoFrame::YV16:
- return PP_VIDEOFRAME_FORMAT_YV16;
case VideoFrame::I420:
return PP_VIDEOFRAME_FORMAT_I420;
- case VideoFrame::YV12A:
- return PP_VIDEOFRAME_FORMAT_YV12A;
- case VideoFrame::YV12J:
- return PP_VIDEOFRAME_FORMAT_YV12J;
default:
DVLOG(1) << "Unsupported pixel format " << format;
return PP_VIDEOFRAME_FORMAT_UNKNOWN;
}
}
+VideoFrame::Format FromPpapiFormat(PP_VideoFrame_Format format) {
+ switch (format) {
+ case PP_VIDEOFRAME_FORMAT_YV12:
+ return VideoFrame::YV12;
+ case PP_VIDEOFRAME_FORMAT_I420:
+ return VideoFrame::I420;
+ default:
+ DVLOG(1) << "Unsupported pixel format " << format;
+ return VideoFrame::UNKNOWN;
+ }
+}
+
+gfx::Size ComputeSize(const gfx::Size& frame,
Ronghua Wu (Left Chromium) 2014/02/12 19:54:15 ComputeSize and ComputeFormat are not so obvious.
Peng 2014/02/13 17:19:11 Done.
+ const gfx::Size& plugin) {
+ return gfx::Size(plugin.width() ? plugin.width() : frame.width(),
+ plugin.height() ? plugin.height() : frame.height());
+}
+
+PP_VideoFrame_Format ComputeFormat(PP_VideoFrame_Format frame,
+ PP_VideoFrame_Format plugin) {
+ return plugin != PP_VIDEOFRAME_FORMAT_UNKNOWN ? plugin : frame;
+}
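Regarding the "not so obvious" comment above: the rule behind these helpers is that a zero dimension (or PP_VIDEOFRAME_FORMAT_UNKNOWN) from the plugin means "keep whatever the source frame provides". A minimal standalone sketch of that rule follows; the Size struct and Merge() name are stand-ins for gfx::Size and ComputeSize(), not code from the patch.

#include <cstdio>

struct Size { int width, height; };  // Stand-in for gfx::Size in this sketch.

// Mirrors ComputeSize() above: any zero plugin dimension falls back to the
// corresponding dimension of the source frame.
static Size Merge(const Size& frame, const Size& plugin) {
  return { plugin.width ? plugin.width : frame.width,
           plugin.height ? plugin.height : frame.height };
}

int main() {
  const Size frame = {640, 480};
  const Size a = Merge(frame, {0, 0});    // {640, 480}: plugin configured nothing.
  const Size b = Merge(frame, {320, 0});  // {320, 480}: plugin configured width only.
  std::printf("%dx%d %dx%d\n", a.width, a.height, b.width, b.height);
  return 0;
}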
+
+void ConvertFromMediaVideoFrame(const scoped_refptr<media::VideoFrame>& src,
+ uint8_t* dst,
+ PP_VideoFrame_Format dst_format,
+ const gfx::Size& dst_size) {
Ronghua Wu (Left Chromium) 2014/02/12 19:54:15 nit, prefer to have read only args and then read/write args.
Peng 2014/02/13 17:19:11 Done.
+ CHECK(src->format() == VideoFrame::YV12 ||
+ src->format() == VideoFrame::I420);
+ if (dst_format == PP_VIDEOFRAME_FORMAT_BGRA) {
+ if (src->coded_size() == dst_size) {
+ media::ConvertYUVToRGB32(src->data(VideoFrame::kYPlane),
+ src->data(VideoFrame::kUPlane),
+ src->data(VideoFrame::kVPlane),
+ dst,
+ dst_size.width(),
+ dst_size.height(),
+ src->stride(VideoFrame::kYPlane),
+ src->stride(VideoFrame::kUPlane),
+ dst_size.width() * 4,
+ media::YV12);
+ } else {
+ media::ScaleYUVToRGB32(src->data(VideoFrame::kYPlane),
Ronghua Wu (Left Chromium) 2014/02/12 19:54:15 I think calling ScaleYUVToRGB32 with the same size is
Peng 2014/02/13 17:19:11 I am not sure. Probably they are not the same. I check
+ src->data(VideoFrame::kUPlane),
+ src->data(VideoFrame::kVPlane),
+ dst,
+ src->coded_size().width(),
+ src->coded_size().height(),
+ dst_size.width(),
+ dst_size.height(),
+ src->stride(VideoFrame::kYPlane),
+ src->stride(VideoFrame::kUPlane),
+ dst_size.width() * 4,
+ media::YV12,
+ media::ROTATE_0,
+ media::FILTER_BILINEAR);
+ }
+ } else if (dst_format == PP_VIDEOFRAME_FORMAT_YV12 ||
+ dst_format == PP_VIDEOFRAME_FORMAT_I420) {
+ static const size_t kPlanes[][3] = {
Ronghua Wu (Left Chromium) 2014/02/12 19:54:15 kPlanesOrder?
Peng 2014/02/13 17:19:11 Done.
+ { VideoFrame::kYPlane, VideoFrame::kVPlane, VideoFrame::kUPlane }, // YV12
+ { VideoFrame::kYPlane, VideoFrame::kUPlane, VideoFrame::kVPlane }, // I420
+ };
+ const int plane_order = (dst_format == PP_VIDEOFRAME_FORMAT_YV12) ? 0 : 1;
+
+ if (src->coded_size() == dst_size) {
+ for (int i = 0; i < 3; i++) {
+ size_t plane = kPlanes[plane_order][i];
+ int32_t n = src->stride(plane) * src->rows(plane);
+ memcpy(dst, src->data(plane), n);
+ dst += n;
+ }
+ } else {
+ int w = dst_size.width();
+ int h = dst_size.height();
+ libyuv::ScalePlane(src->data(kPlanes[plane_order][0]),
Ronghua Wu (Left Chromium) 2014/02/12 19:54:15 ditto, I think ScalePlane will just do a copy if the sizes are the same.
Ronghua Wu (Left Chromium) 2014/02/12 19:54:15 And I think you should be able to just use I420Scale.
Peng 2014/02/13 17:19:11 Done.
Peng 2014/02/13 17:19:11 Because we support both YV12 & I420 formats. Use I420Scale
+ src->stride(kPlanes[plane_order][0]),
+ src->coded_size().width(),
+ src->coded_size().height(),
+ dst, w, w, h, libyuv::kFilterBox);
+ dst += w * h;
+ w /= 2;
+ h /= 2;
+ libyuv::ScalePlane(src->data(kPlanes[plane_order][1]),
+ src->stride(kPlanes[plane_order][1]),
+ src->coded_size().width() / 2,
+ src->coded_size().height() / 2,
+ dst, w, w, h, libyuv::kFilterBox);
+ dst += w * h;
+ libyuv::ScalePlane(src->data(kPlanes[plane_order][2]),
+ src->stride(kPlanes[plane_order][2]),
+ src->coded_size().width() / 2,
+ src->coded_size().height() / 2,
+ dst, w, w, h, libyuv::kFilterBox);
+ }
+ } else {
+ NOTREACHED();
+ }
+}
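On the I420Scale suggestion in the thread above: one possible way to collapse the three ScalePlane() calls into a single call is sketched below. It reuses src, dst, dst_format and dst_size from ConvertFromMediaVideoFrame() and assumes the destination layout used above (full-size Y plane followed by two half-size chroma planes); for YV12 output only the destination U/V pointers are swapped. This is a sketch of the suggestion, not necessarily what the patch eventually landed.

  // Sketch: the YUV -> YUV scaling branch expressed with libyuv::I420Scale().
  // media::VideoFrame exposes chroma through kUPlane/kVPlane for both YV12
  // and I420 sources, so only the destination order depends on |dst_format|.
  const int dst_w = dst_size.width();
  const int dst_h = dst_size.height();
  uint8_t* const dst_y = dst;
  uint8_t* const dst_c1 = dst_y + dst_w * dst_h;               // U for I420, V for YV12.
  uint8_t* const dst_c2 = dst_c1 + (dst_w / 2) * (dst_h / 2);  // V for I420, U for YV12.
  uint8_t* const dst_u = (dst_format == PP_VIDEOFRAME_FORMAT_I420) ? dst_c1 : dst_c2;
  uint8_t* const dst_v = (dst_format == PP_VIDEOFRAME_FORMAT_I420) ? dst_c2 : dst_c1;
  libyuv::I420Scale(src->data(VideoFrame::kYPlane), src->stride(VideoFrame::kYPlane),
                    src->data(VideoFrame::kUPlane), src->stride(VideoFrame::kUPlane),
                    src->data(VideoFrame::kVPlane), src->stride(VideoFrame::kVPlane),
                    src->coded_size().width(), src->coded_size().height(),
                    dst_y, dst_w,
                    dst_u, dst_w / 2,
                    dst_v, dst_w / 2,
                    dst_w, dst_h,
                    libyuv::kFilterBox);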
+
} // namespace
namespace content {
@@ -46,7 +148,10 @@ PepperMediaStreamVideoTrackHost::PepperMediaStreamVideoTrackHost(
: PepperMediaStreamTrackHostBase(host, instance, resource),
track_(track),
connected_(false),
- frame_format_(VideoFrame::UNKNOWN),
+ buffers_initialized_(false),
+ buffers_(kNumberOfBuffers),
+ frame_format_(PP_VIDEOFRAME_FORMAT_UNKNOWN),
+ plugin_frame_format_(PP_VIDEOFRAME_FORMAT_UNKNOWN),
frame_data_size_(0) {
DCHECK(!track_.isNull());
}
@@ -55,6 +160,30 @@ PepperMediaStreamVideoTrackHost::~PepperMediaStreamVideoTrackHost() {
OnClose();
}
+void PepperMediaStreamVideoTrackHost::InitBuffers() {
+ gfx::Size size = ComputeSize(frame_size_, plugin_frame_size_);
+ DCHECK(!size.IsEmpty());
+
+ PP_VideoFrame_Format format =
+ ComputeFormat(frame_format_, plugin_frame_format_);
+ DCHECK_NE(format, PP_VIDEOFRAME_FORMAT_UNKNOWN);
+
+ if (format == PP_VIDEOFRAME_FORMAT_BGRA) {
+ frame_data_size_ = size.width() * size.height() * 4;
+ } else {
+ frame_data_size_ = VideoFrame::AllocationSize(FromPpapiFormat(format),
+ size);
+ }
+
+ DCHECK_GT(frame_data_size_, 0U);
+ int32_t buffer_size =
+ sizeof(ppapi::MediaStreamBuffer::Video) + frame_data_size_;
+ bool result = PepperMediaStreamTrackHostBase::InitBuffers(buffers_,
+ buffer_size);
+ // TODO(penghuang): Send PP_ERROR_NOMEMORY to plugin.
+ CHECK(result);
+}
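For reference, the per-frame payload size computed above works out as follows for a hypothetical 640x480 track (a standalone sketch of the math, not code from the patch); each shared-memory buffer additionally carries the small MediaStreamBuffer::Video header added to buffer_size above.

#include <cstdint>
#include <cstdio>

int main() {
  const int32_t width = 640, height = 480;
  // PP_VIDEOFRAME_FORMAT_BGRA: 4 bytes per pixel.
  const int32_t bgra_bytes = width * height * 4;      // 1228800 bytes.
  // YV12 / I420: a full-size Y plane plus two quarter-size chroma planes.
  // (VideoFrame::AllocationSize() may round planes up slightly for alignment.)
  const int32_t yuv_bytes = width * height * 3 / 2;   // 460800 bytes.
  std::printf("BGRA: %d bytes, YV12/I420: %d bytes per frame\n",
              bgra_bytes, yuv_bytes);
  return 0;
}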
+
void PepperMediaStreamVideoTrackHost::OnClose() {
if (connected_) {
MediaStreamVideoSink::RemoveFromVideoTrack(this, track_);
@@ -70,53 +199,37 @@ void PepperMediaStreamVideoTrackHost::OnVideoFrame(
if (ppformat == PP_VIDEOFRAME_FORMAT_UNKNOWN)
return;
- if (frame_size_ != frame->coded_size() || frame_format_ != frame->format()) {
+ if (!buffers_initialized_) {
frame_size_ = frame->coded_size();
- frame_format_ = frame->format();
- // TODO(penghuang): Support changing |frame_size_| & |frame_format_| more
- // than once.
- DCHECK(!frame_data_size_);
- frame_data_size_ = VideoFrame::AllocationSize(frame_format_, frame_size_);
- int32_t size = sizeof(ppapi::MediaStreamBuffer::Video) + frame_data_size_;
- bool result = InitBuffers(kNumberOfFrames, size);
- // TODO(penghuang): Send PP_ERROR_NOMEMORY to plugin.
- CHECK(result);
+ frame_format_ = ppformat;
+ InitBuffers();
+ buffers_initialized_ = true;
Ronghua Wu (Left Chromium) 2014/02/12 19:54:15 set buffers_initialized_ inside InitBuffers
Peng 2014/02/13 17:19:11 Because we will call InitBuffers() multiple times
}
int32_t index = buffer_manager()->DequeueBuffer();
// Drop frames if the underlying buffer is full.
- if (index < 0)
+ if (index < 0) {
+ DVLOG(1) << "A frame is dropped.";
return;
+ }
+
+ DCHECK(frame->coded_size() == frame_size_);
+ DCHECK_EQ(ppformat, frame_format_);
- // TODO(penghuang): support format conversion and size scaling.
+ gfx::Size size = ComputeSize(frame_size_, plugin_frame_size_);
+ PP_VideoFrame_Format format = ComputeFormat(frame_format_,
+ plugin_frame_format_);
ppapi::MediaStreamBuffer::Video* buffer =
&(buffer_manager()->GetBufferPointer(index)->video);
buffer->header.size = buffer_manager()->buffer_size();
buffer->header.type = ppapi::MediaStreamBuffer::TYPE_VIDEO;
buffer->timestamp = frame->GetTimestamp().InSecondsF();
- buffer->format = ppformat;
- buffer->size.width = frame->coded_size().width();
- buffer->size.height = frame->coded_size().height();
+ buffer->format = format;
+ buffer->size.width = size.width();
+ buffer->size.height = size.height();
buffer->data_size = frame_data_size_;
-
- COMPILE_ASSERT(VideoFrame::kYPlane == 0, y_plane_should_be_0);
- COMPILE_ASSERT(VideoFrame::kUPlane == 1, u_plane_should_be_1);
- COMPILE_ASSERT(VideoFrame::kVPlane == 2, v_plane_should_be_2);
-
uint8_t* dst = buffer->data;
- size_t num_planes = VideoFrame::NumPlanes(frame->format());
- for (size_t i = 0; i < num_planes; ++i) {
- const uint8_t* src = frame->data(i);
- const size_t row_bytes = frame->row_bytes(i);
- const size_t src_stride = frame->stride(i);
- int rows = frame->rows(i);
- for (int j = 0; j < rows; ++j) {
- memcpy(dst, src, row_bytes);
- dst += row_bytes;
- src += src_stride;
- }
- }
-
+ ConvertFromMediaVideoFrame(frame, dst, format, size);
SendEnqueueBufferMessageToPlugin(index);
}
@@ -127,4 +240,69 @@ void PepperMediaStreamVideoTrackHost::DidConnectPendingHostToResource() {
}
}
+int32_t PepperMediaStreamVideoTrackHost::OnResourceMessageReceived(
+ const IPC::Message& msg,
+ HostMessageContext* context) {
+ IPC_BEGIN_MESSAGE_MAP(PepperMediaStreamVideoTrackHost, msg)
+ PPAPI_DISPATCH_HOST_RESOURCE_CALL(
+ PpapiHostMsg_MediaStreamVideoTrack_Configure,
+ OnHostMsgConfigure)
+ IPC_END_MESSAGE_MAP()
+ return PepperMediaStreamTrackHostBase::OnResourceMessageReceived(msg,
+ context);
+}
+
+int32_t PepperMediaStreamVideoTrackHost::OnHostMsgConfigure(
+ HostMessageContext* context,
+ const MediaStreamVideoTrackShared::Attributes& attributes) {
+ CHECK(MediaStreamVideoTrackShared::VerifyAttributes(attributes));
+
+ bool changed = false;
+ const uint32_t kWHMask = MediaStreamVideoTrackShared::Attributes::MASK_WIDTH |
+ MediaStreamVideoTrackShared::Attributes::MASK_HEIGHT;
+ if (attributes.mask & kWHMask) {
+ gfx::Size new_size = plugin_frame_size_;
+ if (attributes.mask & MediaStreamVideoTrackShared::Attributes::MASK_WIDTH)
+ new_size.set_width(attributes.width);
+ if (attributes.mask & MediaStreamVideoTrackShared::Attributes::MASK_HEIGHT)
+ new_size.set_height(attributes.height);
+ if (ComputeSize(frame_size_, plugin_frame_size_) !=
+ ComputeSize(frame_size_, new_size)) {
+ changed = true;
+ }
+ plugin_frame_size_ = new_size;
+ }
+
+ if (attributes.mask & MediaStreamVideoTrackShared::Attributes::MASK_BUFFERS) {
+ int32_t buffers = attributes.buffers ?
+ std::min(kMaxNumberOfBuffers, attributes.buffers) : kNumberOfBuffers;
+ if (buffers != buffers_) {
+ buffers_ = buffers;
+ changed = true;
+ }
+ }
+
+ if (attributes.mask & MediaStreamVideoTrackShared::Attributes::MASK_FORMAT) {
+ if (plugin_frame_format_ != attributes.format) {
+ PP_VideoFrame_Format original_format = ComputeFormat(
+ frame_format_, plugin_frame_format_);
+ PP_VideoFrame_Format new_format = ComputeFormat(
+ frame_format_, attributes.format);
+ if (new_format != original_format)
+ changed = true;
+ plugin_frame_format_ = attributes.format;
+ }
+ LOG(ERROR) << "format =" << plugin_frame_format_;
+ }
+
+ // If the buffers have been initialized, we need to re-initialize them with
+ // the new settings. Otherwise, we will initialize the buffers when we receive
Ronghua Wu (Left Chromium) 2014/02/12 19:54:15 why do we need to "initialize buffer when receive
Peng 2014/02/13 17:19:11 Because the plugin may only provide part of the attributes
+ // the first frame.
+ if (changed && buffers_initialized_)
+ InitBuffers();
+
+ context->reply_msg = PpapiPluginMsg_MediaStreamVideoTrack_ConfigureReply();
+ return PP_OK;
+}
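For context, the handler above consumes an Attributes struct whose mask selects which fields the plugin actually set. A hedged sketch of one payload it might receive follows; the field and MASK_* names come from the code above, while the concrete values (320x240, BGRA) and the assumption that unset fields keep their defaults are illustrative only.

  MediaStreamVideoTrackShared::Attributes attributes;
  attributes.mask = MediaStreamVideoTrackShared::Attributes::MASK_WIDTH |
                    MediaStreamVideoTrackShared::Attributes::MASK_HEIGHT |
                    MediaStreamVideoTrackShared::Attributes::MASK_FORMAT;
  attributes.width = 320;                         // Scale incoming frames to 320x240.
  attributes.height = 240;
  attributes.format = PP_VIDEOFRAME_FORMAT_BGRA;  // Host converts YUV to BGRA per frame.
  // MASK_BUFFERS is not set, so the host keeps kNumberOfBuffers (4).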
+
} // namespace content
