OLD | NEW |
1 // Copyright 2014 The Chromium Authors. All rights reserved. | 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/renderer/pepper/pepper_media_stream_video_track_host.h" | 5 #include "content/renderer/pepper/pepper_media_stream_video_track_host.h" |
6 | 6 |
7 #include "base/logging.h" | 7 #include "base/logging.h" |
| 8 #include "media/base/yuv_convert.h" |
8 #include "ppapi/c/pp_errors.h" | 9 #include "ppapi/c/pp_errors.h" |
9 #include "ppapi/c/ppb_video_frame.h" | 10 #include "ppapi/c/ppb_video_frame.h" |
| 11 #include "ppapi/host/dispatch_host_message.h" |
| 12 #include "ppapi/host/host_message_context.h" |
| 13 #include "ppapi/proxy/ppapi_messages.h" |
10 #include "ppapi/shared_impl/media_stream_buffer.h" | 14 #include "ppapi/shared_impl/media_stream_buffer.h" |
| 15 #include "third_party/libyuv/include/libyuv/scale.h" |
11 | 16 |
12 using media::VideoFrame; | 17 using media::VideoFrame; |
| 18 using ppapi::host::HostMessageContext; |
| 19 using ppapi::MediaStreamVideoTrackShared; |
13 | 20 |
14 namespace { | 21 namespace { |
15 | 22 |
16 // TODO(penghuang): make it configurable. | 23 const int32_t kDefaultNumberOfBuffers = 4; |
17 const int32_t kNumberOfFrames = 4; | 24 const int32_t kMaxNumberOfBuffers = 8; |
18 | 25 |
19 PP_VideoFrame_Format ToPpapiFormat(VideoFrame::Format format) { | 26 PP_VideoFrame_Format ToPpapiFormat(VideoFrame::Format format) { |
20 switch (format) { | 27 switch (format) { |
21 case VideoFrame::YV12: | 28 case VideoFrame::YV12: |
22 return PP_VIDEOFRAME_FORMAT_YV12; | 29 return PP_VIDEOFRAME_FORMAT_YV12; |
23 case VideoFrame::YV16: | |
24 return PP_VIDEOFRAME_FORMAT_YV16; | |
25 case VideoFrame::I420: | 30 case VideoFrame::I420: |
26 return PP_VIDEOFRAME_FORMAT_I420; | 31 return PP_VIDEOFRAME_FORMAT_I420; |
27 case VideoFrame::YV12A: | |
28 return PP_VIDEOFRAME_FORMAT_YV12A; | |
29 case VideoFrame::YV12J: | |
30 return PP_VIDEOFRAME_FORMAT_YV12J; | |
31 default: | 32 default: |
32 DVLOG(1) << "Unsupported pixel format " << format; | 33 DVLOG(1) << "Unsupported pixel format " << format; |
33 return PP_VIDEOFRAME_FORMAT_UNKNOWN; | 34 return PP_VIDEOFRAME_FORMAT_UNKNOWN; |
34 } | 35 } |
35 } | 36 } |
36 | 37 |
| 38 VideoFrame::Format FromPpapiFormat(PP_VideoFrame_Format format) { |
| 39 switch (format) { |
| 40 case PP_VIDEOFRAME_FORMAT_YV12: |
| 41 return VideoFrame::YV12; |
| 42 case PP_VIDEOFRAME_FORMAT_I420: |
| 43 return VideoFrame::I420; |
| 44 default: |
| 45 DVLOG(1) << "Unsupported pixel format " << format; |
| 46 return VideoFrame::UNKNOWN; |
| 47 } |
| 48 } |
| 49 |
| 50 // Compute size based on the size of the frame received from |
| 51 // MediaStreamVideoSink and the size specified by the plugin. |
| 52 gfx::Size ComputeSize(const gfx::Size& source, |
| 53 const gfx::Size& plugin) { |
| 54 return gfx::Size(plugin.width() ? plugin.width() : source.width(), |
| 55 plugin.height() ? plugin.height() : source.height()); |
| 56 } |
| 57 |
| 58 // Compute format based on the format of the frame received from |
| 59 // MediaStreamVideoSink and the format specified by the plugin. |
| 60 PP_VideoFrame_Format ComputeFormat(PP_VideoFrame_Format source, |
| 61 PP_VideoFrame_Format plugin) { |
| 62 return plugin != PP_VIDEOFRAME_FORMAT_UNKNOWN ? plugin : source; |
| 63 } |
| 64 |
| 65 void ConvertFromMediaVideoFrame(const scoped_refptr<media::VideoFrame>& src, |
| 66 PP_VideoFrame_Format dst_format, |
| 67 const gfx::Size& dst_size, |
| 68 uint8_t* dst) { |
| 69 CHECK(src->format() == VideoFrame::YV12 || |
| 70 src->format() == VideoFrame::I420); |
| 71 if (dst_format == PP_VIDEOFRAME_FORMAT_BGRA) { |
| 72 if (src->coded_size() == dst_size) { |
| 73 media::ConvertYUVToRGB32(src->data(VideoFrame::kYPlane), |
| 74 src->data(VideoFrame::kUPlane), |
| 75 src->data(VideoFrame::kVPlane), |
| 76 dst, |
| 77 dst_size.width(), |
| 78 dst_size.height(), |
| 79 src->stride(VideoFrame::kYPlane), |
| 80 src->stride(VideoFrame::kUPlane), |
| 81 dst_size.width() * 4, |
| 82 media::YV12); |
| 83 } else { |
| 84 media::ScaleYUVToRGB32(src->data(VideoFrame::kYPlane), |
| 85 src->data(VideoFrame::kUPlane), |
| 86 src->data(VideoFrame::kVPlane), |
| 87 dst, |
| 88 src->coded_size().width(), |
| 89 src->coded_size().height(), |
| 90 dst_size.width(), |
| 91 dst_size.height(), |
| 92 src->stride(VideoFrame::kYPlane), |
| 93 src->stride(VideoFrame::kUPlane), |
| 94 dst_size.width() * 4, |
| 95 media::YV12, |
| 96 media::ROTATE_0, |
| 97 media::FILTER_BILINEAR); |
| 98 } |
| 99 } else if (dst_format == PP_VIDEOFRAME_FORMAT_YV12 || |
| 100 dst_format == PP_VIDEOFRAME_FORMAT_I420) { |
| 101 static const size_t kPlanesOrder[][3] = { |
| 102 { VideoFrame::kYPlane, VideoFrame::kVPlane, VideoFrame::kUPlane }, // YV12 |
| 103 { VideoFrame::kYPlane, VideoFrame::kUPlane, VideoFrame::kVPlane }, // I420 |
| 104 }; |
| 105 const int plane_order = (dst_format == PP_VIDEOFRAME_FORMAT_YV12) ? 0 : 1; |
| 106 int dst_width = dst_size.width(); |
| 107 int dst_height = dst_size.height(); |
| 108 libyuv::ScalePlane(src->data(kPlanesOrder[plane_order][0]), |
| 109 src->stride(kPlanesOrder[plane_order][0]), |
| 110 src->coded_size().width(), |
| 111 src->coded_size().height(), |
| 112 dst, dst_width, dst_width, dst_height, |
| 113 libyuv::kFilterBox); |
| 114 dst += dst_width * dst_height; |
| 115 const int src_halfwidth = (src->coded_size().width() + 1) >> 1; |
| 116 const int src_halfheight = (src->coded_size().height() + 1) >> 1; |
| 117 const int dst_halfwidth = (dst_width + 1) >> 1; |
| 118 const int dst_halfheight = (dst_height + 1) >> 1; |
| 119 libyuv::ScalePlane(src->data(kPlanesOrder[plane_order][1]), |
| 120 src->stride(kPlanesOrder[plane_order][1]), |
| 121 src_halfwidth, src_halfheight, |
| 122 dst, dst_halfwidth, dst_halfwidth, dst_halfheight, |
| 123 libyuv::kFilterBox); |
| 124 dst += dst_halfwidth * dst_halfheight; |
| 125 libyuv::ScalePlane(src->data(kPlanesOrder[plane_order][2]), |
| 126 src->stride(kPlanesOrder[plane_order][2]), |
| 127 src_halfwidth, src_halfheight, |
| 128 dst, dst_halfwidth, dst_halfwidth, dst_halfheight, |
| 129 libyuv::kFilterBox); |
| 130 } else { |
| 131 NOTREACHED(); |
| 132 } |
| 133 } |
| 134 |
37 } // namespace | 135 } // namespace |
38 | 136 |
39 namespace content { | 137 namespace content { |
40 | 138 |
41 PepperMediaStreamVideoTrackHost::PepperMediaStreamVideoTrackHost( | 139 PepperMediaStreamVideoTrackHost::PepperMediaStreamVideoTrackHost( |
42 RendererPpapiHost* host, | 140 RendererPpapiHost* host, |
43 PP_Instance instance, | 141 PP_Instance instance, |
44 PP_Resource resource, | 142 PP_Resource resource, |
45 const blink::WebMediaStreamTrack& track) | 143 const blink::WebMediaStreamTrack& track) |
46 : PepperMediaStreamTrackHostBase(host, instance, resource), | 144 : PepperMediaStreamTrackHostBase(host, instance, resource), |
47 track_(track), | 145 track_(track), |
48 connected_(false), | 146 connected_(false), |
49 frame_format_(VideoFrame::UNKNOWN), | 147 buffers_(kDefaultNumberOfBuffers), |
| 148 source_frame_format_(PP_VIDEOFRAME_FORMAT_UNKNOWN), |
| 149 plugin_frame_format_(PP_VIDEOFRAME_FORMAT_UNKNOWN), |
50 frame_data_size_(0) { | 150 frame_data_size_(0) { |
51 DCHECK(!track_.isNull()); | 151 DCHECK(!track_.isNull()); |
52 } | 152 } |
53 | 153 |
54 PepperMediaStreamVideoTrackHost::~PepperMediaStreamVideoTrackHost() { | 154 PepperMediaStreamVideoTrackHost::~PepperMediaStreamVideoTrackHost() { |
55 OnClose(); | 155 OnClose(); |
56 } | 156 } |
57 | 157 |
| 158 void PepperMediaStreamVideoTrackHost::InitBuffers() { |
| 159 gfx::Size size = ComputeSize(source_frame_size_, plugin_frame_size_); |
| 160 DCHECK(!size.IsEmpty()); |
| 161 |
| 162 PP_VideoFrame_Format format = |
| 163 ComputeFormat(source_frame_format_, plugin_frame_format_); |
| 164 DCHECK_NE(format, PP_VIDEOFRAME_FORMAT_UNKNOWN); |
| 165 |
| 166 if (format == PP_VIDEOFRAME_FORMAT_BGRA) { |
| 167 frame_data_size_ = size.width() * size.height() * 4; |
| 168 } else { |
| 169 frame_data_size_ = VideoFrame::AllocationSize(FromPpapiFormat(format), |
| 170 size); |
| 171 } |
| 172 |
| 173 DCHECK_GT(frame_data_size_, 0U); |
| 174 int32_t buffer_size = |
| 175 sizeof(ppapi::MediaStreamBuffer::Video) + frame_data_size_; |
| 176 bool result = PepperMediaStreamTrackHostBase::InitBuffers(buffers_, |
| 177 buffer_size); |
| 178 // TODO(penghuang): Send PP_ERROR_NOMEMORY to plugin. |
| 179 CHECK(result); |
| 180 } |
| 181 |
58 void PepperMediaStreamVideoTrackHost::OnClose() { | 182 void PepperMediaStreamVideoTrackHost::OnClose() { |
59 if (connected_) { | 183 if (connected_) { |
60 MediaStreamVideoSink::RemoveFromVideoTrack(this, track_); | 184 MediaStreamVideoSink::RemoveFromVideoTrack(this, track_); |
61 connected_ = false; | 185 connected_ = false; |
62 } | 186 } |
63 } | 187 } |
64 | 188 |
65 void PepperMediaStreamVideoTrackHost::OnVideoFrame( | 189 void PepperMediaStreamVideoTrackHost::OnVideoFrame( |
66 const scoped_refptr<VideoFrame>& frame) { | 190 const scoped_refptr<VideoFrame>& frame) { |
67 DCHECK(frame); | 191 DCHECK(frame); |
68 // TODO(penghuang): Check |frame->end_of_stream()| and close the track. | 192 // TODO(penghuang): Check |frame->end_of_stream()| and close the track. |
69 PP_VideoFrame_Format ppformat = ToPpapiFormat(frame->format()); | 193 PP_VideoFrame_Format ppformat = ToPpapiFormat(frame->format()); |
70 if (ppformat == PP_VIDEOFRAME_FORMAT_UNKNOWN) | 194 if (ppformat == PP_VIDEOFRAME_FORMAT_UNKNOWN) |
71 return; | 195 return; |
72 | 196 |
73 if (frame_size_ != frame->coded_size() || frame_format_ != frame->format()) { | 197 if (source_frame_size_.IsEmpty()) { |
74 frame_size_ = frame->coded_size(); | 198 source_frame_size_ = frame->coded_size(); |
75 frame_format_ = frame->format(); | 199 source_frame_format_ = ppformat; |
76 // TODO(penghuang): Support changing |frame_size_| & |frame_format_| more | 200 InitBuffers(); |
77 // than once. | |
78 DCHECK(!frame_data_size_); | |
79 frame_data_size_ = VideoFrame::AllocationSize(frame_format_, frame_size_); | |
80 int32_t size = sizeof(ppapi::MediaStreamBuffer::Video) + frame_data_size_; | |
81 bool result = InitBuffers(kNumberOfFrames, size); | |
82 // TODO(penghuang): Send PP_ERROR_NOMEMORY to plugin. | |
83 CHECK(result); | |
84 } | 201 } |
85 | 202 |
86 int32_t index = buffer_manager()->DequeueBuffer(); | 203 int32_t index = buffer_manager()->DequeueBuffer(); |
87 // Drop frames if the underlying buffer is full. | 204 // Drop frames if the underlying buffer is full. |
88 if (index < 0) | 205 if (index < 0) { |
| 206 DVLOG(1) << "A frame is dropped."; |
89 return; | 207 return; |
| 208 } |
90 | 209 |
91 // TODO(penghuang): support format conversion and size scaling. | 210 DCHECK(frame->coded_size() == source_frame_size_); |
| 211 DCHECK_EQ(ppformat, source_frame_format_); |
| 212 |
| 213 gfx::Size size = ComputeSize(source_frame_size_, plugin_frame_size_); |
| 214 PP_VideoFrame_Format format = ComputeFormat(source_frame_format_, |
| 215 plugin_frame_format_); |
92 ppapi::MediaStreamBuffer::Video* buffer = | 216 ppapi::MediaStreamBuffer::Video* buffer = |
93 &(buffer_manager()->GetBufferPointer(index)->video); | 217 &(buffer_manager()->GetBufferPointer(index)->video); |
94 buffer->header.size = buffer_manager()->buffer_size(); | 218 buffer->header.size = buffer_manager()->buffer_size(); |
95 buffer->header.type = ppapi::MediaStreamBuffer::TYPE_VIDEO; | 219 buffer->header.type = ppapi::MediaStreamBuffer::TYPE_VIDEO; |
96 buffer->timestamp = frame->GetTimestamp().InSecondsF(); | 220 buffer->timestamp = frame->GetTimestamp().InSecondsF(); |
97 buffer->format = ppformat; | 221 buffer->format = format; |
98 buffer->size.width = frame->coded_size().width(); | 222 buffer->size.width = size.width(); |
99 buffer->size.height = frame->coded_size().height(); | 223 buffer->size.height = size.height(); |
100 buffer->data_size = frame_data_size_; | 224 buffer->data_size = frame_data_size_; |
101 | 225 ConvertFromMediaVideoFrame(frame, format, size, buffer->data); |
102 COMPILE_ASSERT(VideoFrame::kYPlane == 0, y_plane_should_be_0); | |
103 COMPILE_ASSERT(VideoFrame::kUPlane == 1, u_plane_should_be_1); | |
104 COMPILE_ASSERT(VideoFrame::kVPlane == 2, v_plane_should_be_2); | |
105 | |
106 uint8_t* dst = buffer->data; | |
107 size_t num_planes = VideoFrame::NumPlanes(frame->format()); | |
108 for (size_t i = 0; i < num_planes; ++i) { | |
109 const uint8_t* src = frame->data(i); | |
110 const size_t row_bytes = frame->row_bytes(i); | |
111 const size_t src_stride = frame->stride(i); | |
112 int rows = frame->rows(i); | |
113 for (int j = 0; j < rows; ++j) { | |
114 memcpy(dst, src, row_bytes); | |
115 dst += row_bytes; | |
116 src += src_stride; | |
117 } | |
118 } | |
119 | |
120 SendEnqueueBufferMessageToPlugin(index); | 226 SendEnqueueBufferMessageToPlugin(index); |
121 } | 227 } |
122 | 228 |
123 void PepperMediaStreamVideoTrackHost::DidConnectPendingHostToResource() { | 229 void PepperMediaStreamVideoTrackHost::DidConnectPendingHostToResource() { |
124 if (!connected_) { | 230 if (!connected_) { |
125 MediaStreamVideoSink::AddToVideoTrack(this, track_); | 231 MediaStreamVideoSink::AddToVideoTrack(this, track_); |
126 connected_ = true; | 232 connected_ = true; |
127 } | 233 } |
128 } | 234 } |
129 | 235 |
| 236 int32_t PepperMediaStreamVideoTrackHost::OnResourceMessageReceived( |
| 237 const IPC::Message& msg, |
| 238 HostMessageContext* context) { |
| 239 IPC_BEGIN_MESSAGE_MAP(PepperMediaStreamVideoTrackHost, msg) |
| 240 PPAPI_DISPATCH_HOST_RESOURCE_CALL( |
| 241 PpapiHostMsg_MediaStreamVideoTrack_Configure, |
| 242 OnHostMsgConfigure) |
| 243 IPC_END_MESSAGE_MAP() |
| 244 return PepperMediaStreamTrackHostBase::OnResourceMessageReceived(msg, |
| 245 context); |
| 246 } |
| 247 |
| 248 int32_t PepperMediaStreamVideoTrackHost::OnHostMsgConfigure( |
| 249 HostMessageContext* context, |
| 250 const MediaStreamVideoTrackShared::Attributes& attributes) { |
| 251 CHECK(MediaStreamVideoTrackShared::VerifyAttributes(attributes)); |
| 252 |
| 253 bool changed = false; |
| 254 const uint32_t kWHMask = MediaStreamVideoTrackShared::Attributes::MASK_WIDTH | |
| 255 MediaStreamVideoTrackShared::Attributes::MASK_HEIGHT; |
| 256 if (attributes.mask & kWHMask) { |
| 257 gfx::Size new_size = plugin_frame_size_; |
| 258 if (attributes.mask & MediaStreamVideoTrackShared::Attributes::MASK_WIDTH) |
| 259 new_size.set_width(attributes.width); |
| 260 if (attributes.mask & MediaStreamVideoTrackShared::Attributes::MASK_HEIGHT) |
| 261 new_size.set_height(attributes.height); |
| 262 if (ComputeSize(source_frame_size_, plugin_frame_size_) != |
| 263 ComputeSize(source_frame_size_, new_size)) { |
| 264 changed = true; |
| 265 } |
| 266 plugin_frame_size_ = new_size; |
| 267 } |
| 268 |
| 269 if (attributes.mask & MediaStreamVideoTrackShared::Attributes::MASK_BUFFERS) { |
| 270 int32_t buffers = attributes.buffers ? |
| 271 std::min(kMaxNumberOfBuffers, attributes.buffers) : |
| 272 kDefaultNumberOfBuffers; |
| 273 if (buffers != buffers_) { |
| 274 buffers_ = buffers; |
| 275 changed = true; |
| 276 } |
| 277 } |
| 278 |
| 279 if (attributes.mask & MediaStreamVideoTrackShared::Attributes::MASK_FORMAT) { |
| 280 if (plugin_frame_format_ != attributes.format) { |
| 281 PP_VideoFrame_Format original_format = ComputeFormat( |
| 282 source_frame_format_, plugin_frame_format_); |
| 283 PP_VideoFrame_Format new_format = ComputeFormat( |
| 284 source_frame_format_, attributes.format); |
| 285 if (new_format != original_format) |
| 286 changed = true; |
| 287 plugin_frame_format_ = attributes.format; |
| 288 } |
| 289 } |
| 290 |
| 291 // If the first frame has been received, we will re-initialize buffers with |
| 292 // the new settings. Otherwise, we will initialize buffers when we receive |
| 293 // the first frame, because the plugin can only provide a subset of the |
| 294 // attributes, which is not enough to initialize buffers. |
| 295 if (changed && !source_frame_size_.IsEmpty()) |
| 296 InitBuffers(); |
| 297 |
| 298 context->reply_msg = PpapiPluginMsg_MediaStreamVideoTrack_ConfigureReply(); |
| 299 return PP_OK; |
| 300 } |
| 301 |
130 } // namespace content | 302 } // namespace content |
OLD | NEW |