OLD | NEW |
---|---|
1 // Copyright 2014 The Chromium Authors. All rights reserved. | 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/renderer/pepper/pepper_media_stream_video_track_host.h" | 5 #include "content/renderer/pepper/pepper_media_stream_video_track_host.h" |
6 | 6 |
7 #include "base/logging.h" | 7 #include "base/logging.h" |
8 #include "media/base/yuv_convert.h" | |
8 #include "ppapi/c/pp_errors.h" | 9 #include "ppapi/c/pp_errors.h" |
9 #include "ppapi/c/ppb_video_frame.h" | 10 #include "ppapi/c/ppb_video_frame.h" |
11 #include "ppapi/host/dispatch_host_message.h" | |
12 #include "ppapi/host/host_message_context.h" | |
13 #include "ppapi/proxy/ppapi_messages.h" | |
10 #include "ppapi/shared_impl/media_stream_buffer.h" | 14 #include "ppapi/shared_impl/media_stream_buffer.h" |
15 #include "third_party/libyuv/include/libyuv/scale.h" | |
11 | 16 |
12 using media::VideoFrame; | 17 using media::VideoFrame; |
18 using ppapi::host::HostMessageContext; | |
19 using ppapi::MediaStreamVideoTrackShared; | |
20 using ppapi::proxy::SerializedHandle; | |
yzshen1
2014/02/13 19:23:26
You are not using it.
Peng
2014/02/13 21:28:34
Done.
| |
13 | 21 |
14 namespace { | 22 namespace { |
15 | 23 |
16 // TODO(penghuang): make it configurable. | 24 const int32_t kNumberOfBuffers = 4; |
yzshen1
2014/02/13 19:23:26
Is it k*Default*....?
Peng
2014/02/13 21:28:34
Done.
| |
17 const int32_t kNumberOfFrames = 4; | 25 const int32_t kMaxNumberOfBuffers = 8; |
18 | 26 |
19 PP_VideoFrame_Format ToPpapiFormat(VideoFrame::Format format) { | 27 PP_VideoFrame_Format ToPpapiFormat(VideoFrame::Format format) { |
20 switch (format) { | 28 switch (format) { |
21 case VideoFrame::YV12: | 29 case VideoFrame::YV12: |
22 return PP_VIDEOFRAME_FORMAT_YV12; | 30 return PP_VIDEOFRAME_FORMAT_YV12; |
23 case VideoFrame::YV16: | |
24 return PP_VIDEOFRAME_FORMAT_YV16; | |
25 case VideoFrame::I420: | 31 case VideoFrame::I420: |
26 return PP_VIDEOFRAME_FORMAT_I420; | 32 return PP_VIDEOFRAME_FORMAT_I420; |
27 case VideoFrame::YV12A: | |
28 return PP_VIDEOFRAME_FORMAT_YV12A; | |
29 case VideoFrame::YV12J: | |
30 return PP_VIDEOFRAME_FORMAT_YV12J; | |
31 default: | 33 default: |
32 DVLOG(1) << "Unsupported pixel format " << format; | 34 DVLOG(1) << "Unsupported pixel format " << format; |
33 return PP_VIDEOFRAME_FORMAT_UNKNOWN; | 35 return PP_VIDEOFRAME_FORMAT_UNKNOWN; |
34 } | 36 } |
35 } | 37 } |
36 | 38 |
39 VideoFrame::Format FromPpapiFormat(PP_VideoFrame_Format format) { | |
40 switch (format) { | |
41 case PP_VIDEOFRAME_FORMAT_YV12: | |
42 return VideoFrame::YV12; | |
43 case PP_VIDEOFRAME_FORMAT_I420: | |
44 return VideoFrame::I420; | |
45 default: | |
46 DVLOG(1) << "Unsupported pixel format " << format; | |
47 return VideoFrame::UNKNOWN; | |
48 } | |
49 } | |
50 | |
51 // Compute size based on the size of the frame received from | |
52 // MediaStreamVideoSink and the size specified by the plugin. | |
53 gfx::Size ComputeSize(const gfx::Size& frame, | |
54 const gfx::Size& plugin) { | |
55 return gfx::Size(plugin.width() ? plugin.width() : frame.width(), | |
56 plugin.height() ? plugin.height() : frame.height()); | |
57 } | |
58 | |
59 // Compute format based on the format of the frame received from | |
60 // MediaStreamVideoSink and the format specified by the plugin. | |
61 PP_VideoFrame_Format ComputeFormat(PP_VideoFrame_Format frame, | |
62 PP_VideoFrame_Format plugin) { | |
63 return plugin != PP_VIDEOFRAME_FORMAT_UNKNOWN ? plugin : frame; | |
64 } | |
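For example (illustrative values): if the source delivers 640x480 I420 frames and the plugin has configured only a width of 320, leaving height and format unset, the helpers above yield an effective output of 320x480 in I420.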
65 | |
66 void ConvertFromMediaVideoFrame(const scoped_refptr<media::VideoFrame>& src, | |
67 PP_VideoFrame_Format dst_format, | |
68 const gfx::Size& dst_size, | |
69 uint8_t* dst) { | |
70 CHECK(src->format() == VideoFrame::YV12 || | |
71 src->format() == VideoFrame::I420); | |
72 if (dst_format == PP_VIDEOFRAME_FORMAT_BGRA) { | |
73 if (src->coded_size() == dst_size) { | |
74 media::ConvertYUVToRGB32(src->data(VideoFrame::kYPlane), | |
75 src->data(VideoFrame::kUPlane), | |
76 src->data(VideoFrame::kVPlane), | |
77 dst, | |
78 dst_size.width(), | |
79 dst_size.height(), | |
80 src->stride(VideoFrame::kYPlane), | |
81 src->stride(VideoFrame::kUPlane), | |
82 dst_size.width() * 4, | |
83 media::YV12); | |
84 } else { | |
85 media::ScaleYUVToRGB32(src->data(VideoFrame::kYPlane), | |
86 src->data(VideoFrame::kUPlane), | |
87 src->data(VideoFrame::kVPlane), | |
88 dst, | |
89 src->coded_size().width(), | |
90 src->coded_size().height(), | |
91 dst_size.width(), | |
92 dst_size.height(), | |
93 src->stride(VideoFrame::kYPlane), | |
94 src->stride(VideoFrame::kUPlane), | |
95 dst_size.width() * 4, | |
96 media::YV12, | |
97 media::ROTATE_0, | |
98 media::FILTER_BILINEAR); | |
99 } | |
100 } else if (dst_format == PP_VIDEOFRAME_FORMAT_YV12 || | |
101 dst_format == PP_VIDEOFRAME_FORMAT_I420) { | |
102 static const size_t kPlanesOrder[][3] = { | |
103 { VideoFrame::kYPlane, VideoFrame::kVPlane, VideoFrame::kUPlane }, // YV12 | |
104 { VideoFrame::kYPlane, VideoFrame::kUPlane, VideoFrame::kVPlane }, // I420 | |
105 }; | |
106 const int plane_order = (dst_format == PP_VIDEOFRAME_FORMAT_YV12) ? 0 : 1; | |
107 int dst_width = dst_size.width(); | |
108 int dst_height = dst_size.height(); | |
109 libyuv::ScalePlane(src->data(kPlanesOrder[plane_order][0]), | |
110 src->stride(kPlanesOrder[plane_order][0]), | |
111 src->coded_size().width(), | |
112 src->coded_size().height(), | |
113 dst, dst_width, dst_width, dst_height, | |
114 libyuv::kFilterBox); | |
115 dst += dst_width * dst_height; | |
116 const int src_halfwidth = (src->coded_size().width() + 1) >> 1; | |
117 const int src_halfheight = (src->coded_size().height() + 1) >> 1; | |
118 const int dst_halfwidth = (dst_width + 1) >> 1; | |
119 const int dst_halfheight = (dst_height + 1) >> 1; | |
120 libyuv::ScalePlane(src->data(kPlanesOrder[plane_order][1]), | |
121 src->stride(kPlanesOrder[plane_order][1]), | |
122 src_halfwidth, src_halfheight, | |
123 dst, dst_halfwidth, dst_halfwidth, dst_halfheight, | |
124 libyuv::kFilterBox); | |
125 dst += dst_halfwidth * dst_halfheight; | |
126 libyuv::ScalePlane(src->data(kPlanesOrder[plane_order][2]), | |
127 src->stride(kPlanesOrder[plane_order][2]), | |
128 src_halfwidth, src_halfheight, | |
129 dst, dst_halfwidth, dst_halfwidth, dst_halfheight, | |
130 libyuv::kFilterBox); | |
131 } else { | |
132 NOTREACHED(); | |
133 } | |
134 } | |
135 | |
37 } // namespace | 136 } // namespace |
38 | 137 |
39 namespace content { | 138 namespace content { |
40 | 139 |
41 PepperMediaStreamVideoTrackHost::PepperMediaStreamVideoTrackHost( | 140 PepperMediaStreamVideoTrackHost::PepperMediaStreamVideoTrackHost( |
42 RendererPpapiHost* host, | 141 RendererPpapiHost* host, |
43 PP_Instance instance, | 142 PP_Instance instance, |
44 PP_Resource resource, | 143 PP_Resource resource, |
45 const blink::WebMediaStreamTrack& track) | 144 const blink::WebMediaStreamTrack& track) |
46 : PepperMediaStreamTrackHostBase(host, instance, resource), | 145 : PepperMediaStreamTrackHostBase(host, instance, resource), |
47 track_(track), | 146 track_(track), |
48 connected_(false), | 147 connected_(false), |
49 frame_format_(VideoFrame::UNKNOWN), | 148 buffers_(kNumberOfBuffers), |
149 frame_format_(PP_VIDEOFRAME_FORMAT_UNKNOWN), | |
150 plugin_frame_format_(PP_VIDEOFRAME_FORMAT_UNKNOWN), | |
50 frame_data_size_(0) { | 151 frame_data_size_(0) { |
51 DCHECK(!track_.isNull()); | 152 DCHECK(!track_.isNull()); |
52 } | 153 } |
53 | 154 |
54 PepperMediaStreamVideoTrackHost::~PepperMediaStreamVideoTrackHost() { | 155 PepperMediaStreamVideoTrackHost::~PepperMediaStreamVideoTrackHost() { |
55 OnClose(); | 156 OnClose(); |
56 } | 157 } |
57 | 158 |
159 void PepperMediaStreamVideoTrackHost::InitBuffers() { | |
160 gfx::Size size = ComputeSize(frame_size_, plugin_frame_size_); | |
161 DCHECK(!size.IsEmpty()); | |
162 | |
163 PP_VideoFrame_Format format = | |
164 ComputeFormat(frame_format_, plugin_frame_format_); | |
165 DCHECK_NE(format, PP_VIDEOFRAME_FORMAT_UNKNOWN); | |
166 | |
167 if (format == PP_VIDEOFRAME_FORMAT_BGRA) { | |
168 frame_data_size_ = size.width() * size.height() * 4; | |
169 } else { | |
170 frame_data_size_ = VideoFrame::AllocationSize(FromPpapiFormat(format), | |
171 size); | |
172 } | |
173 | |
174 DCHECK_GT(frame_data_size_, 0U); | |
175 int32_t buffer_size = | |
176 sizeof(ppapi::MediaStreamBuffer::Video) + frame_data_size_; | |
177 bool result = PepperMediaStreamTrackHostBase::InitBuffers(buffers_, | |
178 buffer_size); | |
179 // TODO(penghuang): Send PP_ERROR_NOMEMORY to plugin. | |
180 CHECK(result); | |
181 } | |
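As a rough sanity check of the size math in InitBuffers() (illustrative numbers only): for a 320x240 buffer, the I420/YV12 path needs about 320 * 240 * 3 / 2 = 115200 bytes of frame data, while the BGRA path needs 320 * 240 * 4 = 307200; each shared buffer additionally carries sizeof(ppapi::MediaStreamBuffer::Video) bytes of header, as reflected in buffer_size above.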
182 | |
58 void PepperMediaStreamVideoTrackHost::OnClose() { | 183 void PepperMediaStreamVideoTrackHost::OnClose() { |
59 if (connected_) { | 184 if (connected_) { |
60 MediaStreamVideoSink::RemoveFromVideoTrack(this, track_); | 185 MediaStreamVideoSink::RemoveFromVideoTrack(this, track_); |
61 connected_ = false; | 186 connected_ = false; |
62 } | 187 } |
63 } | 188 } |
64 | 189 |
65 void PepperMediaStreamVideoTrackHost::OnVideoFrame( | 190 void PepperMediaStreamVideoTrackHost::OnVideoFrame( |
66 const scoped_refptr<VideoFrame>& frame) { | 191 const scoped_refptr<VideoFrame>& frame) { |
67 DCHECK(frame); | 192 DCHECK(frame); |
68 // TODO(penghuang): Check |frame->end_of_stream()| and close the track. | 193 // TODO(penghuang): Check |frame->end_of_stream()| and close the track. |
69 PP_VideoFrame_Format ppformat = ToPpapiFormat(frame->format()); | 194 PP_VideoFrame_Format ppformat = ToPpapiFormat(frame->format()); |
70 if (ppformat == PP_VIDEOFRAME_FORMAT_UNKNOWN) | 195 if (ppformat == PP_VIDEOFRAME_FORMAT_UNKNOWN) |
71 return; | 196 return; |
72 | 197 |
73 if (frame_size_ != frame->coded_size() || frame_format_ != frame->format()) { | 198 if (frame_size_.IsEmpty()) { |
74 frame_size_ = frame->coded_size(); | 199 frame_size_ = frame->coded_size(); |
75 frame_format_ = frame->format(); | 200 frame_format_ = ppformat; |
76 // TODO(penghuang): Support changing |frame_size_| & |frame_format_| more | 201 InitBuffers(); |
77 // than once. | |
78 DCHECK(!frame_data_size_); | |
79 frame_data_size_ = VideoFrame::AllocationSize(frame_format_, frame_size_); | |
80 int32_t size = sizeof(ppapi::MediaStreamBuffer::Video) + frame_data_size_; | |
81 bool result = InitBuffers(kNumberOfFrames, size); | |
82 // TODO(penghuang): Send PP_ERROR_NOMEMORY to plugin. | |
83 CHECK(result); | |
84 } | 202 } |
85 | 203 |
86 int32_t index = buffer_manager()->DequeueBuffer(); | 204 int32_t index = buffer_manager()->DequeueBuffer(); |
87 // Drop frames if the underlying buffer is full. | 205 // Drop frames if the underlying buffer is full. |
88 if (index < 0) | 206 if (index < 0) { |
207 DVLOG(1) << "A frame is dropped."; | |
89 return; | 208 return; |
209 } | |
90 | 210 |
91 // TODO(penghuang): support format conversion and size scaling. | 211 DCHECK(frame->coded_size() == frame_size_); |
yzshen1
2014/02/13 19:23:26
(just to double check) So OnVideoFrame never retur
Peng
2014/02/13 21:28:34
Currently, it will always be the same. But w3c has an
yzshen1
2014/02/14 18:25:43
If the backend changes to support that in the futu
Peng
2014/02/14 20:04:49
How about use CHECK()? I think it can help us loca
| |
212 DCHECK_EQ(ppformat, frame_format_); | |
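A minimal sketch of the CHECK()-based guard discussed in the thread above, assuming the source size and format can never change after the first frame (the patchset as shown keeps DCHECKs):

  // Hypothetical hard guard (not part of this patchset): fail loudly if the
  // source ever delivers a frame whose size or format differs from the first.
  CHECK(frame->coded_size() == frame_size_);
  CHECK_EQ(ppformat, frame_format_);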
213 | |
214 gfx::Size size = ComputeSize(frame_size_, plugin_frame_size_); | |
215 PP_VideoFrame_Format format = ComputeFormat(frame_format_, | |
216 plugin_frame_format_); | |
92 ppapi::MediaStreamBuffer::Video* buffer = | 217 ppapi::MediaStreamBuffer::Video* buffer = |
93 &(buffer_manager()->GetBufferPointer(index)->video); | 218 &(buffer_manager()->GetBufferPointer(index)->video); |
94 buffer->header.size = buffer_manager()->buffer_size(); | 219 buffer->header.size = buffer_manager()->buffer_size(); |
95 buffer->header.type = ppapi::MediaStreamBuffer::TYPE_VIDEO; | 220 buffer->header.type = ppapi::MediaStreamBuffer::TYPE_VIDEO; |
96 buffer->timestamp = frame->GetTimestamp().InSecondsF(); | 221 buffer->timestamp = frame->GetTimestamp().InSecondsF(); |
97 buffer->format = ppformat; | 222 buffer->format = format; |
98 buffer->size.width = frame->coded_size().width(); | 223 buffer->size.width = size.width(); |
99 buffer->size.height = frame->coded_size().height(); | 224 buffer->size.height = size.height(); |
100 buffer->data_size = frame_data_size_; | 225 buffer->data_size = frame_data_size_; |
yzshen1
2014/02/13 19:23:26
I think the naming is confusing: |frame_data_size_
Peng
2014/02/13 21:28:34
I think plugin_frame_data_size_ is not a good name
yzshen1
2014/02/14 18:25:43
Okay.
| |
101 | 226 ConvertFromMediaVideoFrame(frame, format, size, buffer->data); |
102 COMPILE_ASSERT(VideoFrame::kYPlane == 0, y_plane_should_be_0); | |
103 COMPILE_ASSERT(VideoFrame::kUPlane == 1, u_plane_should_be_1); | |
104 COMPILE_ASSERT(VideoFrame::kVPlane == 2, v_plane_should_be_2); | |
105 | |
106 uint8_t* dst = buffer->data; | |
107 size_t num_planes = VideoFrame::NumPlanes(frame->format()); | |
108 for (size_t i = 0; i < num_planes; ++i) { | |
109 const uint8_t* src = frame->data(i); | |
110 const size_t row_bytes = frame->row_bytes(i); | |
111 const size_t src_stride = frame->stride(i); | |
112 int rows = frame->rows(i); | |
113 for (int j = 0; j < rows; ++j) { | |
114 memcpy(dst, src, row_bytes); | |
115 dst += row_bytes; | |
116 src += src_stride; | |
117 } | |
118 } | |
119 | |
120 SendEnqueueBufferMessageToPlugin(index); | 227 SendEnqueueBufferMessageToPlugin(index); |
121 } | 228 } |
122 | 229 |
123 void PepperMediaStreamVideoTrackHost::DidConnectPendingHostToResource() { | 230 void PepperMediaStreamVideoTrackHost::DidConnectPendingHostToResource() { |
124 if (!connected_) { | 231 if (!connected_) { |
125 MediaStreamVideoSink::AddToVideoTrack(this, track_); | 232 MediaStreamVideoSink::AddToVideoTrack(this, track_); |
126 connected_ = true; | 233 connected_ = true; |
127 } | 234 } |
128 } | 235 } |
129 | 236 |
237 int32_t PepperMediaStreamVideoTrackHost::OnResourceMessageReceived( | |
238 const IPC::Message& msg, | |
239 HostMessageContext* context) { | |
240 IPC_BEGIN_MESSAGE_MAP(PepperMediaStreamVideoTrackHost, msg) | |
241 PPAPI_DISPATCH_HOST_RESOURCE_CALL( | |
242 PpapiHostMsg_MediaStreamVideoTrack_Configure, | |
243 OnHostMsgConfigure) | |
244 IPC_END_MESSAGE_MAP() | |
245 return PepperMediaStreamTrackHostBase::OnResourceMessageReceived(msg, | |
246 context); | |
247 } | |
248 | |
249 int32_t PepperMediaStreamVideoTrackHost::OnHostMsgConfigure( | |
yzshen1
2014/02/13 19:23:26
The API definition: Do we allow Configure() to be
Peng
2014/02/13 21:28:34
I think we have to support calling it multiple tim
yzshen1
2014/02/14 18:25:43
I feel a little nervous about potential corruption
Peng
2014/02/14 20:04:49
Please check the code in PepperMediaStreamVideoTra
| |
250 HostMessageContext* context, | |
251 const MediaStreamVideoTrackShared::Attributes& attributes) { | |
252 CHECK(MediaStreamVideoTrackShared::VerifyAttributes(attributes)); | |
yzshen1
2014/02/13 19:23:26
Should we reply the incoming request with failure
Peng
2014/02/13 21:28:34
Because MediaStreamVideoTrackResource will use th
| |
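For reference, a hedged sketch of the failure-reply alternative raised in the thread above; the patchset keeps the CHECK because the plugin-side resource is expected to validate the attributes before sending:

  // Hypothetical alternative (not what this patchset does): reject malformed
  // attributes with an error result instead of crashing the host.
  if (!MediaStreamVideoTrackShared::VerifyAttributes(attributes))
    return PP_ERROR_BADARGUMENT;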
253 | |
254 bool changed = false; | |
255 const uint32_t kWHMask = MediaStreamVideoTrackShared::Attributes::MASK_WIDTH | | |
256 MediaStreamVideoTrackShared::Attributes::MASK_HEIGHT; | |
257 if (attributes.mask & kWHMask) { | |
258 gfx::Size new_size = plugin_frame_size_; | |
259 if (attributes.mask & MediaStreamVideoTrackShared::Attributes::MASK_WIDTH) | |
260 new_size.set_width(attributes.width); | |
261 if (attributes.mask & MediaStreamVideoTrackShared::Attributes::MASK_HEIGHT) | |
262 new_size.set_height(attributes.height); | |
263 if (ComputeSize(frame_size_, plugin_frame_size_) != | |
264 ComputeSize(frame_size_, new_size)) { | |
265 changed = true; | |
266 } | |
267 plugin_frame_size_ = new_size; | |
268 } | |
269 | |
270 if (attributes.mask & MediaStreamVideoTrackShared::Attributes::MASK_BUFFERS) { | |
271 int32_t buffers = attributes.buffers ? | |
yzshen1
2014/02/13 19:23:26
So it is always > 0, right?
Peng
2014/02/13 21:28:34
Yes. MediaStreamVideoTrackShared::VerifyAttributes
| |
272 std::min(kMaxNumberOfBuffers, attributes.buffers) : kNumberOfBuffers; | |
273 if (buffers != buffers_) { | |
274 buffers_ = buffers; | |
275 changed = true; | |
276 } | |
277 } | |
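To make the clamping above concrete: a Configure() call that sets the buffer-count attribute to 0 falls back to kNumberOfBuffers (4), while a request for, say, 100 buffers is clamped to kMaxNumberOfBuffers (8).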
278 | |
279 if (attributes.mask & MediaStreamVideoTrackShared::Attributes::MASK_FORMAT) { | |
280 if (plugin_frame_format_ != attributes.format) { | |
281 PP_VideoFrame_Format original_format = ComputeFormat( | |
282 frame_format_, plugin_frame_format_); | |
283 PP_VideoFrame_Format new_format = ComputeFormat( | |
284 frame_format_, attributes.format); | |
285 if (new_format != original_format) | |
286 changed = true; | |
287 plugin_frame_format_ = attributes.format; | |
288 } | |
289 } | |
290 | |
291 // If the first frame has been received, we will re init buffers with | |
yzshen1
2014/02/13 19:23:26
re init -> re-initialize
Peng
2014/02/13 21:28:34
Done.
| |
292 // new settings. Otherwise, we will initialize the buffers when we receive | |
293 // the first frame, because the plugin can only provide part of the | |
294 // attributes, which are not enough to initialize the buffers. | |
295 if (changed && !frame_size_.IsEmpty()) { | |
yzshen1
2014/02/13 19:23:26
nit: no need to have {}
Peng
2014/02/13 21:28:34
Done.
| |
296 InitBuffers(); | |
297 } | |
298 | |
299 context->reply_msg = PpapiPluginMsg_MediaStreamVideoTrack_ConfigureReply(); | |
300 return PP_OK; | |
301 } | |
302 | |
130 } // namespace content | 303 } // namespace content |