OLD | NEW |
---|---|
1 // Copyright 2011 The Chromium Authors. All rights reserved. | 1 // Copyright 2011 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "cc/video_layer_impl.h" | 5 #include "cc/video_layer_impl.h" |
6 | 6 |
7 #include "base/logging.h" | 7 #include "base/logging.h" |
8 #include "cc/io_surface_draw_quad.h" | 8 #include "cc/io_surface_draw_quad.h" |
9 #include "cc/layer_tree_impl.h" | 9 #include "cc/layer_tree_impl.h" |
10 #include "cc/math_util.h" | 10 #include "cc/math_util.h" |
11 #include "cc/quad_sink.h" | 11 #include "cc/quad_sink.h" |
12 #include "cc/renderer.h" | 12 #include "cc/renderer.h" |
13 #include "cc/resource_provider.h" | 13 #include "cc/resource_provider.h" |
14 #include "cc/stream_video_draw_quad.h" | 14 #include "cc/stream_video_draw_quad.h" |
15 #include "cc/texture_draw_quad.h" | 15 #include "cc/texture_draw_quad.h" |
16 #include "cc/video_frame_provider_client_impl.h" | 16 #include "cc/video_frame_provider_client_impl.h" |
17 #include "cc/yuv_video_draw_quad.h" | 17 #include "cc/yuv_video_draw_quad.h" |
18 #include "cc/yuva_video_draw_quad.h" | |
18 #include "gpu/GLES2/gl2extchromium.h" | 19 #include "gpu/GLES2/gl2extchromium.h" |
19 #include "media/filters/skcanvas_video_renderer.h" | 20 #include "media/filters/skcanvas_video_renderer.h" |
20 #include "third_party/khronos/GLES2/gl2.h" | 21 #include "third_party/khronos/GLES2/gl2.h" |
21 #include "third_party/khronos/GLES2/gl2ext.h" | 22 #include "third_party/khronos/GLES2/gl2ext.h" |
22 | 23 |
23 namespace cc { | 24 namespace cc { |
24 | 25 |
25 // static | 26 // static |
26 scoped_ptr<VideoLayerImpl> VideoLayerImpl::create(LayerTreeImpl* treeImpl, int id, VideoFrameProvider* provider) | 27 scoped_ptr<VideoLayerImpl> VideoLayerImpl::create(LayerTreeImpl* treeImpl, int id, VideoFrameProvider* provider) |
27 { | 28 { |
(...skipping 51 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
79 { | 80 { |
80 m_providerClientImpl->set_active_video_layer(this); | 81 m_providerClientImpl->set_active_video_layer(this); |
81 } | 82 } |
82 | 83 |
83 // Convert media::VideoFrame::Format to OpenGL enum values. | 84 // Convert media::VideoFrame::Format to OpenGL enum values. |
84 static GLenum convertVFCFormatToGLenum(const media::VideoFrame& frame) | 85 static GLenum convertVFCFormatToGLenum(const media::VideoFrame& frame) |
85 { | 86 { |
86 switch (frame.format()) { | 87 switch (frame.format()) { |
87 case media::VideoFrame::YV12: | 88 case media::VideoFrame::YV12: |
88 case media::VideoFrame::YV16: | 89 case media::VideoFrame::YV16: |
90 case media::VideoFrame::YV12A: | |
89 return GL_LUMINANCE; | 91 return GL_LUMINANCE; |
90 case media::VideoFrame::NATIVE_TEXTURE: | 92 case media::VideoFrame::NATIVE_TEXTURE: |
91 return frame.texture_target(); | 93 return frame.texture_target(); |
92 case media::VideoFrame::INVALID: | 94 case media::VideoFrame::INVALID: |
93 case media::VideoFrame::RGB32: | 95 case media::VideoFrame::RGB32: |
94 case media::VideoFrame::EMPTY: | 96 case media::VideoFrame::EMPTY: |
95 case media::VideoFrame::I420: | 97 case media::VideoFrame::I420: |
96 NOTREACHED(); | 98 NOTREACHED(); |
97 break; | 99 break; |
98 } | 100 } |
99 return GL_INVALID_VALUE; | 101 return GL_INVALID_VALUE; |
100 } | 102 } |
101 | 103 |
102 size_t VideoLayerImpl::numPlanes() const | 104 size_t VideoLayerImpl::numPlanes() const |
103 { | 105 { |
104 if (!m_frame) | 106 if (!m_frame) |
105 return 0; | 107 return 0; |
106 | 108 |
107 if (m_convertYUV) | 109 if (m_convertYUV) |
108 return 1; | 110 return 1; |
109 | 111 |
110 switch (m_frame->format()) { | 112 switch (m_frame->format()) { |
111 case media::VideoFrame::RGB32: | 113 case media::VideoFrame::RGB32: |
112 return 1; | 114 return 1; |
113 case media::VideoFrame::YV12: | 115 case media::VideoFrame::YV12: |
114 case media::VideoFrame::YV16: | 116 case media::VideoFrame::YV16: |
115 return 3; | 117 return 3; |
118 case media::VideoFrame::YV12A: | |
119 return 4; | |
116 case media::VideoFrame::INVALID: | 120 case media::VideoFrame::INVALID: |
117 case media::VideoFrame::EMPTY: | 121 case media::VideoFrame::EMPTY: |
118 case media::VideoFrame::I420: | 122 case media::VideoFrame::I420: |
119 break; | 123 break; |
120 case media::VideoFrame::NATIVE_TEXTURE: | 124 case media::VideoFrame::NATIVE_TEXTURE: |
121 return 0; | 125 return 0; |
122 } | 126 } |
123 NOTREACHED(); | 127 NOTREACHED(); |
124 return 0; | 128 return 0; |
125 } | 129 } |
(...skipping 39 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
165 m_frame = 0; | 169 m_frame = 0; |
166 return; | 170 return; |
167 } | 171 } |
168 | 172 |
169 // FIXME: If we're in software compositing mode, we do the YUV -> RGB | 173 // FIXME: If we're in software compositing mode, we do the YUV -> RGB |
170 // conversion here. That involves an extra copy of each frame to a bitmap. | 174 // conversion here. That involves an extra copy of each frame to a bitmap. |
171 // Obviously, this is suboptimal and should be addressed once ubercompositor | 175 // Obviously, this is suboptimal and should be addressed once ubercompositor |
172 // starts shaping up. | 176 // starts shaping up. |
173 m_convertYUV = resourceProvider->defaultResourceType() == ResourceProvider::Bitmap && | 177 m_convertYUV = resourceProvider->defaultResourceType() == ResourceProvider::Bitmap && |
174 (m_frame->format() == media::VideoFrame::YV12 || | 178 (m_frame->format() == media::VideoFrame::YV12 || |
179 m_frame->format() == media::VideoFrame::YV12A || | |
175 m_frame->format() == media::VideoFrame::YV16); | 180 m_frame->format() == media::VideoFrame::YV16); |
176 | 181 |
177 if (m_convertYUV) | 182 if (m_convertYUV) |
178 m_format = GL_RGBA; | 183 m_format = GL_RGBA; |
179 | 184 |
180 if (!allocatePlaneData(resourceProvider)) { | 185 if (!allocatePlaneData(resourceProvider)) { |
181 m_providerClientImpl->PutCurrentFrame(m_frame); | 186 m_providerClientImpl->PutCurrentFrame(m_frame); |
182 m_frame = 0; | 187 m_frame = 0; |
183 return; | 188 return; |
184 } | 189 } |
(...skipping 25 matching lines...) Expand all Loading... | |
210 gfx::Size codedSize = m_frame->coded_size(); | 215 gfx::Size codedSize = m_frame->coded_size(); |
211 | 216 |
212 // pixels for macroblocked formats. | 217 // pixels for macroblocked formats. |
213 const float texWidthScale = | 218 const float texWidthScale = |
214 static_cast<float>(visibleRect.width()) / codedSize.width(); | 219 static_cast<float>(visibleRect.width()) / codedSize.width(); |
215 const float texHeightScale = | 220 const float texHeightScale = |
216 static_cast<float>(visibleRect.height()) / codedSize.height(); | 221 static_cast<float>(visibleRect.height()) / codedSize.height(); |
217 | 222 |
218 switch (m_format) { | 223 switch (m_format) { |
219 case GL_LUMINANCE: { | 224 case GL_LUMINANCE: { |
220 // YUV software decoder. | 225 if(m_frame->HasAlpha()) { |
221 const FramePlane& yPlane = m_framePlanes[media::VideoFrame::kYPlane]; | 226 // YUVA software decoder. |
222 const FramePlane& uPlane = m_framePlanes[media::VideoFrame::kUPlane]; | 227 const FramePlane& yPlane = m_framePlanes[media::VideoFrame::kYPlane]; |
fgalligan1
2013/02/12 01:20:58
You can move yPlane, uPlave, vPlane, and texScale
vigneshv
2013/02/15 18:05:02
Done.
| |
223 const FramePlane& vPlane = m_framePlanes[media::VideoFrame::kVPlane]; | 228 const FramePlane& uPlane = m_framePlanes[media::VideoFrame::kUPlane] ; |
224 gfx::SizeF texScale(texWidthScale, texHeightScale); | 229 const FramePlane& vPlane = m_framePlanes[media::VideoFrame::kVPlane] ; |
225 scoped_ptr<YUVVideoDrawQuad> yuvVideoQuad = YUVVideoDrawQuad::Create(); | 230 const FramePlane& aPlane = m_framePlanes[media::VideoFrame::kAPlane] ; |
226 yuvVideoQuad->SetNew(sharedQuadState, quadRect, opaqueRect, texScale, yP lane, uPlane, vPlane); | 231 gfx::SizeF texScale(texWidthScale, texHeightScale); |
227 quadSink.append(yuvVideoQuad.PassAs<DrawQuad>(), appendQuadsData); | 232 scoped_ptr<YUVAVideoDrawQuad> yuvaVideoQuad = YUVAVideoDrawQuad::Cre ate(); |
233 yuvaVideoQuad->SetNew(sharedQuadState, quadRect, opaqueRect, texScal e, yPlane, uPlane, vPlane, aPlane); | |
234 quadSink.append(yuvaVideoQuad.PassAs<DrawQuad>(), appendQuadsData); | |
235 } else { | |
236 // YUV software decoder. | |
237 const FramePlane& yPlane = m_framePlanes[media::VideoFrame::kYPlane] ; | |
238 const FramePlane& uPlane = m_framePlanes[media::VideoFrame::kUPlane] ; | |
239 const FramePlane& vPlane = m_framePlanes[media::VideoFrame::kVPlane] ; | |
240 gfx::SizeF texScale(texWidthScale, texHeightScale); | |
241 scoped_ptr<YUVVideoDrawQuad> yuvVideoQuad = YUVVideoDrawQuad::Create(); |
242 yuvVideoQuad->SetNew(sharedQuadState, quadRect, opaqueRect, texScale , yPlane, uPlane, vPlane); | |
243 quadSink.append(yuvVideoQuad.PassAs<DrawQuad>(), appendQuadsData); | |
244 } | |
245 | |
228 break; | 246 break; |
229 } | 247 } |
230 case GL_RGBA: { | 248 case GL_RGBA: { |
231 // RGBA software decoder. | 249 // RGBA software decoder. |
232 const FramePlane& plane = m_framePlanes[media::VideoFrame::kRGBPlane]; | 250 const FramePlane& plane = m_framePlanes[media::VideoFrame::kRGBPlane]; |
233 bool premultipliedAlpha = true; | 251 bool premultipliedAlpha = true; |
234 gfx::PointF uvTopLeft(0.f, 0.f); | 252 gfx::PointF uvTopLeft(0.f, 0.f); |
235 gfx::PointF uvBottomRight(texWidthScale, texHeightScale); | 253 gfx::PointF uvBottomRight(texWidthScale, texHeightScale); |
236 const float opacity[] = {1.0f, 1.0f, 1.0f, 1.0f}; | 254 const float opacity[] = {1.0f, 1.0f, 1.0f, 1.0f}; |
237 bool flipped = false; | 255 bool flipped = false; |
(...skipping 56 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
294 m_providerClientImpl->PutCurrentFrame(m_frame); | 312 m_providerClientImpl->PutCurrentFrame(m_frame); |
295 m_frame = 0; | 313 m_frame = 0; |
296 | 314 |
297 m_providerClientImpl->ReleaseLock(); | 315 m_providerClientImpl->ReleaseLock(); |
298 } | 316 } |
299 | 317 |
300 static gfx::Size videoFrameDimension(media::VideoFrame* frame, int plane) { | 318 static gfx::Size videoFrameDimension(media::VideoFrame* frame, int plane) { |
301 gfx::Size dimensions = frame->coded_size(); | 319 gfx::Size dimensions = frame->coded_size(); |
302 switch (frame->format()) { | 320 switch (frame->format()) { |
303 case media::VideoFrame::YV12: | 321 case media::VideoFrame::YV12: |
304 if (plane != media::VideoFrame::kYPlane) { | 322 case media::VideoFrame::YV12A: |
323 if (plane != media::VideoFrame::kYPlane && plane != media::VideoFrame::kAPlane) { |
305 dimensions.set_width(dimensions.width() / 2); | 324 dimensions.set_width(dimensions.width() / 2); |
306 dimensions.set_height(dimensions.height() / 2); | 325 dimensions.set_height(dimensions.height() / 2); |
307 } | 326 } |
308 break; | 327 break; |
309 case media::VideoFrame::YV16: | 328 case media::VideoFrame::YV16: |
310 if (plane != media::VideoFrame::kYPlane) { | 329 if (plane != media::VideoFrame::kYPlane) { |
311 dimensions.set_width(dimensions.width() / 2); | 330 dimensions.set_width(dimensions.width() / 2); |
312 } | 331 } |
313 break; | 332 break; |
314 default: | 333 default: |
(...skipping 102 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
417 { | 436 { |
418 m_providerClientImpl = providerClientImpl; | 437 m_providerClientImpl = providerClientImpl; |
419 } | 438 } |
420 | 439 |
421 const char* VideoLayerImpl::layerTypeAsString() const | 440 const char* VideoLayerImpl::layerTypeAsString() const |
422 { | 441 { |
423 return "VideoLayer"; | 442 return "VideoLayer"; |
424 } | 443 } |
425 | 444 |
426 } // namespace cc | 445 } // namespace cc |
OLD | NEW |