Chromium Code Reviews

Side by Side Diff: cc/video_layer_impl.cc

Issue 11274017: Added support for YUV videos to the software compositor. (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src
Patch Set: Rebase. Created 8 years, 1 month ago
1 // Copyright 2011 The Chromium Authors. All rights reserved. 1 // Copyright 2011 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "config.h" 5 #include "config.h"
6 6
7 #include "cc/video_layer_impl.h" 7 #include "cc/video_layer_impl.h"
8 8
9 #include "NotImplemented.h" 9 #include "base/logging.h"
10 #include "cc/io_surface_draw_quad.h" 10 #include "cc/io_surface_draw_quad.h"
11 #include "cc/layer_tree_host_impl.h" 11 #include "cc/layer_tree_host_impl.h"
12 #include "cc/proxy.h" 12 #include "cc/proxy.h"
13 #include "cc/quad_sink.h" 13 #include "cc/quad_sink.h"
14 #include "cc/resource_provider.h" 14 #include "cc/resource_provider.h"
15 #include "cc/stream_video_draw_quad.h" 15 #include "cc/stream_video_draw_quad.h"
16 #include "cc/texture_draw_quad.h" 16 #include "cc/texture_draw_quad.h"
17 #include "cc/yuv_video_draw_quad.h" 17 #include "cc/yuv_video_draw_quad.h"
18 #include "media/filters/skcanvas_video_renderer.h"
18 #include "third_party/khronos/GLES2/gl2.h" 19 #include "third_party/khronos/GLES2/gl2.h"
19 #include "third_party/khronos/GLES2/gl2ext.h" 20 #include "third_party/khronos/GLES2/gl2ext.h"
20 #include <public/WebVideoFrame.h>
21 21
22 namespace cc { 22 namespace cc {
23 23
24 VideoLayerImpl::VideoLayerImpl(int id, WebKit::WebVideoFrameProvider* provider) 24 VideoLayerImpl::VideoLayerImpl(int id, WebKit::WebVideoFrameProvider* provider,
25 const FrameUnwrapper& unwrapper)
25 : LayerImpl(id) 26 : LayerImpl(id)
26 , m_provider(provider) 27 , m_provider(provider)
28 , m_unwrapper(unwrapper)
29 , m_webFrame(0)
27 , m_frame(0) 30 , m_frame(0)
31 , m_format(GL_INVALID_VALUE)
32 , m_convertYUV(false)
28 , m_externalTextureResource(0) 33 , m_externalTextureResource(0)
29 { 34 {
30 // This matrix is the default transformation for stream textures, and flips on the Y axis. 35 // This matrix is the default transformation for stream textures, and flips on the Y axis.
31 m_streamTextureMatrix = WebKit::WebTransformationMatrix( 36 m_streamTextureMatrix = WebKit::WebTransformationMatrix(
32 1, 0, 0, 0, 37 1, 0, 0, 0,
33 0, -1, 0, 0, 38 0, -1, 0, 0,
34 0, 0, 1, 0, 39 0, 0, 1, 0,
35 0, 1, 0, 1); 40 0, 1, 0, 1);
36 41
37 // This only happens during a commit on the compositor thread while the main 42 // This only happens during a commit on the compositor thread while the main
38 // thread is blocked. That makes this a thread-safe call to set the video 43 // thread is blocked. That makes this a thread-safe call to set the video
39 // frame provider client that does not require a lock. The same is true of 44 // frame provider client that does not require a lock. The same is true of
40 // the call in the destructor. 45 // the call in the destructor.
41 DCHECK(Proxy::isMainThreadBlocked()); 46 DCHECK(Proxy::isMainThreadBlocked());
42 m_provider->setVideoFrameProviderClient(this); 47 m_provider->setVideoFrameProviderClient(this);
43 } 48 }
44 49
45 VideoLayerImpl::~VideoLayerImpl() 50 VideoLayerImpl::~VideoLayerImpl()
46 { 51 {
47 // See comment in constructor for why this doesn't need a lock. 52 // See comment in constructor for why this doesn't need a lock.
48 DCHECK(Proxy::isMainThreadBlocked()); 53 DCHECK(Proxy::isMainThreadBlocked());
49 if (m_provider) { 54 if (m_provider) {
50 m_provider->setVideoFrameProviderClient(0); 55 m_provider->setVideoFrameProviderClient(0);
51 m_provider = 0; 56 m_provider = 0;
52 } 57 }
53 freePlaneData(layerTreeHostImpl()->resourceProvider()); 58 freePlaneData(layerTreeHostImpl()->resourceProvider());
54 59
55 #ifndef NDEBUG 60 #ifndef NDEBUG
56 for (unsigned i = 0; i < WebKit::WebVideoFrame::maxPlanes; ++i) 61 for (size_t i = 0; i < media::VideoFrame::kMaxPlanes; ++i)
57 DCHECK(!m_framePlanes[i].resourceId); 62 DCHECK(!m_framePlanes[i].resourceId);
58 DCHECK(!m_externalTextureResource); 63 DCHECK(!m_externalTextureResource);
59 #endif 64 #endif
60 } 65 }
61 66
62 void VideoLayerImpl::stopUsingProvider() 67 void VideoLayerImpl::stopUsingProvider()
63 { 68 {
64 // Block the provider from shutting down until this client is done 69 // Block the provider from shutting down until this client is done
65 // using the frame. 70 // using the frame.
66 base::AutoLock locker(m_providerLock); 71 base::AutoLock locker(m_providerLock);
67 DCHECK(!m_frame); 72 DCHECK(!m_frame);
68 m_provider = 0; 73 m_provider = 0;
69 } 74 }
70 75
71 // Convert WebKit::WebVideoFrame::Format to GraphicsContext3D's format enum values. 76 // Convert media::VideoFrame::Format to OpenGL enum values.
72 static GLenum convertVFCFormatToGC3DFormat(const WebKit::WebVideoFrame& frame) 77 static GLenum convertVFCFormatToGLenum(const media::VideoFrame& frame)
73 { 78 {
74 switch (frame.format()) { 79 switch (frame.format()) {
75 case WebKit::WebVideoFrame::FormatYV12: 80 case media::VideoFrame::YV12:
76 case WebKit::WebVideoFrame::FormatYV16: 81 case media::VideoFrame::YV16:
77 return GL_LUMINANCE; 82 return GL_LUMINANCE;
78 case WebKit::WebVideoFrame::FormatNativeTexture: 83 case media::VideoFrame::NATIVE_TEXTURE:
79 return frame.textureTarget(); 84 return frame.texture_target();
80 case WebKit::WebVideoFrame::FormatInvalid: 85 case media::VideoFrame::INVALID:
81 case WebKit::WebVideoFrame::FormatRGB32: 86 case media::VideoFrame::RGB32:
82 case WebKit::WebVideoFrame::FormatEmpty: 87 case media::VideoFrame::EMPTY:
83 case WebKit::WebVideoFrame::FormatI420: 88 case media::VideoFrame::I420:
84 notImplemented(); 89 NOTREACHED();
90 break;
85 } 91 }
86 return GL_INVALID_VALUE; 92 return GL_INVALID_VALUE;
87 } 93 }
88 94
95 size_t VideoLayerImpl::numPlanes() const
96 {
97 if (!m_frame)
98 return 0;
99
100 if (m_convertYUV)
101 return 1;
102
103 switch (m_frame->format()) {
104 case media::VideoFrame::RGB32:
105 return 1;
106 case media::VideoFrame::YV12:
107 case media::VideoFrame::YV16:
108 return 3;
109 case media::VideoFrame::INVALID:
110 case media::VideoFrame::EMPTY:
111 case media::VideoFrame::I420:
112 break;
113 case media::VideoFrame::NATIVE_TEXTURE:
114 return 0;
115 }
116 NOTREACHED();
117 return 0;
118 }
119
89 void VideoLayerImpl::willDraw(ResourceProvider* resourceProvider) 120 void VideoLayerImpl::willDraw(ResourceProvider* resourceProvider)
90 { 121 {
91 DCHECK(Proxy::isImplThread()); 122 DCHECK(Proxy::isImplThread());
92 LayerImpl::willDraw(resourceProvider); 123 LayerImpl::willDraw(resourceProvider);
93 124
94 // Explicitly acquire and release the provider mutex so it can be held from 125 // Explicitly acquire and release the provider mutex so it can be held from
95 // willDraw to didDraw. Since the compositor thread is in the middle of 126 // willDraw to didDraw. Since the compositor thread is in the middle of
96 // drawing, the layer will not be destroyed before didDraw is called. 127 // drawing, the layer will not be destroyed before didDraw is called.
97 // Therefore, the only thing that will prevent this lock from being released 128 // Therefore, the only thing that will prevent this lock from being released
98 // is the GPU process locking it. As the GPU process can't cause the 129 // is the GPU process locking it. As the GPU process can't cause the
(...skipping 11 matching lines...)
110 void VideoLayerImpl::willDrawInternal(ResourceProvider* resourceProvider) 141 void VideoLayerImpl::willDrawInternal(ResourceProvider* resourceProvider)
111 { 142 {
112 DCHECK(Proxy::isImplThread()); 143 DCHECK(Proxy::isImplThread());
113 DCHECK(!m_externalTextureResource); 144 DCHECK(!m_externalTextureResource);
114 145
115 if (!m_provider) { 146 if (!m_provider) {
116 m_frame = 0; 147 m_frame = 0;
117 return; 148 return;
118 } 149 }
119 150
120 m_frame = m_provider->getCurrentFrame(); 151 m_webFrame = m_provider->getCurrentFrame();
152 m_frame = m_unwrapper.Run(m_webFrame);
121 153
122 if (!m_frame) 154 if (!m_frame)
123 return; 155 return;
124 156
125 m_format = convertVFCFormatToGC3DFormat(*m_frame); 157 m_format = convertVFCFormatToGLenum(*m_frame);
126 158
127 if (m_format == GL_INVALID_VALUE) { 159 if (m_format == GL_INVALID_VALUE) {
128 m_provider->putCurrentFrame(m_frame); 160 m_provider->putCurrentFrame(m_webFrame);
129 m_frame = 0; 161 m_frame = 0;
130 return; 162 return;
131 } 163 }
132 164
133 if (m_frame->planes() > WebKit::WebVideoFrame::maxPlanes) { 165 // FIXME: If we're in software compositing mode, we do the YUV -> RGB
134 m_provider->putCurrentFrame(m_frame); 166 // conversion here. That involves an extra copy of each frame to a bitmap.
135 m_frame = 0; 167 // Obviously, this is suboptimal and should be addressed once ubercompositor
136 return; 168 // starts shaping up.
137 } 169 m_convertYUV = resourceProvider->defaultResourceType() == ResourceProvider::Bitmap &&
170 (m_frame->format() == media::VideoFrame::YV12 ||
171 m_frame->format() == media::VideoFrame::YV16);
172
173 if (m_convertYUV)
174 m_format = GL_RGBA;
138 175
139 if (!allocatePlaneData(resourceProvider)) { 176 if (!allocatePlaneData(resourceProvider)) {
140 m_provider->putCurrentFrame(m_frame); 177 m_provider->putCurrentFrame(m_webFrame);
141 m_frame = 0; 178 m_frame = 0;
142 return; 179 return;
143 } 180 }
144 181
145 if (!copyPlaneData(resourceProvider)) { 182 if (!copyPlaneData(resourceProvider)) {
146 m_provider->putCurrentFrame(m_frame); 183 m_provider->putCurrentFrame(m_webFrame);
147 m_frame = 0; 184 m_frame = 0;
148 return; 185 return;
149 } 186 }
150 187
151 if (m_format == GL_TEXTURE_2D) 188 if (m_format == GL_TEXTURE_2D)
152 m_externalTextureResource = resourceProvider->createResourceFromExternalTexture(m_frame->textureId()); 189 m_externalTextureResource = resourceProvider->createResourceFromExternalTexture(m_frame->texture_id());
153 } 190 }
154 191
155 void VideoLayerImpl::appendQuads(QuadSink& quadSink, AppendQuadsData& appendQuadsData) 192 void VideoLayerImpl::appendQuads(QuadSink& quadSink, AppendQuadsData& appendQuadsData)
156 { 193 {
157 DCHECK(Proxy::isImplThread()); 194 DCHECK(Proxy::isImplThread());
158 195
159 if (!m_frame) 196 if (!m_frame)
160 return; 197 return;
161 198
162 SharedQuadState* sharedQuadState = quadSink.useSharedQuadState(createSharedQuadState()); 199 SharedQuadState* sharedQuadState = quadSink.useSharedQuadState(createSharedQuadState());
163 appendDebugBorderQuad(quadSink, sharedQuadState, appendQuadsData); 200 appendDebugBorderQuad(quadSink, sharedQuadState, appendQuadsData);
164 201
165 // FIXME: When we pass quads out of process, we need to double-buffer, or 202 // FIXME: When we pass quads out of process, we need to double-buffer, or
166 // otherwise synchonize use of all textures in the quad. 203 // otherwise synchonize use of all textures in the quad.
167 204
168 IntRect quadRect(IntPoint(), contentBounds()); 205 gfx::Rect quadRect(contentBounds());
169 206
170 switch (m_format) { 207 switch (m_format) {
171 case GL_LUMINANCE: { 208 case GL_LUMINANCE: {
172 // YUV software decoder. 209 // YUV software decoder.
173 const FramePlane& yPlane = m_framePlanes[WebKit::WebVideoFrame::yPlane]; 210 const FramePlane& yPlane = m_framePlanes[media::VideoFrame::kYPlane];
174 const FramePlane& uPlane = m_framePlanes[WebKit::WebVideoFrame::uPlane]; 211 const FramePlane& uPlane = m_framePlanes[media::VideoFrame::kUPlane];
175 const FramePlane& vPlane = m_framePlanes[WebKit::WebVideoFrame::vPlane]; 212 const FramePlane& vPlane = m_framePlanes[media::VideoFrame::kVPlane];
176 scoped_ptr<YUVVideoDrawQuad> yuvVideoQuad = YUVVideoDrawQuad::create(sharedQuadState, quadRect, yPlane, uPlane, vPlane); 213 scoped_ptr<YUVVideoDrawQuad> yuvVideoQuad = YUVVideoDrawQuad::create(sharedQuadState, quadRect, yPlane, uPlane, vPlane);
177 quadSink.append(yuvVideoQuad.PassAs<DrawQuad>(), appendQuadsData); 214 quadSink.append(yuvVideoQuad.PassAs<DrawQuad>(), appendQuadsData);
178 break; 215 break;
179 } 216 }
180 case GL_RGBA: { 217 case GL_RGBA: {
181 // RGBA software decoder. 218 // RGBA software decoder.
182 const FramePlane& plane = m_framePlanes[WebKit::WebVideoFrame::rgbPlane]; 219 const FramePlane& plane = m_framePlanes[media::VideoFrame::kRGBPlane];
220 bool premultipliedAlpha = true;
183 float widthScaleFactor = static_cast<float>(plane.visibleSize.width()) / plane.size.width(); 221 float widthScaleFactor = static_cast<float>(plane.visibleSize.width()) / plane.size.width();
184 222 gfx::RectF uvRect(widthScaleFactor, 1);
185 bool premultipliedAlpha = true;
186 FloatRect uvRect(0, 0, widthScaleFactor, 1);
187 bool flipped = false; 223 bool flipped = false;
188 scoped_ptr<TextureDrawQuad> textureQuad = TextureDrawQuad::create(sharedQuadState, quadRect, plane.resourceId, premultipliedAlpha, uvRect, flipped); 224 scoped_ptr<TextureDrawQuad> textureQuad = TextureDrawQuad::create(sharedQuadState, quadRect, plane.resourceId, premultipliedAlpha, uvRect, flipped);
189 quadSink.append(textureQuad.PassAs<DrawQuad>(), appendQuadsData); 225 quadSink.append(textureQuad.PassAs<DrawQuad>(), appendQuadsData);
190 break; 226 break;
191 } 227 }
192 case GL_TEXTURE_2D: { 228 case GL_TEXTURE_2D: {
193 // NativeTexture hardware decoder. 229 // NativeTexture hardware decoder.
194 bool premultipliedAlpha = true; 230 bool premultipliedAlpha = true;
195 FloatRect uvRect(0, 0, 1, 1); 231 gfx::RectF uvRect(1, 1);
196 bool flipped = false; 232 bool flipped = false;
197 scoped_ptr<TextureDrawQuad> textureQuad = TextureDrawQuad::create(sharedQuadState, quadRect, m_externalTextureResource, premultipliedAlpha, uvRect, flipped); 233 scoped_ptr<TextureDrawQuad> textureQuad = TextureDrawQuad::create(sharedQuadState, quadRect, m_externalTextureResource, premultipliedAlpha, uvRect, flipped);
198 quadSink.append(textureQuad.PassAs<DrawQuad>(), appendQuadsData); 234 quadSink.append(textureQuad.PassAs<DrawQuad>(), appendQuadsData);
199 break; 235 break;
200 } 236 }
201 case GL_TEXTURE_RECTANGLE_ARB: { 237 case GL_TEXTURE_RECTANGLE_ARB: {
202 IntSize textureSize(m_frame->width(), m_frame->height()); 238 scoped_ptr<IOSurfaceDrawQuad> ioSurfaceQuad = IOSurfaceDrawQuad::create(sharedQuadState, quadRect, m_frame->data_size(), m_frame->texture_id(), IOSurfaceDrawQuad::Unflipped);
203 scoped_ptr<IOSurfaceDrawQuad> ioSurfaceQuad = IOSurfaceDrawQuad::create(sharedQuadState, quadRect, textureSize, m_frame->textureId(), IOSurfaceDrawQuad::Unflipped);
204 quadSink.append(ioSurfaceQuad.PassAs<DrawQuad>(), appendQuadsData); 239 quadSink.append(ioSurfaceQuad.PassAs<DrawQuad>(), appendQuadsData);
205 break; 240 break;
206 } 241 }
207 case GL_TEXTURE_EXTERNAL_OES: { 242 case GL_TEXTURE_EXTERNAL_OES: {
208 // StreamTexture hardware decoder. 243 // StreamTexture hardware decoder.
209 scoped_ptr<StreamVideoDrawQuad> streamVideoQuad = StreamVideoDrawQuad::create(sharedQuadState, quadRect, m_frame->textureId(), m_streamTextureMatrix); 244 scoped_ptr<StreamVideoDrawQuad> streamVideoQuad = StreamVideoDrawQuad::create(sharedQuadState, quadRect, m_frame->texture_id(), m_streamTextureMatrix);
210 quadSink.append(streamVideoQuad.PassAs<DrawQuad>(), appendQuadsData); 245 quadSink.append(streamVideoQuad.PassAs<DrawQuad>(), appendQuadsData);
211 break; 246 break;
212 } 247 }
213 default: 248 default:
214 CRASH(); // Someone updated convertVFCFormatToGC3DFormat above but update this! 249 NOTREACHED(); // Someone updated convertVFCFormatToGLenum above but update this!
250 break;
215 } 251 }
216 } 252 }
217 253
218 void VideoLayerImpl::didDraw(ResourceProvider* resourceProvider) 254 void VideoLayerImpl::didDraw(ResourceProvider* resourceProvider)
219 { 255 {
220 DCHECK(Proxy::isImplThread()); 256 DCHECK(Proxy::isImplThread());
221 LayerImpl::didDraw(resourceProvider); 257 LayerImpl::didDraw(resourceProvider);
222 258
223 if (!m_frame) 259 if (!m_frame)
224 return; 260 return;
225 261
226 if (m_format == GL_TEXTURE_2D) { 262 if (m_format == GL_TEXTURE_2D) {
227 DCHECK(m_externalTextureResource); 263 DCHECK(m_externalTextureResource);
228 // FIXME: the following assert will not be true when sending resources to a 264 // FIXME: the following assert will not be true when sending resources to a
229 // parent compositor. We will probably need to hold on to m_frame for 265 // parent compositor. We will probably need to hold on to m_frame for
230 // longer, and have several "current frames" in the pipeline. 266 // longer, and have several "current frames" in the pipeline.
231 DCHECK(!resourceProvider->inUseByConsumer(m_externalTextureResource)); 267 DCHECK(!resourceProvider->inUseByConsumer(m_externalTextureResource));
232 resourceProvider->deleteResource(m_externalTextureResource); 268 resourceProvider->deleteResource(m_externalTextureResource);
233 m_externalTextureResource = 0; 269 m_externalTextureResource = 0;
234 } 270 }
235 271
236 m_provider->putCurrentFrame(m_frame); 272 m_provider->putCurrentFrame(m_webFrame);
237 m_frame = 0; 273 m_frame = 0;
238 274
239 m_providerLock.Release(); 275 m_providerLock.Release();
240 } 276 }
241 277
242 static int videoFrameDimension(int originalDimension, unsigned plane, int format) 278 static int videoFrameDimension(int originalDimension, size_t plane, int format)
243 { 279 {
244 if (format == WebKit::WebVideoFrame::FormatYV12 && plane != WebKit::WebVideoFrame::yPlane) 280 if (format == media::VideoFrame::YV12 && plane != media::VideoFrame::kYPlane)
245 return originalDimension / 2; 281 return originalDimension / 2;
246 return originalDimension; 282 return originalDimension;
247 } 283 }
248 284
249 static bool hasPaddingBytes(const WebKit::WebVideoFrame& frame, unsigned plane) 285 static bool hasPaddingBytes(const media::VideoFrame& frame, size_t plane)
250 { 286 {
251 return frame.stride(plane) > videoFrameDimension(frame.width(), plane, frame.format()); 287 return frame.stride(plane) > videoFrameDimension(frame.data_size().width(), plane, frame.format());
252 } 288 }
253 289
254 IntSize VideoLayerImpl::computeVisibleSize(const WebKit::WebVideoFrame& frame, unsigned plane) 290 IntSize computeVisibleSize(const media::VideoFrame& frame, size_t plane)
255 { 291 {
256 int visibleWidth = videoFrameDimension(frame.width(), plane, frame.format()); 292 int visibleWidth = videoFrameDimension(frame.data_size().width(), plane, frame.format());
257 int originalWidth = visibleWidth; 293 int originalWidth = visibleWidth;
258 int visibleHeight = videoFrameDimension(frame.height(), plane, frame.format()); 294 int visibleHeight = videoFrameDimension(frame.data_size().height(), plane, frame.format());
259 295
260 // When there are dead pixels at the edge of the texture, decrease 296 // When there are dead pixels at the edge of the texture, decrease
261 // the frame width by 1 to prevent the rightmost pixels from 297 // the frame width by 1 to prevent the rightmost pixels from
262 // interpolating with the dead pixels. 298 // interpolating with the dead pixels.
263 if (hasPaddingBytes(frame, plane)) 299 if (hasPaddingBytes(frame, plane))
264 --visibleWidth; 300 --visibleWidth;
265 301
266 // In YV12, every 2x2 square of Y values corresponds to one U and 302 // In YV12, every 2x2 square of Y values corresponds to one U and
267 // one V value. If we decrease the width of the UV plane, we must decrease the 303 // one V value. If we decrease the width of the UV plane, we must decrease the
268 // width of the Y texture by 2 for proper alignment. This must happen 304 // width of the Y texture by 2 for proper alignment. This must happen
269 // always, even if Y's texture does not have padding bytes. 305 // always, even if Y's texture does not have padding bytes.
270 if (plane == WebKit::WebVideoFrame::yPlane && frame.format() == WebKit::WebVideoFrame::FormatYV12) { 306 if (plane == media::VideoFrame::kYPlane && frame.format() == media::VideoFrame::YV12) {
271 if (hasPaddingBytes(frame, WebKit::WebVideoFrame::uPlane)) 307 if (hasPaddingBytes(frame, media::VideoFrame::kUPlane))
272 visibleWidth = originalWidth - 2; 308 visibleWidth = originalWidth - 2;
273 } 309 }
274 310
275 return IntSize(visibleWidth, visibleHeight); 311 return IntSize(visibleWidth, visibleHeight);
276 } 312 }
277 313
278 bool VideoLayerImpl::FramePlane::allocateData(ResourceProvider* resourceProvider) 314 bool VideoLayerImpl::FramePlane::allocateData(ResourceProvider* resourceProvider)
279 { 315 {
280 if (resourceId) 316 if (resourceId)
281 return true; 317 return true;
282 318
283 resourceId = resourceProvider->createResource(Renderer::ImplPool, size, format, ResourceProvider::TextureUsageAny); 319 resourceId = resourceProvider->createResource(Renderer::ImplPool, size, format, ResourceProvider::TextureUsageAny);
284 return resourceId; 320 return resourceId;
285 } 321 }
286 322
287 void VideoLayerImpl::FramePlane::freeData(ResourceProvider* resourceProvider) 323 void VideoLayerImpl::FramePlane::freeData(ResourceProvider* resourceProvider)
288 { 324 {
289 if (!resourceId) 325 if (!resourceId)
290 return; 326 return;
291 327
292 resourceProvider->deleteResource(resourceId); 328 resourceProvider->deleteResource(resourceId);
293 resourceId = 0; 329 resourceId = 0;
294 } 330 }
295 331
296 bool VideoLayerImpl::allocatePlaneData(ResourceProvider* resourceProvider) 332 bool VideoLayerImpl::allocatePlaneData(ResourceProvider* resourceProvider)
297 { 333 {
298 int maxTextureSize = resourceProvider->maxTextureSize(); 334 const int maxTextureSize = resourceProvider->maxTextureSize();
299 for (unsigned planeIndex = 0; planeIndex < m_frame->planes(); ++planeIndex) { 335 const size_t planeCount = numPlanes();
336 for (size_t planeIndex = 0; planeIndex < planeCount; ++planeIndex) {
300 VideoLayerImpl::FramePlane& plane = m_framePlanes[planeIndex]; 337 VideoLayerImpl::FramePlane& plane = m_framePlanes[planeIndex];
301 338
302 IntSize requiredTextureSize(m_frame->stride(planeIndex), videoFrameDimension(m_frame->height(), planeIndex, m_frame->format())); 339 IntSize requiredTextureSize(m_frame->stride(planeIndex), videoFrameDimension(m_frame->data_size().height(), planeIndex, m_frame->format()));
303 // FIXME: Remove the test against maxTextureSize when tiled layers are implemented. 340 // FIXME: Remove the test against maxTextureSize when tiled layers are implemented.
304 if (requiredTextureSize.isZero() || requiredTextureSize.width() > maxTextureSize || requiredTextureSize.height() > maxTextureSize) 341 if (requiredTextureSize.isZero() || requiredTextureSize.width() > maxTextureSize || requiredTextureSize.height() > maxTextureSize)
305 return false; 342 return false;
306 343
307 if (plane.size != requiredTextureSize || plane.format != m_format) { 344 if (plane.size != requiredTextureSize || plane.format != m_format) {
308 plane.freeData(resourceProvider); 345 plane.freeData(resourceProvider);
309 plane.size = requiredTextureSize; 346 plane.size = requiredTextureSize;
310 plane.format = m_format; 347 plane.format = m_format;
311 } 348 }
312 349
313 if (!plane.resourceId) { 350 if (!plane.resourceId) {
314 if (!plane.allocateData(resourceProvider)) 351 if (!plane.allocateData(resourceProvider))
315 return false; 352 return false;
316 plane.visibleSize = computeVisibleSize(*m_frame, planeIndex); 353 plane.visibleSize = computeVisibleSize(*m_frame, planeIndex);
317 } 354 }
318 } 355 }
319 return true; 356 return true;
320 } 357 }
321 358
322 bool VideoLayerImpl::copyPlaneData(ResourceProvider* resourceProvider) 359 bool VideoLayerImpl::copyPlaneData(ResourceProvider* resourceProvider)
323 { 360 {
324 size_t softwarePlaneCount = m_frame->planes(); 361 const size_t planeCount = numPlanes();
325 if (!softwarePlaneCount) 362 if (!planeCount)
326 return true; 363 return true;
327 364
328 for (size_t softwarePlaneIndex = 0; softwarePlaneIndex < softwarePlaneCount; ++softwarePlaneIndex) { 365 if (m_convertYUV) {
329 VideoLayerImpl::FramePlane& plane = m_framePlanes[softwarePlaneIndex]; 366 if (!m_videoRenderer)
330 const uint8_t* softwarePlanePixels = static_cast<const uint8_t*>(m_frame->data(softwarePlaneIndex)); 367 m_videoRenderer.reset(new media::SkCanvasVideoRenderer);
368 VideoLayerImpl::FramePlane& plane = m_framePlanes[media::VideoFrame::kRGBPlane];
369 ResourceProvider::ScopedWriteLockSoftware lock(resourceProvider, plane.resourceId);
370 m_videoRenderer->Paint(m_frame, lock.skCanvas(), gfx::Rect(plane.size), 0xFF);
371 return true;
372 }
373
374 for (size_t planeIndex = 0; planeIndex < planeCount; ++planeIndex) {
375 VideoLayerImpl::FramePlane& plane = m_framePlanes[planeIndex];
376 const uint8_t* softwarePlanePixels = m_frame->data(planeIndex);
331 IntRect planeRect(IntPoint(), plane.size); 377 IntRect planeRect(IntPoint(), plane.size);
332 resourceProvider->upload(plane.resourceId, softwarePlanePixels, planeRect, planeRect, IntSize()); 378 resourceProvider->upload(plane.resourceId, softwarePlanePixels, planeRect, planeRect, IntSize());
333 } 379 }
334 return true; 380 return true;
335 } 381 }
336 382
337 void VideoLayerImpl::freePlaneData(ResourceProvider* resourceProvider) 383 void VideoLayerImpl::freePlaneData(ResourceProvider* resourceProvider)
338 { 384 {
339 for (unsigned i = 0; i < WebKit::WebVideoFrame::maxPlanes; ++i) 385 for (size_t i = 0; i < media::VideoFrame::kMaxPlanes; ++i)
340 m_framePlanes[i].freeData(resourceProvider); 386 m_framePlanes[i].freeData(resourceProvider);
341 } 387 }
342 388
343 void VideoLayerImpl::freeUnusedPlaneData(ResourceProvider* resourceProvider) 389 void VideoLayerImpl::freeUnusedPlaneData(ResourceProvider* resourceProvider)
344 { 390 {
345 unsigned firstUnusedPlane = m_frame ? m_frame->planes() : 0; 391 size_t firstUnusedPlane = numPlanes();
346 for (unsigned i = firstUnusedPlane; i < WebKit::WebVideoFrame::maxPlanes; ++i) 392 for (size_t i = firstUnusedPlane; i < media::VideoFrame::kMaxPlanes; ++i)
347 m_framePlanes[i].freeData(resourceProvider); 393 m_framePlanes[i].freeData(resourceProvider);
348 } 394 }
349 395
350 void VideoLayerImpl::didReceiveFrame() 396 void VideoLayerImpl::didReceiveFrame()
351 { 397 {
352 setNeedsRedraw(); 398 setNeedsRedraw();
353 } 399 }
354 400
355 void VideoLayerImpl::didUpdateMatrix(const float matrix[16]) 401 void VideoLayerImpl::didUpdateMatrix(const float matrix[16])
356 { 402 {
(...skipping 21 matching lines...)
378 str->append("video layer\n"); 424 str->append("video layer\n");
379 LayerImpl::dumpLayerProperties(str, indent); 425 LayerImpl::dumpLayerProperties(str, indent);
380 } 426 }
381 427
382 const char* VideoLayerImpl::layerTypeAsString() const 428 const char* VideoLayerImpl::layerTypeAsString() const
383 { 429 {
384 return "VideoLayer"; 430 return "VideoLayer";
385 } 431 }
386 432
387 } 433 }
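
For readers unfamiliar with the software path added in this patch: the patch delegates the YUV -> RGB conversion to media::SkCanvasVideoRenderer::Paint(), drawing the frame into the bitmap resource locked via ResourceProvider::ScopedWriteLockSoftware. The standalone sketch below is not that renderer's code; it only illustrates, under simplified assumptions, the kind of per-pixel BT.601 YV12 -> RGBA conversion such a software path has to perform. The Yv12Frame type and convertYV12ToRGBA function are hypothetical.

// Illustrative sketch only: a minimal BT.601 YV12 -> RGBA conversion of the
// kind the software compositing path needs. The real patch delegates this to
// media::SkCanvasVideoRenderer::Paint(); the types below are hypothetical.
#include <algorithm>
#include <cstdint>
#include <vector>

struct Yv12Frame {             // hypothetical stand-in for media::VideoFrame
    int width, height;         // visible size
    int yStride, uvStride;     // strides may include padding bytes
    const uint8_t* yPlane;
    const uint8_t* uPlane;
    const uint8_t* vPlane;
};

static uint8_t clampToByte(int v) { return static_cast<uint8_t>(std::min(std::max(v, 0), 255)); }

// Writes width*height RGBA pixels into |rgbaOut| (tightly packed, 4 bytes per pixel).
void convertYV12ToRGBA(const Yv12Frame& frame, std::vector<uint8_t>* rgbaOut)
{
    rgbaOut->resize(static_cast<size_t>(frame.width) * frame.height * 4);
    for (int row = 0; row < frame.height; ++row) {
        for (int col = 0; col < frame.width; ++col) {
            // Each 2x2 block of Y samples shares one U and one V sample.
            int y = frame.yPlane[row * frame.yStride + col];
            int u = frame.uPlane[(row / 2) * frame.uvStride + (col / 2)] - 128;
            int v = frame.vPlane[(row / 2) * frame.uvStride + (col / 2)] - 128;

            // BT.601 limited-range coefficients in 8.8 fixed point.
            int c = y - 16;
            int r = (298 * c + 409 * v + 128) >> 8;
            int g = (298 * c - 100 * u - 208 * v + 128) >> 8;
            int b = (298 * c + 516 * u + 128) >> 8;

            uint8_t* out = rgbaOut->data() + (static_cast<size_t>(row) * frame.width + col) * 4;
            out[0] = clampToByte(r);
            out[1] = clampToByte(g);
            out[2] = clampToByte(b);
            out[3] = 0xFF;  // opaque alpha, matching the 0xFF passed to Paint() above
        }
    }
}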
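
The padding logic in computeVisibleSize() is easier to follow with concrete numbers. The sketch below mirrors that logic against a hypothetical PlaneLayout type (not the cc/media types): for a 720x480 YV12 frame whose Y stride is padded to 736 and whose U/V strides are padded to 368, the visible Y width becomes 718 and the visible U/V width becomes 359, so the Y plane stays exactly twice as wide as the chroma planes and texture sampling never reaches the padding columns.

// Illustrative sketch only: mirrors the padding/alignment rules of
// computeVisibleSize() using a hypothetical standalone type.
#include <cstdio>

struct PlaneLayout { int width, height, stride; };  // hypothetical

// Width actually sampled from the Y plane: trim one padded column, and when the
// UV plane is also padded, trim the Y width by 2 so it stays exactly twice the
// trimmed UV width (YV12 subsamples chroma 2x2).
int visibleWidthForYPlane(const PlaneLayout& y, const PlaneLayout& uv)
{
    int visible = y.width;
    if (y.stride > y.width)
        --visible;                 // avoid interpolating with dead padding pixels
    if (uv.stride > uv.width)
        visible = y.width - 2;     // keep Y width at 2x the trimmed UV width
    return visible;
}

int visibleWidthForUVPlane(const PlaneLayout& uv)
{
    return uv.stride > uv.width ? uv.width - 1 : uv.width;
}

int main()
{
    // Hypothetical 720x480 YV12 frame whose strides are padded to 736/368.
    PlaneLayout y  = { 720, 480, 736 };
    PlaneLayout uv = { 360, 240, 368 };
    // Prints "Y: 718  UV: 359": the Y width is exactly 2x the UV width, so
    // chroma samples keep lining up with their 2x2 luma blocks.
    std::printf("Y: %d  UV: %d\n", visibleWidthForYPlane(y, uv), visibleWidthForUVPlane(uv));
    return 0;
}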
