Chromium Code Reviews

Unified diff: cc/video_layer_impl.cc

Issue 11269017: Plumb through cropped output size for VideoFrame (Closed)
Base URL: https://git.chromium.org/git/chromium/src@git-svn
Patch Set: Created 8 years, 1 month ago

 // Copyright 2011 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "config.h"

 #include "cc/video_layer_impl.h"

 #include "NotImplemented.h"
 #include "cc/io_surface_draw_quad.h"
(...skipping 104 matching lines...)
     if (!m_provider) {
         m_frame = 0;
         return;
     }

     m_frame = m_provider->getCurrentFrame();

     if (!m_frame)
         return;

+    // If these fail, we'll have to add draw logic that handles offset bitmap/
+    // texture UVs. For now, just expect (0, 0) offset, since all our decoders
+    // so far don't offset.
+    DCHECK_EQ(m_frame->visibleRect().x, 0);
+    DCHECK_EQ(m_frame->visibleRect().y, 0);
+
     m_format = convertVFCFormatToGC3DFormat(*m_frame);

     if (m_format == GL_INVALID_VALUE) {
         m_provider->putCurrentFrame(m_frame);
         m_frame = 0;
         return;
     }

     if (m_frame->planes() > WebKit::WebVideoFrame::maxPlanes) {
         m_provider->putCurrentFrame(m_frame);
(...skipping 24 matching lines...)
     if (!m_frame)
         return;

     SharedQuadState* sharedQuadState = quadSink.useSharedQuadState(createSharedQuadState());
     appendDebugBorderQuad(quadSink, sharedQuadState, appendQuadsData);

     // FIXME: When we pass quads out of process, we need to double-buffer, or
     // otherwise synchronize use of all textures in the quad.

     IntRect quadRect(IntPoint(), contentBounds());
+    WebKit::WebRect visibleRect = m_frame->visibleRect();
+    WebKit::WebSize textureSize = m_frame->textureSize();
+
+    // The visible size can be smaller than the texture size due to padded
+    // pixels for macroblocked formats.
+    const float texWidthScale =
+        static_cast<float>(visibleRect.width) / textureSize.width;
+    const float texHeightScale =
+        static_cast<float>(visibleRect.height) / textureSize.height;

     switch (m_format) {
     case GL_LUMINANCE: {
         // YUV software decoder.
         const FramePlane& yPlane = m_framePlanes[WebKit::WebVideoFrame::yPlane];
         const FramePlane& uPlane = m_framePlanes[WebKit::WebVideoFrame::uPlane];
         const FramePlane& vPlane = m_framePlanes[WebKit::WebVideoFrame::vPlane];
-        scoped_ptr<YUVVideoDrawQuad> yuvVideoQuad = YUVVideoDrawQuad::create(sharedQuadState, quadRect, yPlane, uPlane, vPlane);
+        FloatSize texScale(texWidthScale, texHeightScale);
+        scoped_ptr<YUVVideoDrawQuad> yuvVideoQuad = YUVVideoDrawQuad::create(
+            sharedQuadState, quadRect, texScale, yPlane, uPlane, vPlane);
         quadSink.append(yuvVideoQuad.PassAs<DrawQuad>(), appendQuadsData);
         break;
     }
     case GL_RGBA: {
         // RGBA software decoder.
         const FramePlane& plane = m_framePlanes[WebKit::WebVideoFrame::rgbPlane];
-        float widthScaleFactor = static_cast<float>(plane.visibleSize.width()) / plane.size.width();
-
         bool premultipliedAlpha = true;
-        FloatRect uvRect(0, 0, widthScaleFactor, 1);
+        FloatRect uvRect(0, 0, texWidthScale, texHeightScale);
         bool flipped = false;
         scoped_ptr<TextureDrawQuad> textureQuad = TextureDrawQuad::create(sharedQuadState, quadRect, plane.resourceId, premultipliedAlpha, uvRect, flipped);
         quadSink.append(textureQuad.PassAs<DrawQuad>(), appendQuadsData);
         break;
     }
     case GL_TEXTURE_2D: {
         // NativeTexture hardware decoder.
         bool premultipliedAlpha = true;
-        FloatRect uvRect(0, 0, 1, 1);
+        FloatRect uvRect(0, 0, texWidthScale, texHeightScale);
         bool flipped = false;
         scoped_ptr<TextureDrawQuad> textureQuad = TextureDrawQuad::create(sharedQuadState, quadRect, m_externalTextureResource, premultipliedAlpha, uvRect, flipped);
         quadSink.append(textureQuad.PassAs<DrawQuad>(), appendQuadsData);
         break;
     }
     case GL_TEXTURE_RECTANGLE_ARB: {
-        IntSize textureSize(m_frame->width(), m_frame->height());
-        scoped_ptr<IOSurfaceDrawQuad> ioSurfaceQuad = IOSurfaceDrawQuad::create(sharedQuadState, quadRect, textureSize, m_frame->textureId(), IOSurfaceDrawQuad::Unflipped);
+        IntSize visibleSize(visibleRect.width, visibleRect.height);
+        scoped_ptr<IOSurfaceDrawQuad> ioSurfaceQuad = IOSurfaceDrawQuad::create(sharedQuadState, quadRect, visibleSize, m_frame->textureId(), IOSurfaceDrawQuad::Unflipped);
         quadSink.append(ioSurfaceQuad.PassAs<DrawQuad>(), appendQuadsData);
         break;
     }
     case GL_TEXTURE_EXTERNAL_OES: {
         // StreamTexture hardware decoder.
-        scoped_ptr<StreamVideoDrawQuad> streamVideoQuad = StreamVideoDrawQuad::create(sharedQuadState, quadRect, m_frame->textureId(), m_streamTextureMatrix);
+        WebKit::WebTransformationMatrix transform(m_streamTextureMatrix);
+        transform.scaleNonUniform(texWidthScale, texHeightScale);
+        // Pass the scaled transform so the crop actually takes effect.
+        scoped_ptr<StreamVideoDrawQuad> streamVideoQuad =
+            StreamVideoDrawQuad::create(sharedQuadState, quadRect,
+                                        m_frame->textureId(),
+                                        transform);
         quadSink.append(streamVideoQuad.PassAs<DrawQuad>(), appendQuadsData);
         break;
     }
     default:
         CRASH(); // Someone updated convertVFCFormatToGC3DFormat above but update this!
     }
 }
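
For illustration, a minimal standalone sketch of the UV-cropping arithmetic that the new texWidthScale/texHeightScale values perform, using hypothetical dimensions (a 1280x720 visible rect stored in a 1280x736 macroblock-aligned texture). The struct and variable names below are stand-ins, not part of this patch or of cc:

#include <cassert>

// Hypothetical stand-ins for WebKit::WebRect and WebKit::WebSize.
struct Rect { int x, y, width, height; };
struct Size { int width, height; };

int main()
{
    Rect visibleRect = { 0, 0, 1280, 720 };  // pixels the decoder wants shown
    Size textureSize = { 1280, 736 };        // allocation padded to macroblocks

    // Same arithmetic as texWidthScale / texHeightScale in appendQuads().
    const float texWidthScale = static_cast<float>(visibleRect.width) / textureSize.width;    // 1.0
    const float texHeightScale = static_cast<float>(visibleRect.height) / textureSize.height; // ~0.978

    // A uvRect of (0, 0, texWidthScale, texHeightScale) therefore samples only
    // the visible 1280x720 region and never the 16 padded rows at the bottom.
    assert(texWidthScale == 1.0f);
    assert(texHeightScale > 0.97f && texHeightScale < 1.0f);
    return 0;
}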

 void VideoLayerImpl::didDraw(ResourceProvider* resourceProvider)
 {
(...skipping 12 matching lines...)
         resourceProvider->deleteResource(m_externalTextureResource);
         m_externalTextureResource = 0;
     }

     m_provider->putCurrentFrame(m_frame);
     m_frame = 0;

     m_providerLock.Release();
 }

-static int videoFrameDimension(int originalDimension, unsigned plane, int format)
-{
-    if (format == WebKit::WebVideoFrame::FormatYV12 && plane != WebKit::WebVideoFrame::yPlane)
-        return originalDimension / 2;
-    return originalDimension;
-}
-
-static bool hasPaddingBytes(const WebKit::WebVideoFrame& frame, unsigned plane)
-{
-    return frame.stride(plane) > videoFrameDimension(frame.width(), plane, frame.format());
-}
-
-IntSize VideoLayerImpl::computeVisibleSize(const WebKit::WebVideoFrame& frame, unsigned plane)
-{
-    int visibleWidth = videoFrameDimension(frame.width(), plane, frame.format());
-    int originalWidth = visibleWidth;
-    int visibleHeight = videoFrameDimension(frame.height(), plane, frame.format());
-
-    // When there are dead pixels at the edge of the texture, decrease
-    // the frame width by 1 to prevent the rightmost pixels from
-    // interpolating with the dead pixels.
-    if (hasPaddingBytes(frame, plane))
-        --visibleWidth;
-
-    // In YV12, every 2x2 square of Y values corresponds to one U and
-    // one V value. If we decrease the width of the UV plane, we must decrease the
-    // width of the Y texture by 2 for proper alignment. This must happen
-    // always, even if Y's texture does not have padding bytes.
-    if (plane == WebKit::WebVideoFrame::yPlane && frame.format() == WebKit::WebVideoFrame::FormatYV12) {
-        if (hasPaddingBytes(frame, WebKit::WebVideoFrame::uPlane))
-            visibleWidth = originalWidth - 2;
-    }
-
-    return IntSize(visibleWidth, visibleHeight);
-}
-
-bool VideoLayerImpl::FramePlane::allocateData(ResourceProvider* resourceProvider)
+bool VideoLayerImpl::FramePlane::allocateData(
+    ResourceProvider* resourceProvider)
 {
     if (resourceId)
         return true;

     resourceId = resourceProvider->createResource(Renderer::ImplPool, size, format, ResourceProvider::TextureUsageAny);
     return resourceId;
 }
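
For context on the helpers deleted above: computeVisibleSize trimmed a plane's visible width by one column when its stride carried padding bytes, and by two columns on the Y plane when the half-width U plane was padded, to keep YV12's 2x2 sampling aligned. A rough standalone sketch of that arithmetic with hypothetical strides; it mirrors the removed code and is not part of this patch:

#include <cassert>

int main()
{
    // Hypothetical YV12 frame: 398 pixels wide, strides padded to 416 (Y) and 208 (U/V).
    const int width = 398;
    const int yStride = 416;
    const int uvStride = 208;

    // videoFrameDimension(): U/V planes are half-width for YV12.
    const int yWidth = width;       // 398
    const int uvWidth = width / 2;  // 199

    // hasPaddingBytes(): stride larger than the plane's pixel width.
    const bool yPadded = yStride > yWidth;    // true
    const bool uvPadded = uvStride > uvWidth; // true

    // computeVisibleSize() for the U plane: drop one column so the rightmost
    // visible pixels never interpolate with dead pixels in the padding.
    int uvVisibleWidth = uvWidth;
    if (uvPadded)
        --uvVisibleWidth;             // 198

    // computeVisibleSize() for the Y plane: if the U plane was trimmed, the Y
    // plane shrinks by two columns to stay aligned with 2x2 YV12 sampling.
    int yVisibleWidth = yWidth;
    if (yPadded)
        --yVisibleWidth;              // 397
    if (uvPadded)
        yVisibleWidth = yWidth - 2;   // 396

    assert(uvVisibleWidth == 198);
    assert(yVisibleWidth == 396);
    return 0;
}

The patch replaces this per-plane trimming with the frame-level visibleRect/textureSize crop computed in appendQuads.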

 void VideoLayerImpl::FramePlane::freeData(ResourceProvider* resourceProvider)
 {
     if (!resourceId)
         return;

     resourceProvider->deleteResource(resourceId);
     resourceId = 0;
 }

 bool VideoLayerImpl::allocatePlaneData(ResourceProvider* resourceProvider)
 {
+    WebKit::WebSize textureSize = m_frame->textureSize();
     int maxTextureSize = resourceProvider->maxTextureSize();
-    for (unsigned planeIndex = 0; planeIndex < m_frame->planes(); ++planeIndex) {
-        VideoLayerImpl::FramePlane& plane = m_framePlanes[planeIndex];
+    for (unsigned planeIdx = 0; planeIdx < m_frame->planes(); ++planeIdx) {
+        VideoLayerImpl::FramePlane& plane = m_framePlanes[planeIdx];

-        IntSize requiredTextureSize(m_frame->stride(planeIndex), videoFrameDimension(m_frame->height(), planeIndex, m_frame->format()));
-        // FIXME: Remove the test against maxTextureSize when tiled layers are implemented.
-        if (requiredTextureSize.isZero() || requiredTextureSize.width() > maxTextureSize || requiredTextureSize.height() > maxTextureSize)
+        IntSize requiredTextureSize(textureSize.width, textureSize.height);
+        // FIXME: Remove the test against maxTextureSize when tiled layers are
+        // implemented.
+        if (requiredTextureSize.isZero() ||
+            requiredTextureSize.width() > maxTextureSize ||
+            requiredTextureSize.height() > maxTextureSize)
             return false;

         if (plane.size != requiredTextureSize || plane.format != m_format) {
             plane.freeData(resourceProvider);
             plane.size = requiredTextureSize;
             plane.format = m_format;
         }

-        if (!plane.resourceId) {
-            if (!plane.allocateData(resourceProvider))
-                return false;
-            plane.visibleSize = computeVisibleSize(*m_frame, planeIndex);
-        }
+        if (!plane.allocateData(resourceProvider))
+            return false;
     }
     return true;
 }
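
The required texture size also changes here: the old code sized each plane from its own stride and (for YV12) halved height, while the new code allocates every plane at the frame-level textureSize. A small sketch of that difference for a hypothetical 320x240 YV12 frame with strides padded to 352 (Y) and 176 (U/V); the numbers are illustrative only:

#include <cstdio>

int main()
{
    // Hypothetical 320x240 YV12 frame.
    const int height = 240;
    const int yStride = 352, uvStride = 176;    // padded strides
    const int texWidth = 352, texHeight = 240;  // frame-level textureSize

    // Old requiredTextureSize: per plane, stride x plane height.
    std::printf("old Y plane:   %dx%d\n", yStride, height);      // 352x240
    std::printf("old U/V plane: %dx%d\n", uvStride, height / 2); // 176x120

    // New requiredTextureSize: the same textureSize for every plane.
    std::printf("new planes:    %dx%d\n", texWidth, texHeight);  // 352x240
    return 0;
}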

 bool VideoLayerImpl::copyPlaneData(ResourceProvider* resourceProvider)
 {
-    size_t softwarePlaneCount = m_frame->planes();
-    if (!softwarePlaneCount)
-        return true;
-
-    for (size_t softwarePlaneIndex = 0; softwarePlaneIndex < softwarePlaneCount; ++softwarePlaneIndex) {
-        VideoLayerImpl::FramePlane& plane = m_framePlanes[softwarePlaneIndex];
-        const uint8_t* softwarePlanePixels = static_cast<const uint8_t*>(m_frame->data(softwarePlaneIndex));
-        IntRect planeRect(IntPoint(), plane.size);
-        resourceProvider->upload(plane.resourceId, softwarePlanePixels, planeRect, planeRect, IntSize());
+    WebKit::WebSize textureSize = m_frame->textureSize();
+    for (unsigned planeIdx = 0; planeIdx < m_frame->planes(); ++planeIdx) {
+        VideoLayerImpl::FramePlane& plane = m_framePlanes[planeIdx];
+        const uint8_t* planePixels = static_cast<const uint8_t*>(m_frame->data(planeIdx));
+
+        // Only non-FormatNativeTexture planes should need upload.
+        DCHECK_EQ(plane.format, GL_LUMINANCE);
+
+        IntRect planeRect(0, 0, plane.size.width(), plane.size.height());
+        IntRect visibleRect(0, 0, textureSize.width, textureSize.height);
+        resourceProvider->upload(plane.resourceId, planePixels, planeRect,
+                                 visibleRect, IntSize());
     }
     return true;
 }

 void VideoLayerImpl::freePlaneData(ResourceProvider* resourceProvider)
 {
     for (unsigned i = 0; i < WebKit::WebVideoFrame::maxPlanes; ++i)
         m_framePlanes[i].freeData(resourceProvider);
 }

(...skipping 35 matching lines...)
     str->append("video layer\n");
     LayerImpl::dumpLayerProperties(str, indent);
 }

 const char* VideoLayerImpl::layerTypeAsString() const
 {
     return "VideoLayer";
 }

 }