| Index: third_party/WebKit/Source/modules/webgl/WebGLRenderingContextBase.cpp
|
| diff --git a/third_party/WebKit/Source/modules/webgl/WebGLRenderingContextBase.cpp b/third_party/WebKit/Source/modules/webgl/WebGLRenderingContextBase.cpp
|
| index 5ea30fd1c1b9789fa9c95a9f2eab2fe91a9941cb..a37ce7e649e8092861275be7425d3737ffcfa275 100644
|
| --- a/third_party/WebKit/Source/modules/webgl/WebGLRenderingContextBase.cpp
|
| +++ b/third_party/WebKit/Source/modules/webgl/WebGLRenderingContextBase.cpp
|
| @@ -4395,7 +4395,7 @@ void WebGLRenderingContextBase::texImage2DCanvasByGPU(TexImageFunctionType funct
|
|
|
| if (!canvas->is3D()) {
|
| ImageBuffer* buffer = canvas->buffer();
|
| - if (!buffer->copyToPlatformTexture(webContext(), targetTexture, targetInternalformat, targetType,
|
| + if (!buffer->copyToPlatformTexture(webContext(), GL_TEXTURE_2D, targetTexture, targetInternalformat, targetType,
|
| targetLevel, m_unpackPremultiplyAlpha, m_unpackFlipY)) {
|
| ASSERT_NOT_REACHED();
|
| }
|
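Note: the first hunk threads an explicit texture target (here always GL_TEXTURE_2D) through ImageBuffer::copyToPlatformTexture. The matching header change is not part of this file, so the declaration below is only a sketch inferred from the arguments at the call site; parameter names and exact types are assumptions, not taken from ImageBuffer.h.

    // Inferred shape of the updated ImageBuffer entry point (sketch only).
    bool copyToPlatformTexture(WebGraphicsContext3D*,
                               GLenum target,            // new parameter, e.g. GL_TEXTURE_2D
                               Platform3DObject texture, // destination texture id
                               GLenum internalFormat,
                               GLenum destType,
                               GLint level,
                               bool premultiplyAlpha,
                               bool flipY);
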
| @@ -4471,30 +4471,31 @@ void WebGLRenderingContextBase::texImage2D(GLenum target, GLint level, GLenum in
|
| // Otherwise, it will fall back to the normal SW path.
|
| WebGLTexture* texture = validateTextureBinding("texImage2D", target, true);
|
| ASSERT(texture);
|
| - if (GL_TEXTURE_2D == target) {
|
| - if (Extensions3DUtil::canUseCopyTextureCHROMIUM(target, internalformat, type, level)
|
| - && video->copyVideoTextureToPlatformTexture(webContext(), texture->object(), internalformat, type, m_unpackPremultiplyAlpha, m_unpackFlipY)) {
|
| - texture->setLevelInfo(target, level, internalformat, video->videoWidth(), video->videoHeight(), 1, type);
|
| - return;
|
| - }
|
| -
|
| - // Try using an accelerated image buffer, this allows YUV conversion to be done on the GPU.
|
| - OwnPtr<ImageBufferSurface> surface = adoptPtr(new AcceleratedImageBufferSurface(IntSize(video->videoWidth(), video->videoHeight())));
|
| - if (surface->isValid()) {
|
| - OwnPtr<ImageBuffer> imageBuffer(ImageBuffer::create(surface.release()));
|
| - if (imageBuffer) {
|
| - // The video element paints an RGBA frame into our surface here. By using an AcceleratedImageBufferSurface,
|
| - // we enable the WebMediaPlayer implementation to do any necessary color space conversion on the GPU (though it
|
| - // may still do a CPU conversion and upload the results).
|
| - video->paintCurrentFrame(imageBuffer->canvas(), IntRect(0, 0, video->videoWidth(), video->videoHeight()), nullptr);
|
| - imageBuffer->canvas()->flush();
|
| -
|
| - // This is a straight GPU-GPU copy, any necessary color space conversion was handled in the paintCurrentFrameInContext() call.
|
| - if (imageBuffer->copyToPlatformTexture(webContext(), texture->object(), internalformat, type,
|
| + if (Extensions3DUtil::canUseCopyTextureCHROMIUM(target, internalformat, type, level)
|
| + && video->copyVideoTextureToPlatformTexture(webContext(), {
|
| + WebMediaPlayer::CopyVideoTextureParams::FullCopy, target, texture->object(),
|
| + internalformat, type, level, 0, 0, m_unpackPremultiplyAlpha, m_unpackFlipY
|
| + })) {
|
| + texture->setLevelInfo(target, level, internalformat, video->videoWidth(), video->videoHeight(), 1, type);
|
| + return;
|
| + }
|
| +
|
| + // Try using an accelerated image buffer; this allows YUV conversion to be done on the GPU.
|
| + OwnPtr<ImageBufferSurface> surface = adoptPtr(new AcceleratedImageBufferSurface(IntSize(video->videoWidth(), video->videoHeight())));
|
| + if (surface->isValid()) {
|
| + OwnPtr<ImageBuffer> imageBuffer(ImageBuffer::create(surface.release()));
|
| + if (imageBuffer) {
|
| + // The video element paints an RGBA frame into our surface here. By using an AcceleratedImageBufferSurface,
|
| + // we enable the WebMediaPlayer implementation to do any necessary color space conversion on the GPU (though it
|
| + // may still do a CPU conversion and upload the results).
|
| + video->paintCurrentFrame(imageBuffer->canvas(), IntRect(0, 0, video->videoWidth(), video->videoHeight()), nullptr);
|
| + imageBuffer->canvas()->flush();
|
| +
|
| + // This is a straight GPU-GPU copy; any necessary color space conversion was handled in the paintCurrentFrame() call above.
|
| + if (imageBuffer->copyToPlatformTexture(webContext(), target, texture->object(), internalformat, type,
|
| level, m_unpackPremultiplyAlpha, m_unpackFlipY)) {
|
| - texture->setLevelInfo(target, level, internalformat, video->videoWidth(), video->videoHeight(), 1, type);
|
| - return;
|
| - }
|
| + texture->setLevelInfo(target, level, internalformat, video->videoWidth(), video->videoHeight(), 1, type);
|
| + return;
|
| }
|
| }
|
| }
|
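Note: both new fast paths brace-initialize a WebMediaPlayer::CopyVideoTextureParams aggregate. Its definition is not part of this file, so the struct below is only a sketch of the field order implied by the two call sites (FullCopy in texImage2D above, SubCopy in texSubImage2D below); field names and types are assumptions.

    // Hypothetical layout matching the brace-initializers at the call sites;
    // the real definition presumably lives alongside WebMediaPlayer and is not shown here.
    struct CopyVideoTextureParams {
        enum CopyType { FullCopy, SubCopy };
        CopyType copyType;       // FullCopy: whole mip level, SubCopy: sub-rectangle
        unsigned target;         // e.g. GL_TEXTURE_2D
        unsigned texture;        // destination texture id (texture->object())
        unsigned internalformat; // unused (0 / GL_FALSE) for SubCopy
        unsigned type;           // unused (0 / GL_FALSE) for SubCopy
        int level;
        int xoffset;             // always 0 for FullCopy
        int yoffset;             // always 0 for FullCopy
        bool premultiplyAlpha;
        bool flipY;
    };
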
| @@ -4712,6 +4713,37 @@ void WebGLRenderingContextBase::texSubImage2D(GLenum target, GLint level, GLint
|
| || !validateTexFunc("texSubImage2D", TexSubImage2D, SourceHTMLVideoElement, target, level, 0, video->videoWidth(), video->videoHeight(), 0, format, type, xoffset, yoffset))
|
| return;
|
|
|
| + // Go through the fast path doing a GPU-GPU texture copy without a readback to system memory if possible.
|
| + // Otherwise, it will fall back to the normal SW path.
|
| + WebGLTexture* texture = validateTextureBinding("texSubImage2D", target, true);
|
| + ASSERT(texture);
|
| + if (Extensions3DUtil::canUseCopyTextureCHROMIUM(target, GL_RGBA, type, level)
|
| + && video->copyVideoTextureToPlatformTexture(webContext(), {
|
| + WebMediaPlayer::CopyVideoTextureParams::SubCopy, target, texture->object(),
|
| + GL_FALSE, GL_FALSE, level, xoffset, yoffset, m_unpackPremultiplyAlpha, m_unpackFlipY
|
| + })) {
|
| + return;
|
| + }
|
| +
|
| + // Try using an accelerated image buffer; this allows YUV conversion to be done on the GPU.
|
| + OwnPtr<ImageBufferSurface> surface = adoptPtr(new AcceleratedImageBufferSurface(IntSize(video->videoWidth(), video->videoHeight())));
|
| + if (surface->isValid()) {
|
| + OwnPtr<ImageBuffer> imageBuffer(ImageBuffer::create(surface.release()));
|
| + if (imageBuffer) {
|
| + // The video element paints an RGBA frame into our surface here. By using an AcceleratedImageBufferSurface,
|
| + // we enable the WebMediaPlayer implementation to do any necessary color space conversion on the GPU (though it
|
| + // may still do a CPU conversion and upload the results).
|
| + video->paintCurrentFrame(imageBuffer->canvas(), IntRect(0, 0, video->videoWidth(), video->videoHeight()), nullptr);
|
| + imageBuffer->canvas()->flush();
|
| +
|
| + // This is a straight GPU-GPU copy; any necessary color space conversion was handled in the paintCurrentFrame() call above.
|
| + if (imageBuffer->copySubToPlatformTexture(webContext(), target, texture->object(), level,
|
| + xoffset, yoffset, video->videoWidth(), video->videoHeight(), m_unpackPremultiplyAlpha, m_unpackFlipY)) {
|
| + return;
|
| + }
|
| + }
|
| + }
|
| +
|
| RefPtr<Image> image = videoFrameToImage(video);
|
| if (!image)
|
| return;
|
|
|
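Note: the texSubImage2D fallback calls an ImageBuffer::copySubToPlatformTexture helper that does not appear elsewhere in this diff, so it is presumably introduced by another part of this change. The declaration below is only a sketch inferred from the arguments at the call site; names and types are assumptions.

    // Inferred from the copySubToPlatformTexture call site above (sketch only).
    bool copySubToPlatformTexture(WebGraphicsContext3D*,
                                  GLenum target,
                                  Platform3DObject texture,
                                  GLint level,
                                  GLint xoffset, GLint yoffset,
                                  int width, int height,  // video frame size at the call site
                                  bool premultiplyAlpha,
                                  bool flipY);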