Index: Source/modules/webgl/WebGLRenderingContextBase.cpp
diff --git a/Source/modules/webgl/WebGLRenderingContextBase.cpp b/Source/modules/webgl/WebGLRenderingContextBase.cpp
index c18ea0b24ac9d1e3afc0d584c14def2d679e99a1..c9b3dec3353f7d7bb3e6d17f498862f2563168ec 100644
--- a/Source/modules/webgl/WebGLRenderingContextBase.cpp
+++ b/Source/modules/webgl/WebGLRenderingContextBase.cpp
@@ -3709,35 +3709,13 @@ bool WebGLRenderingContextBase::validateReadPixelsFormatTypeCombination(GLenum f
     return true;
 }
 
-void WebGLRenderingContextBase::readPixels(GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, DOMArrayBufferView* pixels)
+DOMArrayBufferView::ViewType WebGLRenderingContextBase::readPixelsExpectedArrayBufferViewType(GLenum type)
 {
-    if (isContextLost())
-        return;
-    // Due to WebGL's same-origin restrictions, it is not possible to
-    // taint the origin using the WebGL API.
-    ASSERT(canvas()->originClean());
-    // Validate input parameters.
-    if (!pixels) {
-        synthesizeGLError(GL_INVALID_VALUE, "readPixels", "no destination ArrayBufferView");
-        return;
-    }
-    if (!validateReadPixelsFormatAndType(format, type))
-        return;
-    GLenum readBufferInternalFormat = 0, readBufferType = 0;
-    WebGLFramebuffer* readFramebufferBinding = nullptr;
-    if (!validateReadBufferAndGetInfo("readPixels", readFramebufferBinding, &readBufferInternalFormat, &readBufferType))
-        return;
-    if (!validateReadPixelsFormatTypeCombination(format, type, readBufferInternalFormat, readBufferType))
-        return;
-
     DOMArrayBufferView::ViewType expectedViewType;
[Inline review comment from Zhenyao Mo, 2015/08/14 16:07:42: "nit: you don't need this expectedViewType. You ca..." (truncated in the review view)]
     switch (type) {
     case GL_UNSIGNED_BYTE:
         expectedViewType = DOMArrayBufferView::TypeUint8;
         break;
-    case GL_BYTE:
-        expectedViewType = DOMArrayBufferView::TypeInt8;
-        break;
     case GL_UNSIGNED_SHORT_5_6_5:
     case GL_UNSIGNED_SHORT_4_4_4_4:
     case GL_UNSIGNED_SHORT_5_5_5_1:
@@ -3746,24 +3724,39 @@ void WebGLRenderingContextBase::readPixels(GLint x, GLint y, GLsizei width, GLsi
     case GL_FLOAT:
         expectedViewType = DOMArrayBufferView::TypeFloat32;
         break;
-    case GL_HALF_FLOAT:
     case GL_HALF_FLOAT_OES:
         expectedViewType = DOMArrayBufferView::TypeUint16;
         break;
-    case GL_UNSIGNED_INT:
-    case GL_UNSIGNED_INT_2_10_10_10_REV:
-    case GL_UNSIGNED_INT_10F_11F_11F_REV:
-    case GL_UNSIGNED_INT_5_9_9_9_REV:
-        expectedViewType = DOMArrayBufferView::TypeUint32;
-        break;
-    case GL_INT:
-        expectedViewType = DOMArrayBufferView::TypeInt32;
-        break;
     default:
         ASSERT_NOT_REACHED();
         expectedViewType = DOMArrayBufferView::TypeUint8;
         break;
     }
+    return expectedViewType;
+}
+
+void WebGLRenderingContextBase::readPixels(GLint x, GLint y, GLsizei width, GLsizei height, GLenum format, GLenum type, DOMArrayBufferView* pixels)
+{
+    if (isContextLost())
+        return;
+    // Due to WebGL's same-origin restrictions, it is not possible to
+    // taint the origin using the WebGL API.
+    ASSERT(canvas()->originClean());
+    // Validate input parameters.
+    if (!pixels) {
+        synthesizeGLError(GL_INVALID_VALUE, "readPixels", "no destination ArrayBufferView");
+        return;
+    }
+    if (!validateReadPixelsFormatAndType(format, type))
+        return;
+    GLenum readBufferInternalFormat = 0, readBufferType = 0;
+    WebGLFramebuffer* readFramebufferBinding = nullptr;
+    if (!validateReadBufferAndGetInfo("readPixels", readFramebufferBinding, &readBufferInternalFormat, &readBufferType))
+        return;
+    if (!validateReadPixelsFormatTypeCombination(format, type, readBufferInternalFormat, readBufferType))
+        return;
+
+    DOMArrayBufferView::ViewType expectedViewType = readPixelsExpectedArrayBufferViewType(type);
     // Validate array type against pixel type.
     if (pixels->type() != expectedViewType) {
         synthesizeGLError(GL_INVALID_OPERATION, "readPixels", "ArrayBufferView was the wrong type for the pixel format");
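[Editor's note] For reference, a minimal standalone sketch of the behavior the patch factors out: the extracted helper maps a readPixels pixel-transfer type to the ArrayBufferView type the destination buffer must have, and readPixels then rejects a mismatched view with GL_INVALID_OPERATION. The apparent motivation, judging from the types the patch removes from the base switch, is to let a derived context (e.g. WebGL 2) supply a wider mapping, though the diff excerpt does not show that part. This is not Blink code: the ViewType enum, the expectedViewTypeForReadPixels name, and the main() driver are hypothetical; only the GL constant values (copied from the GLES2/OES headers) and the mapping itself come from the patch.

// Illustrative sketch only; names and structure are hypothetical, not Blink's.
#include <cassert>
#include <cstdint>
#include <cstdio>

// WebGL 1 pixel-transfer type enums, values as in the GLES2/OES headers.
const uint32_t GL_UNSIGNED_BYTE = 0x1401;
const uint32_t GL_UNSIGNED_SHORT_5_6_5 = 0x8363;
const uint32_t GL_UNSIGNED_SHORT_4_4_4_4 = 0x8033;
const uint32_t GL_UNSIGNED_SHORT_5_5_5_1 = 0x8034;
const uint32_t GL_FLOAT = 0x1406;
const uint32_t GL_HALF_FLOAT_OES = 0x8D61;

// Stand-in for DOMArrayBufferView::ViewType.
enum class ViewType { Uint8, Uint16, Float32 };

// Mirrors the mapping performed by the extracted helper: each accepted
// readPixels type has exactly one expected destination view type.
ViewType expectedViewTypeForReadPixels(uint32_t type)
{
    switch (type) {
    case GL_UNSIGNED_BYTE:
        return ViewType::Uint8;
    case GL_UNSIGNED_SHORT_5_6_5:
    case GL_UNSIGNED_SHORT_4_4_4_4:
    case GL_UNSIGNED_SHORT_5_5_5_1:
    case GL_HALF_FLOAT_OES: // half floats are read back into a Uint16Array
        return ViewType::Uint16;
    case GL_FLOAT:
        return ViewType::Float32;
    default:
        // The real code ASSERTs here because the type was already validated.
        assert(false && "type must be validated before calling this helper");
        return ViewType::Uint8;
    }
}

int main()
{
    // readPixels in the patch compares the destination view's type against
    // the expected one and raises GL_INVALID_OPERATION on a mismatch; here
    // the check's outcome is just printed.
    ViewType destination = ViewType::Uint8; // e.g. the caller passed a Uint8Array
    bool ok = destination == expectedViewTypeForReadPixels(GL_UNSIGNED_BYTE);
    std::printf("UNSIGNED_BYTE into Uint8Array: %s\n", ok ? "ok" : "wrong view type");

    ok = destination == expectedViewTypeForReadPixels(GL_FLOAT);
    std::printf("FLOAT into Uint8Array: %s\n", ok ? "ok" : "wrong view type");
    return 0;
}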