Index: Source/modules/webgl/WebGLRenderingContextBase.cpp
diff --git a/Source/modules/webgl/WebGLRenderingContextBase.cpp b/Source/modules/webgl/WebGLRenderingContextBase.cpp
index c18ea0b24ac9d1e3afc0d584c14def2d679e99a1..5885572583729d7bfbd17809612760ab69b44e66 100644
--- a/Source/modules/webgl/WebGLRenderingContextBase.cpp
+++ b/Source/modules/webgl/WebGLRenderingContextBase.cpp
@@ -3738,11 +3738,15 @@ void WebGLRenderingContextBase::readPixels(GLint x, GLint y, GLsizei width, GLsi
     case GL_BYTE:
         expectedViewType = DOMArrayBufferView::TypeInt8;
        break;
+    case GL_UNSIGNED_SHORT:
Zhenyao Mo, 2015/08/13 15:22:01:
    This is incorrect. For OpenGL ES 2 / WebGL 1, it is not a valid readPixels type.
yunchao, 2015/08/14 00:07:11:
    Yeah. I know that WebGL 1.0 should not support UNSIGNED_SHORT here.

     case GL_UNSIGNED_SHORT_5_6_5:
     case GL_UNSIGNED_SHORT_4_4_4_4:
     case GL_UNSIGNED_SHORT_5_5_5_1:
         expectedViewType = DOMArrayBufferView::TypeUint16;
         break;
+    case GL_SHORT:
+        expectedViewType = DOMArrayBufferView::TypeInt16;
+        break;
     case GL_FLOAT:
         expectedViewType = DOMArrayBufferView::TypeFloat32;
         break;