Chromium Code Reviews| Index: src/gpu/gl/GrGLCaps.cpp |
| diff --git a/src/gpu/gl/GrGLCaps.cpp b/src/gpu/gl/GrGLCaps.cpp |
| index 4467ef9a8bd45f5e682d57a746bda93ddb78a74c..09309c11418cfe15ee77ee6ed68258bddb4d6e48 100644 |
| --- a/src/gpu/gl/GrGLCaps.cpp |
| +++ b/src/gpu/gl/GrGLCaps.cpp |
| @@ -646,7 +646,9 @@ bool GrGLCaps::readPixelsSupported(GrPixelConfig rtConfig, |
| GrPixelConfig readConfig, |
| std::function<void (GrGLenum, GrGLint*)> getIntegerv, |
| std::function<bool ()> bindRenderTarget) const { |
| - SkASSERT(this->isConfigRenderable(rtConfig, false)); |
| + if (!this->isConfigRenderable(rtConfig, false)) { |
|
egdaniel
2016/01/25 21:28:24
what is the case where this is not assertable any
bsalomon
2016/01/25 21:45:17
The old code was more brittle. It assumed that the
|
| + return false; |
| + } |
| GrGLenum readFormat; |
| GrGLenum readType; |
| @@ -655,7 +657,22 @@ bool GrGLCaps::readPixelsSupported(GrPixelConfig rtConfig, |
| } |
| if (kGL_GrGLStandard == fStandard) { |
| - // All of our renderable configs can be converted to each other by glReadPixels in OpenGL. |
| + // Some OpenGL implementations allow GL_ALPHA as a format to glReadPixels. However, |
| + // the manual (https://www.opengl.org/sdk/docs/man/) says only these formats are allowed: |
| + // GL_STENCIL_INDEX, GL_DEPTH_COMPONENT, GL_DEPTH_STENCIL, GL_RED, GL_GREEN, GL_BLUE, |
| + // GL_RGB, GL_BGR, GL_RGBA, and GL_BGRA. We check for the subset that we would use. |
| + if (readFormat != GR_GL_RED && readFormat != GR_GL_RGB && readFormat != GR_GL_RGBA && |
| + readFormat != GR_GL_BGRA) { |
| + return false; |
| + } |
| + // There is also a set of allowed types, but all the types we use are in the set: |
| + // GL_UNSIGNED_BYTE, GL_BYTE, GL_UNSIGNED_SHORT, GL_SHORT, GL_UNSIGNED_INT, GL_INT, |
| + // GL_HALF_FLOAT, GL_FLOAT, GL_UNSIGNED_BYTE_3_3_2, GL_UNSIGNED_BYTE_2_3_3_REV, |
| + // GL_UNSIGNED_SHORT_5_6_5, GL_UNSIGNED_SHORT_5_6_5_REV, GL_UNSIGNED_SHORT_4_4_4_4, |
| + // GL_UNSIGNED_SHORT_4_4_4_4_REV, GL_UNSIGNED_SHORT_5_5_5_1, GL_UNSIGNED_SHORT_1_5_5_5_REV, |
| + // GL_UNSIGNED_INT_8_8_8_8, GL_UNSIGNED_INT_8_8_8_8_REV, GL_UNSIGNED_INT_10_10_10_2, |
| + // GL_UNSIGNED_INT_2_10_10_10_REV, GL_UNSIGNED_INT_24_8, GL_UNSIGNED_INT_10F_11F_11F_REV, |
| + // GL_UNSIGNED_INT_5_9_9_9_REV, or GL_FLOAT_32_UNSIGNED_INT_24_8_REV. |
| return true; |
| } |
| @@ -1073,6 +1090,17 @@ bool GrGLCaps::getExternalFormat(GrPixelConfig surfaceConfig, GrPixelConfig memo |
| *externalFormat = fConfigTable[memoryConfig].fFormats.fExternalFormat[usage]; |
| *externalType = fConfigTable[memoryConfig].fFormats.fExternalType; |
| + // When GL_RED is supported as a texture format, our alpha-only textures are stored using |
| + // GL_RED and we swizzle in order to map all components to 'r'. However, in this case the |
| + // surface is not alpha-only and we want alpha to really mean the alpha component of the |
| + // texture, not the red component. |
| + if (memoryIsAlphaOnly && !surfaceIsAlphaOnly) { |
| + if (this->textureRedSupport()) { |
| + SkASSERT(GR_GL_RED == *externalFormat); |
| + *externalFormat = GR_GL_ALPHA; |
| + } |
| + } |
| + |
| return true; |
| } |