Chromium Code Reviews| Index: src/gpu/gl/GrGLCaps.cpp |
| diff --git a/src/gpu/gl/GrGLCaps.cpp b/src/gpu/gl/GrGLCaps.cpp |
| index 81d0b4483b0b77cd41cd79672d2e93b9de271987..7a4f749fbf7d3fd640843fdea4bdd6910a61404c 100644 |
| --- a/src/gpu/gl/GrGLCaps.cpp |
| +++ b/src/gpu/gl/GrGLCaps.cpp |
| @@ -643,7 +643,11 @@ bool GrGLCaps::readPixelsSupported(GrPixelConfig rtConfig, |
| GrPixelConfig readConfig, |
| std::function<void (GrGLenum, GrGLint*)> getIntegerv, |
| std::function<bool ()> bindRenderTarget) const { |
| - SkASSERT(this->isConfigRenderable(rtConfig, false)); |
| + // If it's not possible to even have a render target of rtConfig then read pixels is |
|
egdaniel
2016/01/25 22:02:39
make rt config rtConfig to match variable? Also of
bsalomon
2016/01/25 22:21:10
Done.
|
| + // not supported regardless of the read config. |
| + if (!this->isConfigRenderable(rtConfig, false)) { |
| + return false; |
| + } |
| GrGLenum readFormat; |
| GrGLenum readType; |
| @@ -652,7 +656,22 @@ bool GrGLCaps::readPixelsSupported(GrPixelConfig rtConfig, |
| } |
| if (kGL_GrGLStandard == fStandard) { |
| - // All of our renderable configs can be converted to each other by glReadPixels in OpenGL. |
| + // Some OpenGL implementations allow GL_ALPHA as a format to glReadPixels. However, |
| + // the manual (https://www.opengl.org/sdk/docs/man/) says only these formats are allowed: |
| + // GL_STENCIL_INDEX, GL_DEPTH_COMPONENT, GL_DEPTH_STENCIL, GL_RED, GL_GREEN, GL_BLUE, |
| + // GL_RGB, GL_BGR, GL_RGBA, and GL_BGRA. We check for the subset that we would use. |
| + if (readFormat != GR_GL_RED && readFormat != GR_GL_RGB && readFormat != GR_GL_RGBA && |
| + readFormat != GR_GL_BGRA) { |
| + return false; |
| + } |
| + // There is also a set of allowed types, but all the types we use are in the set: |
| + // GL_UNSIGNED_BYTE, GL_BYTE, GL_UNSIGNED_SHORT, GL_SHORT, GL_UNSIGNED_INT, GL_INT, |
| + // GL_HALF_FLOAT, GL_FLOAT, GL_UNSIGNED_BYTE_3_3_2, GL_UNSIGNED_BYTE_2_3_3_REV, |
| + // GL_UNSIGNED_SHORT_5_6_5, GL_UNSIGNED_SHORT_5_6_5_REV, GL_UNSIGNED_SHORT_4_4_4_4, |
| + // GL_UNSIGNED_SHORT_4_4_4_4_REV, GL_UNSIGNED_SHORT_5_5_5_1, GL_UNSIGNED_SHORT_1_5_5_5_REV, |
| + // GL_UNSIGNED_INT_8_8_8_8, GL_UNSIGNED_INT_8_8_8_8_REV, GL_UNSIGNED_INT_10_10_10_2, |
| + // GL_UNSIGNED_INT_2_10_10_10_REV, GL_UNSIGNED_INT_24_8, GL_UNSIGNED_INT_10F_11F_11F_REV, |
| + // GL_UNSIGNED_INT_5_9_9_9_REV, or GL_FLOAT_32_UNSIGNED_INT_24_8_REV. |
| return true; |
| } |
| @@ -1070,6 +1089,17 @@ bool GrGLCaps::getExternalFormat(GrPixelConfig surfaceConfig, GrPixelConfig memo |
| *externalFormat = fConfigTable[memoryConfig].fFormats.fExternalFormat[usage]; |
| *externalType = fConfigTable[memoryConfig].fFormats.fExternalType; |
| + // When GL_RED is supported as a texture format, our alpha-only textures are stored using |
| + // GL_RED and we swizzle in order to map all components to 'r'. However, in this case the |
| + // surface is not alpha-only and we want alpha to really mean the alpha component of the |
| + // texture, not the red component. |
| + if (memoryIsAlphaOnly && !surfaceIsAlphaOnly) { |
| + if (this->textureRedSupport()) { |
| + SkASSERT(GR_GL_RED == *externalFormat); |
| + *externalFormat = GR_GL_ALPHA; |
| + } |
| + } |
| + |
| return true; |
| } |