Index: cc/output/gl_renderer.cc
diff --git a/cc/output/gl_renderer.cc b/cc/output/gl_renderer.cc
index ac68a08c62f622ed275cca567de070aa52845e20..384537771ff725ed176e4564a998b3549e195df3 100644
--- a/cc/output/gl_renderer.cc
+++ b/cc/output/gl_renderer.cc
@@ -2216,18 +2216,40 @@ void GLRenderer::DrawYUVVideoQuad(const DrawingFrame* frame,
       base::FeatureList::IsEnabled(media::kVideoColorManagement);
   DCHECK(!(use_nv12 && use_alpha_plane));
+  FragmentShaderYUVVideo::HighbitTexture highbit_texture =
+      FragmentShaderYUVVideo::HIGHBIT_Y8;
+  if (quad->bits_per_channel > 8) {
+    // TODO(dshwang): the resource should know its format. crbug.com/624436
+    ResourceFormat resource_format =
+        resource_provider_->GetResourceFormat(quad->y_plane_resource_id());
+    if (resource_format == LUMINANCE_F16) {
+      highbit_texture = FragmentShaderYUVVideo::HIGHBIT_LUMINANCE_F16;
+    } else {
+      DCHECK(RG_88 == resource_format || RGBA_8888 == resource_format);
+      highbit_texture = FragmentShaderYUVVideo::HIGHBIT_RG88;
+    }
+  }
+  DCHECK_LE(YUVVideoDrawQuad::kMinBitsPerChannel, quad->bits_per_channel);
+  DCHECK_LE(quad->bits_per_channel, YUVVideoDrawQuad::kMaxBitsPerChannel);
+  GLenum filter = highbit_texture == FragmentShaderYUVVideo::HIGHBIT_RG88
+                      ? GL_NEAREST
+                      : GL_LINEAR;
+
+  // y_plane can be an RG texture, so do bilinear filtering in the shader.
   ResourceProvider::ScopedSamplerGL y_plane_lock(
-      resource_provider_, quad->y_plane_resource_id(), GL_TEXTURE1, GL_LINEAR);
+      resource_provider_, quad->y_plane_resource_id(), GL_TEXTURE1, filter);
   ResourceProvider::ScopedSamplerGL u_plane_lock(
-      resource_provider_, quad->u_plane_resource_id(), GL_TEXTURE2, GL_LINEAR);
+      resource_provider_, quad->u_plane_resource_id(), GL_TEXTURE2, filter);
   DCHECK_EQ(y_plane_lock.target(), u_plane_lock.target());
   // TODO(jbauman): Use base::Optional when available.
   std::unique_ptr<ResourceProvider::ScopedSamplerGL> v_plane_lock;
   if (!use_nv12) {
     v_plane_lock.reset(new ResourceProvider::ScopedSamplerGL(
-        resource_provider_, quad->v_plane_resource_id(), GL_TEXTURE3,
-        GL_LINEAR));
+        resource_provider_, quad->v_plane_resource_id(), GL_TEXTURE3, filter));
     DCHECK_EQ(y_plane_lock.target(), v_plane_lock->target());
+  } else {
+    // |bits_per_channel| of PIXEL_FORMAT_NV12 is 8.
+    DCHECK_EQ(static_cast<GLenum>(GL_LINEAR), filter);
   }
   std::unique_ptr<ResourceProvider::ScopedSamplerGL> a_plane_lock;
   if (use_alpha_plane) {
@@ -2235,6 +2257,8 @@ void GLRenderer::DrawYUVVideoQuad(const DrawingFrame* frame,
         resource_provider_, quad->a_plane_resource_id(), GL_TEXTURE4,
         GL_LINEAR));
     DCHECK_EQ(y_plane_lock.target(), a_plane_lock->target());
+    // |bits_per_channel| of PIXEL_FORMAT_YV12A is 8.
+    DCHECK_EQ(static_cast<GLenum>(GL_LINEAR), filter);
   }
   // All planes must have the same sampler type.
@@ -2245,6 +2269,8 @@ void GLRenderer::DrawYUVVideoQuad(const DrawingFrame* frame,
   int ya_tex_offset_location = -1;
   int uv_tex_scale_location = -1;
   int uv_tex_offset_location = -1;
+  int ya_size_location = -1;
+  int uv_size_location = -1;
   int ya_clamp_rect_location = -1;
   int uv_clamp_rect_location = -1;
   int y_texture_location = -1;
@@ -2258,8 +2284,9 @@ void GLRenderer::DrawYUVVideoQuad(const DrawingFrame* frame,
   int alpha_location = -1;
   int resource_multiplier_location = -1;
   int resource_offset_location = -1;
-  const VideoYUVProgram* program = GetVideoYUVProgram(
-      tex_coord_precision, sampler, use_alpha_plane, use_nv12, use_color_lut);
+  const VideoYUVProgram* program =
+      GetVideoYUVProgram(tex_coord_precision, sampler, use_alpha_plane,
+                         use_nv12, use_color_lut, highbit_texture);
   DCHECK(program && (program->initialized() || IsContextLost()));
   SetUseProgram(program->program());
   matrix_location = program->vertex_shader().matrix_location();
@@ -2275,6 +2302,8 @@ void GLRenderer::DrawYUVVideoQuad(const DrawingFrame* frame,
   lut_texture_location = program->fragment_shader().lut_texture_location();
   yuv_matrix_location = program->fragment_shader().yuv_matrix_location();
   yuv_adj_location = program->fragment_shader().yuv_adj_location();
+  ya_size_location = program->fragment_shader().ya_size_location();
+  uv_size_location = program->fragment_shader().uv_size_location();
   ya_clamp_rect_location = program->fragment_shader().ya_clamp_rect_location();
   uv_clamp_rect_location = program->fragment_shader().uv_clamp_rect_location();
   alpha_location = program->fragment_shader().alpha_location();
@@ -2344,83 +2373,115 @@ void GLRenderer::DrawYUVVideoQuad(const DrawingFrame* frame,
   if (use_alpha_plane)
     gl_->Uniform1i(a_texture_location, 4);
-  // These values are magic numbers that are used in the transformation from YUV
-  // to RGB color values. They are taken from the following webpage:
-  // http://www.fourcc.org/fccyvrgb.php
-  float yuv_to_rgb_rec601[9] = {
-      1.164f, 1.164f, 1.164f, 0.0f, -.391f, 2.018f, 1.596f, -.813f, 0.0f,
-  };
-  float yuv_to_rgb_jpeg[9] = {
-      1.f, 1.f, 1.f, 0.0f, -.34414f, 1.772f, 1.402f, -.71414f, 0.0f,
-  };
-  float yuv_to_rgb_rec709[9] = {
-      1.164f, 1.164f, 1.164f, 0.0f, -0.213f, 2.112f, 1.793f, -0.533f, 0.0f,
-  };
-
-  // They are used in the YUV to RGBA conversion formula:
-  // Y - 16 : Gives 16 values of head and footroom for overshooting
-  // U - 128 : Turns unsigned U into signed U [-128,127]
-  // V - 128 : Turns unsigned V into signed V [-128,127]
-  float yuv_adjust_constrained[3] = {
-      -16.f, -128.f, -128.f,
-  };
-
-  // Same as above, but without the head and footroom.
-  float yuv_adjust_full[3] = {
-      0.0f, -128.f, -128.f,
-  };
-
-  float* yuv_to_rgb = NULL;
-  float* yuv_adjust = NULL;
-
-  switch (quad->color_space) {
-    case YUVVideoDrawQuad::REC_601:
-      yuv_to_rgb = yuv_to_rgb_rec601;
-      yuv_adjust = yuv_adjust_constrained;
+  float resource_multiplier = 1.f;
+  switch (highbit_texture) {
+    case FragmentShaderYUVVideo::HIGHBIT_Y8:
+      resource_multiplier = 1.f;
       break;
-    case YUVVideoDrawQuad::REC_709:
-      yuv_to_rgb = yuv_to_rgb_rec709;
-      yuv_adjust = yuv_adjust_constrained;
+    case FragmentShaderYUVVideo::HIGHBIT_LUMINANCE_F16:
+      resource_multiplier = quad->resource_multiplier;
       break;
-    case YUVVideoDrawQuad::JPEG:
-      yuv_to_rgb = yuv_to_rgb_jpeg;
-      yuv_adjust = yuv_adjust_full;
+    case FragmentShaderYUVVideo::HIGHBIT_RG88:
+      // f(r, g) = normalized_float = ((g * 256 * 255) + r * 255) / 1023
+      // when |bits_per_channel| is 10 bits and |r| and |g| are normalized.
+      resource_multiplier = 255.f / ((1 << quad->bits_per_channel) - 1);
       break;
   }
-  float yuv_to_rgb_multiplied[9];
-  float yuv_adjust_with_offset[3];
-
-  // Formula according to BT.601-7 section 2.5.3.
-  DCHECK_LE(YUVVideoDrawQuad::kMinBitsPerChannel, quad->bits_per_channel);
-  DCHECK_LE(quad->bits_per_channel, YUVVideoDrawQuad::kMaxBitsPerChannel);
-  float adjustment_multiplier = (1 << (quad->bits_per_channel - 8)) * 1.0f /
-                                ((1 << quad->bits_per_channel) - 1);
-
-  for (int i = 0; i < 9; ++i)
-    yuv_to_rgb_multiplied[i] = yuv_to_rgb[i] * quad->resource_multiplier;
-
-  for (int i = 0; i < 3; ++i) {
-    yuv_adjust_with_offset[i] =
-        yuv_adjust[i] * adjustment_multiplier / quad->resource_multiplier -
-        quad->resource_offset;
-  }
-
-  if (lut_texture_location != -1) {
+  if (use_color_lut) {
+    DCHECK_NE(-1, lut_texture_location);
     unsigned int lut_texture = color_lut_cache_.GetLUT(
         quad->video_color_space, frame->device_color_space, 32);
     gl_->ActiveTexture(GL_TEXTURE5);
     gl_->BindTexture(GL_TEXTURE_2D, lut_texture);
     gl_->Uniform1i(lut_texture_location, 5);
     gl_->ActiveTexture(GL_TEXTURE0);
-  }
-  if (resource_multiplier_location != -1) {
-    gl_->Uniform1f(resource_multiplier_location, quad->resource_multiplier);
+    if (highbit_texture == FragmentShaderYUVVideo::HIGHBIT_LUMINANCE_F16 ||
+        highbit_texture == FragmentShaderYUVVideo::HIGHBIT_RG88) {
+      DCHECK_NE(-1, resource_multiplier_location);
+      gl_->Uniform1f(resource_multiplier_location, resource_multiplier);
+    }
+    if (highbit_texture == FragmentShaderYUVVideo::HIGHBIT_LUMINANCE_F16) {
+      DCHECK_NE(-1, resource_offset_location);
+      gl_->Uniform1f(resource_offset_location, quad->resource_offset);
+    }
+  } else {
+    DCHECK_NE(-1, yuv_matrix_location);
+    DCHECK_NE(-1, yuv_adj_location);
+
+    // These values are magic numbers that are used in the transformation from
+    // YUV to RGB color values. They are taken from the following webpage:
+    // http://www.fourcc.org/fccyvrgb.php
+    float yuv_to_rgb_rec601[9] = {
+        1.164f, 1.164f, 1.164f, 0.0f, -.391f, 2.018f, 1.596f, -.813f, 0.0f,
+    };
+    float yuv_to_rgb_jpeg[9] = {
+        1.f, 1.f, 1.f, 0.0f, -.34414f, 1.772f, 1.402f, -.71414f, 0.0f,
+    };
+    float yuv_to_rgb_rec709[9] = {
+        1.164f, 1.164f, 1.164f, 0.0f, -0.213f, 2.112f, 1.793f, -0.533f, 0.0f,
+    };
+
+    // They are used in the YUV to RGBA conversion formula:
+    // Y - 16 : Gives 16 values of head and footroom for overshooting
+    // U - 128 : Turns unsigned U into signed U [-128,127]
+    // V - 128 : Turns unsigned V into signed V [-128,127]
+    float yuv_adjust_constrained[3] = {
+        -16.f, -128.f, -128.f,
+    };
+
+    // Same as above, but without the head and footroom.
+    float yuv_adjust_full[3] = {
+        0.0f, -128.f, -128.f,
+    };
+
+    float* yuv_to_rgb = NULL;
+    float* yuv_adjust = NULL;
+
+    switch (quad->color_space) {
+      case YUVVideoDrawQuad::REC_601:
+        yuv_to_rgb = yuv_to_rgb_rec601;
+        yuv_adjust = yuv_adjust_constrained;
+        break;
+      case YUVVideoDrawQuad::REC_709:
+        yuv_to_rgb = yuv_to_rgb_rec709;
+        yuv_adjust = yuv_adjust_constrained;
+        break;
+      case YUVVideoDrawQuad::JPEG:
+        yuv_to_rgb = yuv_to_rgb_jpeg;
+        yuv_adjust = yuv_adjust_full;
+        break;
+    }
+
+    float yuv_to_rgb_multiplied[9];
+    for (int i = 0; i < 9; ++i)
+      yuv_to_rgb_multiplied[i] = yuv_to_rgb[i] * resource_multiplier;
+    gl_->UniformMatrix3fv(yuv_matrix_location, 1, 0, yuv_to_rgb_multiplied);
+
+    float yuv_adjust_with_offset[3];
+    // Formula according to BT.601-7 section 2.5.3.
+    float adjustment_multiplier = (1 << (quad->bits_per_channel - 8)) * 1.0f /
+                                  ((1 << quad->bits_per_channel) - 1);
+    float resource_offset = 0.f;
+    if (highbit_texture == FragmentShaderYUVVideo::HIGHBIT_LUMINANCE_F16)
+      resource_offset = quad->resource_offset;
+    for (int i = 0; i < 3; ++i) {
+      yuv_adjust_with_offset[i] =
+          yuv_adjust[i] * adjustment_multiplier / resource_multiplier -
+          resource_offset;
+    }
+
+    gl_->Uniform3fv(yuv_adj_location, 1, yuv_adjust_with_offset);
   }
-  if (resource_offset_location != -1) {
-    gl_->Uniform1f(resource_offset_location, quad->resource_offset);
+  if (highbit_texture == FragmentShaderYUVVideo::HIGHBIT_RG88) {
+    DCHECK_NE(-1, ya_size_location);
+    DCHECK_NE(-1, uv_size_location);
+    gl_->Uniform2f(ya_size_location, quad->ya_tex_size.width(),
+                   quad->ya_tex_size.height());
+    gl_->Uniform2f(uv_size_location, quad->uv_tex_size.width(),
+                   quad->uv_tex_size.height());
   }
   // The transform and vertex data are used to figure out the extents that the
@@ -2428,13 +2489,6 @@ void GLRenderer::DrawYUVVideoQuad(const DrawingFrame* frame,
   // quad passed in via uniform is the actual geometry that gets used to draw
   // it. This is why this centered rect is used and not the original quad_rect.
   auto tile_rect = gfx::RectF(quad->rect);
-  if (yuv_matrix_location != -1) {
-    gl_->UniformMatrix3fv(yuv_matrix_location, 1, 0, yuv_to_rgb_multiplied);
-  }
-
-  if (yuv_adj_location) {
-    gl_->Uniform3fv(yuv_adj_location, 1, yuv_adjust_with_offset);
-  }
   SetShaderOpacity(quad->shared_quad_state->opacity, alpha_location);
   if (!clip_region) {
@@ -3634,18 +3688,19 @@ const GLRenderer::VideoYUVProgram* GLRenderer::GetVideoYUVProgram(
     SamplerType sampler,
     bool use_alpha_plane,
     bool use_nv12,
-    bool use_color_lut) {
+    bool use_color_lut,
+    FragmentShaderYUVVideo::HighbitTexture highbit_texture) {
   DCHECK_GE(precision, 0);
   DCHECK_LE(precision, LAST_TEX_COORD_PRECISION);
   DCHECK_GE(sampler, 0);
   DCHECK_LE(sampler, LAST_SAMPLER_TYPE);
   VideoYUVProgram* program =
       &video_yuv_program_[precision][sampler][use_alpha_plane][use_nv12]
-                         [use_color_lut];
+                         [use_color_lut][highbit_texture];
   if (!program->initialized()) {
     TRACE_EVENT0("cc", "GLRenderer::videoYUVProgram::initialize");
-    program->mutable_fragment_shader()->SetFeatures(use_alpha_plane, use_nv12,
-                                                    use_color_lut);
+    program->mutable_fragment_shader()->SetFeatures(
+        use_alpha_plane, use_nv12, use_color_lut, highbit_texture);
     program->Initialize(output_surface_->context_provider(), precision,
                         sampler);
   }
@@ -3690,7 +3745,10 @@ void GLRenderer::CleanupSharedObjects() {
     for (int k = 0; k < 2; k++) {
      for (int l = 0; l < 2; l++) {
        for (int m = 0; m < 2; m++) {
-          video_yuv_program_[i][j][k][l][m].Cleanup(gl_);
+          for (int n = 0; n <= FragmentShaderYUVVideo::LAST_HIGHBIT_TEXTURE;
+               n++) {
+            video_yuv_program_[i][j][k][l][m][n].Cleanup(gl_);
+          }
        }
      }
    }
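
Note on the HIGHBIT_RG88 math in the patch above: a 10-bit sample packed into an RG88 texel is reconstructed in the shader as f(r, g) = (g * 256 + r) * 255 / 1023, with |r| and |g| sampled as normalized [0, 1] values, which is exactly the scale that resource_multiplier = 255.f / ((1 << bits_per_channel) - 1) supplies. The small standalone sketch below only checks that identity; it uses illustrative names and is not part of gl_renderer.cc or of this patch.

// Illustrative check of the RG88 reconstruction (hypothetical file, not part
// of the patch). Assumes a 10-bit source with the low byte in R and the high
// byte in G.
#include <cassert>
#include <cstdint>

int main() {
  const int kBitsPerChannel = 10;
  const float kResourceMultiplier = 255.f / ((1 << kBitsPerChannel) - 1);

  for (uint16_t value = 0; value < (1 << kBitsPerChannel); ++value) {
    const uint8_t r = value & 0xFF;  // low byte stored in the R channel
    const uint8_t g = value >> 8;    // high byte stored in the G channel

    // An RG88 texel is sampled as two normalized [0, 1] floats.
    const float r_norm = r / 255.f;
    const float g_norm = g / 255.f;

    // Shader-side reconstruction: (g * 256 + r) scaled by 255 / 1023.
    const float reconstructed = (g_norm * 256.f + r_norm) * kResourceMultiplier;

    // The result should match value / ((1 << 10) - 1) within float precision.
    const float expected =
        static_cast<float>(value) / ((1 << kBitsPerChannel) - 1);
    assert(reconstructed > expected - 1e-4f && reconstructed < expected + 1e-4f);
  }
  return 0;
}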