Index: content/common/gpu/media/gpu_video_decode_accelerator.cc
diff --git a/content/common/gpu/media/gpu_video_decode_accelerator.cc b/content/common/gpu/media/gpu_video_decode_accelerator.cc
index df8ec5bc0ec8c72f35ba64dea56b0f1952197b62..e8b9040d846b3d3428620a1bb5f67679339e1303 100644
--- a/content/common/gpu/media/gpu_video_decode_accelerator.cc
+++ b/content/common/gpu/media/gpu_video_decode_accelerator.cc
@@ -370,21 +370,19 @@ void GpuVideoDecodeAccelerator::OnAssignPictureBuffers(
     NotifyError(media::VideoDecodeAccelerator::INVALID_ARGUMENT);
     return;
   }
+  GLenum format = GL_RGBA;
+  // FIXME: after moving to D3D11, remove this workaround. crbug.com/438691
+#if defined(OS_WIN)
+  format = static_cast<DXVAVideoDecodeAccelerator*>(
Ken Russell
2014/12/05 20:53:43
Could you please avoid this downcast by defining a virtual method on the VideoDecodeAccelerator interface?

dshwang
2014/12/08 13:47:28
Done.
+      video_decode_accelerator_.get())->GetSurfaceInternalFormat();
+#endif
   if (texture_target_ == GL_TEXTURE_EXTERNAL_OES ||
-      texture_target_ == GL_TEXTURE_RECTANGLE) {
+      texture_target_ == GL_TEXTURE_RECTANGLE || format != GL_RGBA) {
     // These textures have their dimensions defined by the underlying storage.
     // Use |texture_dimensions_| for this size.
-    texture_manager->SetLevelInfo(texture_ref,
-                                  texture_target_,
-                                  0,
-                                  GL_RGBA,
-                                  texture_dimensions_.width(),
-                                  texture_dimensions_.height(),
-                                  1,
-                                  0,
-                                  GL_RGBA,
-                                  0,
-                                  false);
+    texture_manager->SetLevelInfo(
+        texture_ref, texture_target_, 0, format, texture_dimensions_.width(),
+        texture_dimensions_.height(), 1, 0, format, 0, false);
   } else {
     // For other targets, texture dimensions should already be defined.
     GLsizei width = 0, height = 0;
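
For reference, a minimal sketch of the refactor Ken asks for in the thread above: give media::VideoDecodeAccelerator a virtual getter that defaults to GL_RGBA and override it in DXVAVideoDecodeAccelerator, so the call site needs neither the static_cast nor the OS_WIN guard. The method name GetSurfaceInternalFormat and the BGRA return value are assumptions for illustration, not necessarily the code that landed; the GL typedef and constants are inlined only to keep the sketch self-contained.

// Sketch only; assumed shapes, not the landed Chromium CL.
typedef unsigned int GLenum;
static const GLenum GL_RGBA = 0x1908;
static const GLenum GL_BGRA_EXT = 0x80E1;

namespace media {

class VideoDecodeAccelerator {
 public:
  virtual ~VideoDecodeAccelerator() {}
  // Default internal format for decoded picture buffers. Platforms whose
  // surfaces are not RGBA override this, so callers never need a downcast.
  virtual GLenum GetSurfaceInternalFormat() const { return GL_RGBA; }
};

}  // namespace media

// Hypothetical Windows decoder: DXVA surfaces are assumed BGRA here.
class DXVAVideoDecodeAccelerator : public media::VideoDecodeAccelerator {
 public:
  GLenum GetSurfaceInternalFormat() const override { return GL_BGRA_EXT; }
};

With that in place, OnAssignPictureBuffers would shrink to a single virtual call with no cast and no #if:

  GLenum format = video_decode_accelerator_->GetSurfaceInternalFormat();

This keeps the platform-specific knowledge inside the decoder implementation instead of leaking it into shared code, which is the usual reason to prefer a virtual method over a downcast.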