Chromium Code Reviews

Index: media/filters/gpu_video_decoder.cc
diff --git a/media/filters/gpu_video_decoder.cc b/media/filters/gpu_video_decoder.cc
index 08a1cf1cffe13a4d7d2126a845c8c573b54d3d34..e6079531bf49537380af79c7de8dae1382d03b55 100644
--- a/media/filters/gpu_video_decoder.cc
+++ b/media/filters/gpu_video_decoder.cc
@@ -194,6 +194,19 @@ void GpuVideoDecoder::SetVDA(VideoDecodeAccelerator* vda) {
   weak_vda_ = vda->AsWeakPtr();
 }

+void GpuVideoDecoder::DestroyTextures() {
+  // It's safe to delete textures here, as the VDA should be keeping its own

    Ami GONE FROM CHROMIUM
    2012/10/17 00:48:10
    The explanation you give here, in the CL descripti

+  // references to the textures. This happens commonly via binding them
+  // to EGLImages or XPixmaps, which ups refcounts on textures in the graphics
+  // drivers.
+  for (std::map<int32, PictureBuffer>::iterator it =
+           picture_buffers_in_decoder_.begin();
+       it != picture_buffers_in_decoder_.end(); ++it) {
+    factories_->DeleteTexture(it->second.texture_id());
+  }
+  picture_buffers_in_decoder_.clear();
+}
+
 void GpuVideoDecoder::DestroyVDA() {
   DCHECK(gvd_loop_proxy_->BelongsToCurrentThread());
   VideoDecodeAccelerator* vda ALLOW_UNUSED = vda_.release();
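
A note on the reasoning in the new comment: deleting the client-side textures here is safe only because the graphics driver keeps its own reference once a texture has been bound to an EGLImage or XPixmap, so DeleteTexture() drops the decoder's reference without freeing storage the VDA may still be using. The following is a minimal stand-alone sketch of that ownership idea, using std::shared_ptr as a stand-in for the driver-side refcount; the TextureStorage type and all names below are illustrative only, not Chromium API.

    #include <cstdint>
    #include <map>
    #include <memory>

    // Illustrative stand-in for the GPU-side allocation behind a texture id.
    struct TextureStorage {
      uint32_t id;
    };

    int main() {
      // Client-side table of texture ids -> storage, in the spirit of the
      // decoder's picture buffer map.
      std::map<int32_t, std::shared_ptr<TextureStorage>> client_textures;
      client_textures[1] = std::make_shared<TextureStorage>();
      client_textures[1]->id = 7;

      // "Binding" the texture: the driver takes its own reference, the way an
      // EGLImage/XPixmap binding bumps the refcount inside the driver.
      std::shared_ptr<TextureStorage> driver_ref = client_textures[1];

      // Client-side deletion, like DeleteTexture() plus clear(): only the
      // client's references go away.
      client_textures.clear();

      // The storage is still alive because the driver still holds a
      // reference, which is why deleting the client handles early is safe.
      return driver_ref->id == 7 ? 0 : 1;
    }
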
@@ -205,6 +218,8 @@ void GpuVideoDecoder::DestroyVDA() {
       FROM_HERE,
       base::Bind(&VideoDecodeAccelerator::Destroy, weak_vda_),
       base::Bind(&GpuVideoDecoder::Release, this));
+
+  DestroyTextures();
 }

 void GpuVideoDecoder::Read(const ReadCB& read_cb) {
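
The ordering in DestroyVDA() is worth spelling out: VideoDecodeAccelerator::Destroy is posted to run later on the accelerator's side, with GpuVideoDecoder::Release as the reply, while DestroyTextures() runs immediately on the calling thread. Below is a rough sketch of that shape, with a toy task queue standing in for the message loop; the names are hypothetical, not the Chromium task-posting API.

    #include <functional>
    #include <queue>
    #include <vector>

    // Toy single-threaded task queue standing in for the posting pattern.
    struct TaskQueue {
      std::queue<std::function<void()>> tasks;
      void Post(std::function<void()> task) { tasks.push(std::move(task)); }
      void RunAll() {
        while (!tasks.empty()) {
          tasks.front()();
          tasks.pop();
        }
      }
    };

    struct FakeAccelerator {
      void Destroy() { /* releases its own texture references here */ }
    };

    int main() {
      TaskQueue vda_loop;
      FakeAccelerator vda;
      std::vector<int> client_texture_ids = {1, 2, 3};

      // 1. Post the asynchronous teardown, as DestroyVDA() does with
      //    VideoDecodeAccelerator::Destroy.
      vda_loop.Post([&vda] { vda.Destroy(); });

      // 2. Immediately drop the client-side handles, as DestroyTextures()
      //    does; there is no need to wait for step 1 to run.
      client_texture_ids.clear();

      vda_loop.RunAll();  // The accelerator tears down later on its own loop.
      return 0;
    }
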
@@ -528,6 +543,8 @@ GpuVideoDecoder::~GpuVideoDecoder() {
     it->second.shm_buffer->shm->Close();
   }
   bitstream_buffers_in_decoder_.clear();
+
+  DestroyTextures();
 }

 void GpuVideoDecoder::EnsureDemuxOrDecode() {
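
Since DestroyTextures() is now called from both DestroyVDA() and the destructor, the second call is effectively a no-op: the first call clears picture_buffers_in_decoder_, so a later call just walks an empty map. A small self-contained sketch of that idempotent-cleanup pattern, with purely illustrative names:

    #include <map>

    class TextureOwner {
     public:
      ~TextureOwner() { DestroyTextures(); }

      void DestroyTextures() {
        for (std::map<int, int>::iterator it = textures_.begin();
             it != textures_.end(); ++it) {
          // A DeleteTexture(it->second) call would go here.
        }
        textures_.clear();  // Makes any later call a no-op.
      }

     private:
      std::map<int, int> textures_;
    };

    int main() {
      TextureOwner owner;
      owner.DestroyTextures();  // Explicit teardown path.
      return 0;                 // Destructor calls DestroyTextures() again, safely.
    }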