OLD | NEW |
1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/common/gpu/media/android_deferred_rendering_backing_strategy.h
" | 5 #include "content/common/gpu/media/android_deferred_rendering_backing_strategy.h
" |
6 | 6 |
7 #include <EGL/egl.h> | 7 #include <EGL/egl.h> |
8 #include <EGL/eglext.h> | 8 #include <EGL/eglext.h> |
9 | 9 |
10 #include "base/android/build_info.h" | 10 #include "base/android/build_info.h" |
(...skipping 161 matching lines...) |
172 int32_t codec_buf_index, | 172 int32_t codec_buf_index, |
173 const media::PictureBuffer& picture_buffer) { | 173 const media::PictureBuffer& picture_buffer) { |
174 // Make sure that the decoder is available. | 174 // Make sure that the decoder is available. |
175 RETURN_IF_NULL(state_provider_->GetGlDecoder()); | 175 RETURN_IF_NULL(state_provider_->GetGlDecoder()); |
176 | 176 |
177 // Notify the AVDACodecImage for picture_buffer that it should use the | 177 // Notify the AVDACodecImage for picture_buffer that it should use the |
178 // decoded buffer codec_buf_index to render this frame. | 178 // decoded buffer codec_buf_index to render this frame. |
179 AVDACodecImage* avda_image = | 179 AVDACodecImage* avda_image = |
180 shared_state_->GetImageForPicture(picture_buffer.id()); | 180 shared_state_->GetImageForPicture(picture_buffer.id()); |
181 RETURN_IF_NULL(avda_image); | 181 RETURN_IF_NULL(avda_image); |
182 DCHECK_EQ(avda_image->GetMediaCodecBufferIndex(), -1); | 182 |
183 // Note that this is not a race, since we do not re-use a PictureBuffer | 183 // Note that this is not a race, since we do not re-use a PictureBuffer |
184 // until after the CC is done drawing it. | 184 // until after the CC is done drawing it. |
| 185 pictures_out_for_display_.push_back(picture_buffer.id()); |
185 avda_image->SetMediaCodecBufferIndex(codec_buf_index); | 186 avda_image->SetMediaCodecBufferIndex(codec_buf_index); |
186 avda_image->SetSize(state_provider_->GetSize()); | 187 avda_image->SetSize(state_provider_->GetSize()); |
| 188 |
| 189 MaybeRenderEarly(); |
187 } | 190 } |
188 | 191 |
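The new hunk above drops the DCHECK on the codec buffer index and instead records the picture in pictures_out_for_display_ before calling MaybeRenderEarly(). As a rough illustration of the deferred-rendering idea — the image only records which MediaCodec output buffer backs the picture here, and the buffer is released (rendered or dropped) later — here is a minimal standalone sketch; DeferredImage and its members are hypothetical stand-ins, not the real AVDACodecImage:

    #include <cstdint>

    // Hypothetical, simplified stand-in for AVDACodecImage: it remembers which
    // MediaCodec output buffer backs the picture and defers the actual
    // ReleaseOutputBuffer() until the frame is rendered or discarded.
    class DeferredImage {
     public:
      void SetMediaCodecBufferIndex(int32_t index) { codec_buffer_index_ = index; }

      // Returns the pending buffer index and clears it; the caller decides
      // whether to render (ReleaseOutputBuffer(index, true)) or drop it.
      int32_t TakeCodecBufferIndex() {
        int32_t index = codec_buffer_index_;
        codec_buffer_index_ = -1;
        return index;
      }

     private:
      int32_t codec_buffer_index_ = -1;  // -1 means no pending codec buffer.
    };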
189 void AndroidDeferredRenderingBackingStrategy::AssignOnePictureBuffer( | 192 void AndroidDeferredRenderingBackingStrategy::AssignOnePictureBuffer( |
190 const media::PictureBuffer& picture_buffer, | 193 const media::PictureBuffer& picture_buffer, |
191 bool have_context) { | 194 bool have_context) { |
192 // Attach a GLImage to each texture that will use the surface texture. | 195 // Attach a GLImage to each texture that will use the surface texture. |
193 // We use a refptr here in case SetImageForPicture fails. | 196 // We use a refptr here in case SetImageForPicture fails. |
194 scoped_refptr<gpu::gles2::GLStreamTextureImage> gl_image = | 197 scoped_refptr<gpu::gles2::GLStreamTextureImage> gl_image = |
195 new AVDACodecImage(picture_buffer.id(), shared_state_, media_codec_, | 198 new AVDACodecImage(picture_buffer.id(), shared_state_, media_codec_, |
196 state_provider_->GetGlDecoder(), surface_texture_); | 199 state_provider_->GetGlDecoder(), surface_texture_); |
(...skipping 14 matching lines...) |
211 glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, size.width(), size.height(), 0, | 214 glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, size.width(), size.height(), 0, |
212 GL_RGBA, GL_UNSIGNED_BYTE, rgba); | 215 GL_RGBA, GL_UNSIGNED_BYTE, rgba); |
213 } | 216 } |
214 } | 217 } |
215 | 218 |
216 void AndroidDeferredRenderingBackingStrategy::ReleaseCodecBufferForPicture( | 219 void AndroidDeferredRenderingBackingStrategy::ReleaseCodecBufferForPicture( |
217 const media::PictureBuffer& picture_buffer) { | 220 const media::PictureBuffer& picture_buffer) { |
218 AVDACodecImage* avda_image = | 221 AVDACodecImage* avda_image = |
219 shared_state_->GetImageForPicture(picture_buffer.id()); | 222 shared_state_->GetImageForPicture(picture_buffer.id()); |
220 RETURN_IF_NULL(avda_image); | 223 RETURN_IF_NULL(avda_image); |
221 | 224 avda_image->UpdateSurface(AVDACodecImage::UpdateMode::DISCARD_CODEC_BUFFER); |
222 if (!avda_image) | |
223 return; | |
224 | |
225 // See if there is a media codec buffer still attached to this image. | |
226 const int32_t codec_buffer = avda_image->GetMediaCodecBufferIndex(); | |
227 | |
228 if (codec_buffer >= 0) { | |
229 // PictureBuffer wasn't displayed, so release the buffer. | |
230 media_codec_->ReleaseOutputBuffer(codec_buffer, false); | |
231 avda_image->SetMediaCodecBufferIndex(-1); | |
232 } | |
233 } | 225 } |
234 | 226 |
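ReleaseCodecBufferForPicture now funnels through AVDACodecImage::UpdateSurface() with DISCARD_CODEC_BUFFER instead of releasing the MediaCodec buffer by hand. UpdateSurface() itself is not part of this diff, so the following is only an assumed sketch of that dispatch, built from the MediaCodec semantics visible in the old code (ReleaseOutputBuffer(index, false) drops the frame, true renders it to the Surface):

    #include <cstdint>

    // Sketch only: an assumed dispatch over the UpdateMode values that appear
    // in this CL. The real AVDACodecImage::UpdateSurface() lives elsewhere.
    enum class UpdateMode {
      RENDER_TO_FRONT_BUFFER,
      RENDER_TO_BACK_BUFFER,
      DISCARD_CODEC_BUFFER,
    };

    void UpdateSurfaceSketch(UpdateMode mode, int32_t* codec_buffer_index) {
      if (*codec_buffer_index < 0)
        return;  // No codec buffer is attached to this picture.
      const bool render = (mode != UpdateMode::DISCARD_CODEC_BUFFER);
      // In the real strategy this is where
      // media_codec->ReleaseOutputBuffer(*codec_buffer_index, render) happens;
      // back-buffer rendering additionally defers SurfaceTexture::UpdateTexImage().
      (void)render;
      *codec_buffer_index = -1;
    }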
235 void AndroidDeferredRenderingBackingStrategy::ReuseOnePictureBuffer( | 227 void AndroidDeferredRenderingBackingStrategy::ReuseOnePictureBuffer( |
236 const media::PictureBuffer& picture_buffer) { | 228 const media::PictureBuffer& picture_buffer) { |
| 229 pictures_out_for_display_.erase( |
| 230 std::remove(pictures_out_for_display_.begin(), |
| 231 pictures_out_for_display_.end(), picture_buffer.id()), |
| 232 pictures_out_for_display_.end()); |
| 233 |
237 // At this point, the CC must be done with the picture. We can't really | 234 // At this point, the CC must be done with the picture. We can't really |
238 // check for that here directly. It's guaranteed in gpu_video_decoder.cc, | 235 // check for that here directly. It's guaranteed in gpu_video_decoder.cc, |
239 // when it waits on the sync point before releasing the mailbox. That sync | 236 // when it waits on the sync point before releasing the mailbox. That sync |
240 // point is inserted by destroying the resource in VideoLayerImpl::DidDraw. | 237 // point is inserted by destroying the resource in VideoLayerImpl::DidDraw. |
241 ReleaseCodecBufferForPicture(picture_buffer); | 238 ReleaseCodecBufferForPicture(picture_buffer); |
| 239 MaybeRenderEarly(); |
242 } | 240 } |
243 | 241 |
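ReuseOnePictureBuffer drops the picture id from pictures_out_for_display_ with the erase-remove idiom; if the container is a std::vector, this needs <algorithm> for std::remove (it may already be pulled in within the skipped include region). A self-contained example of the same idiom:

    #include <algorithm>
    #include <cstdint>
    #include <vector>

    // std::remove() shifts the surviving elements to the front and returns the
    // new logical end; erase() then trims the leftover tail in one pass.
    void EraseId(std::vector<int32_t>* ids, int32_t id) {
      ids->erase(std::remove(ids->begin(), ids->end(), id), ids->end());
    }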
244 void AndroidDeferredRenderingBackingStrategy::ReleaseCodecBuffers( | 242 void AndroidDeferredRenderingBackingStrategy::ReleaseCodecBuffers( |
245 const AndroidVideoDecodeAccelerator::OutputBufferMap& buffers) { | 243 const AndroidVideoDecodeAccelerator::OutputBufferMap& buffers) { |
246 for (const std::pair<int, media::PictureBuffer>& entry : buffers) | 244 for (const std::pair<int, media::PictureBuffer>& entry : buffers) |
247 ReleaseCodecBufferForPicture(entry.second); | 245 ReleaseCodecBufferForPicture(entry.second); |
248 } | 246 } |
249 | 247 |
| 248 void AndroidDeferredRenderingBackingStrategy::MaybeRenderEarly() { |
| 249 // See if we can consume the front buffer / render to the SurfaceView. |
| 250 if (pictures_out_for_display_.size() == 1u) { |
| 251 AVDACodecImage* avda_image = |
| 252 shared_state_->GetImageForPicture(*pictures_out_for_display_.begin()); |
| 253 RETURN_IF_NULL(avda_image); |
| 254 avda_image->UpdateSurface( |
| 255 AVDACodecImage::UpdateMode::RENDER_TO_FRONT_BUFFER); |
| 256 return; |
| 257 } |
| 258 |
| 259 // Back buffer rendering is only available for surface textures. |
| 260 if (!surface_texture_) |
| 261 return; |
| 262 |
| 263 // See if the back buffer is free. If so, then render the earliest frame. The |
| 264 // listing is in render order, so we can just use the first unrendered frame |
| 265 // if there is back buffer space. |
| 266 AVDACodecImage* first_renderable_image = nullptr; |
| 267 for (int id : pictures_out_for_display_) { |
| 268 AVDACodecImage* avda_image = shared_state_->GetImageForPicture(id); |
| 269 if (!avda_image) |
| 270 continue; |
| 271 |
| 272 // If the back buffer is unavailable, there's nothing left to do. |
| 273 if (avda_image->is_rendered_to_back_buffer()) |
| 274 return; |
| 275 |
| 276 // If the image is rendered to the front buffer or has been dropped, it is |
| 277 // not valid for rendering. |
| 278 if (avda_image->is_rendered()) |
| 279 continue; |
| 280 |
| 281 if (!first_renderable_image) |
| 282 first_renderable_image = avda_image; |
| 283 } |
| 284 |
| 285 if (first_renderable_image) { |
| 286 first_renderable_image->UpdateSurface( |
| 287 AVDACodecImage::UpdateMode::RENDER_TO_BACK_BUFFER); |
| 288 } |
| 289 } |
| 290 |
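A small standalone sketch of the selection rule MaybeRenderEarly() applies in the surface-texture case: the list is kept in render order, any image already parked in the back buffer blocks early rendering, and otherwise the earliest unrendered image is the candidate. ImageState is illustrative only; the real information comes from AVDACodecImage accessors.

    #include <vector>

    // Illustrative per-picture state; stands in for
    // is_rendered_to_back_buffer() / is_rendered() on AVDACodecImage.
    struct ImageState {
      bool rendered_to_back_buffer = false;
      bool rendered = false;  // True once sent to a buffer or dropped.
    };

    // Mirrors the loop above: scan in render order, bail out if the back
    // buffer is occupied, otherwise remember the first unrendered image.
    const ImageState* PickBackBufferCandidate(const std::vector<ImageState>& imgs) {
      const ImageState* first_renderable = nullptr;
      for (const ImageState& img : imgs) {
        if (img.rendered_to_back_buffer)
          return nullptr;  // Back buffer already holds a frame; wait.
        if (!img.rendered && !first_renderable)
          first_renderable = &img;
      }
      return first_renderable;
    }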
250 void AndroidDeferredRenderingBackingStrategy::CodecChanged( | 291 void AndroidDeferredRenderingBackingStrategy::CodecChanged( |
251 media::VideoCodecBridge* codec) { | 292 media::VideoCodecBridge* codec) { |
252 media_codec_ = codec; | 293 media_codec_ = codec; |
253 shared_state_->CodecChanged(codec); | 294 shared_state_->CodecChanged(codec); |
254 } | 295 } |
255 | 296 |
256 void AndroidDeferredRenderingBackingStrategy::OnFrameAvailable() { | 297 void AndroidDeferredRenderingBackingStrategy::OnFrameAvailable() { |
257 shared_state_->SignalFrameAvailable(); | 298 shared_state_->SignalFrameAvailable(); |
258 } | 299 } |
259 | 300 |
(...skipping 132 matching lines...) |
392 return !feature_info->workarounds().avda_dont_copy_pictures; | 433 return !feature_info->workarounds().avda_dont_copy_pictures; |
393 } | 434 } |
394 } | 435 } |
395 } | 436 } |
396 | 437 |
397 // Assume so. | 438 // Assume so. |
398 return true; | 439 return true; |
399 } | 440 } |
400 | 441 |
401 } // namespace content | 442 } // namespace content |