| OLD | NEW |
| (Empty) |
| 1 // Copyright 2015 The Chromium Authors. All rights reserved. | |
| 2 // Use of this source code is governed by a BSD-style license that can be | |
| 3 // found in the LICENSE file. | |
| 4 | |
#include "media/gpu/android_deferred_rendering_backing_strategy.h"

#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <stddef.h>

#include <algorithm>

#include "base/android/build_info.h"
#include "base/bind.h"
#include "base/logging.h"
#include "base/message_loop/message_loop.h"
#include "base/metrics/histogram.h"
#include "gpu/command_buffer/service/context_group.h"
#include "gpu/command_buffer/service/gl_stream_texture_image.h"
#include "gpu/command_buffer/service/gles2_cmd_copy_texture_chromium.h"
#include "gpu/command_buffer/service/texture_manager.h"
#include "gpu/ipc/common/gpu_surface_lookup.h"
#include "gpu/ipc/service/gpu_channel.h"
#include "media/gpu/avda_codec_image.h"
#include "media/gpu/avda_return_on_failure.h"
#include "media/gpu/avda_shared_state.h"
#include "ui/gl/android/surface_texture.h"
#include "ui/gl/egl_util.h"
#include "ui/gl/gl_bindings.h"
#include "ui/gl/gl_surface_egl.h"
#include "ui/gl/scoped_binders.h"
#include "ui/gl/scoped_make_current.h"
| 30 | |
| 31 namespace media { | |
| 32 | |
// Constructs the strategy with a non-owning pointer to the AVDA state
// provider. No codec is attached until CodecChanged() is called.
AndroidDeferredRenderingBackingStrategy::
    AndroidDeferredRenderingBackingStrategy(AVDAStateProvider* state_provider)
    : state_provider_(state_provider), media_codec_(nullptr) {}
| 36 | |
| 37 AndroidDeferredRenderingBackingStrategy:: | |
| 38 ~AndroidDeferredRenderingBackingStrategy() {} | |
| 39 | |
| 40 gl::ScopedJavaSurface AndroidDeferredRenderingBackingStrategy::Initialize( | |
| 41 int surface_view_id) { | |
| 42 shared_state_ = new AVDASharedState(); | |
| 43 | |
| 44 bool using_virtual_context = false; | |
| 45 if (gl::GLContext* context = gl::GLContext::GetCurrent()) { | |
| 46 if (gl::GLShareGroup* share_group = context->share_group()) | |
| 47 using_virtual_context = !!share_group->GetSharedContext(); | |
| 48 } | |
| 49 UMA_HISTOGRAM_BOOLEAN("Media.AVDA.VirtualContext", using_virtual_context); | |
| 50 | |
| 51 // Acquire the SurfaceView surface if given a valid id. | |
| 52 if (surface_view_id != VideoDecodeAccelerator::Config::kNoSurfaceID) { | |
| 53 return gpu::GpuSurfaceLookup::GetInstance()->AcquireJavaSurface( | |
| 54 surface_view_id); | |
| 55 } | |
| 56 | |
| 57 // Create a SurfaceTexture. | |
| 58 GLuint service_id = 0; | |
| 59 surface_texture_ = state_provider_->CreateAttachedSurfaceTexture(&service_id); | |
| 60 shared_state_->SetSurfaceTexture(surface_texture_, service_id); | |
| 61 return gl::ScopedJavaSurface(surface_texture_.get()); | |
| 62 } | |
| 63 | |
| 64 void AndroidDeferredRenderingBackingStrategy::BeginCleanup( | |
| 65 bool have_context, | |
| 66 const AndroidVideoDecodeAccelerator::OutputBufferMap& buffers) { | |
| 67 // If we failed before Initialize, then do nothing. | |
| 68 if (!shared_state_) | |
| 69 return; | |
| 70 | |
| 71 // TODO(liberato): we should release all codec buffers here without rendering. | |
| 72 // CodecChanged() will drop them, but is expected to be called after the codec | |
| 73 // is no longer accessible. It's unclear that VP8 flush in AVDA can't hang | |
| 74 // waiting for our buffers. | |
| 75 | |
| 76 CodecChanged(nullptr); | |
| 77 } | |
| 78 | |
| 79 void AndroidDeferredRenderingBackingStrategy::EndCleanup() { | |
| 80 // Release the surface texture and any back buffers. This will preserve the | |
| 81 // front buffer, if any. | |
| 82 if (surface_texture_) | |
| 83 surface_texture_->ReleaseSurfaceTexture(); | |
| 84 } | |
| 85 | |
// Returns the SurfaceTexture used for output, or null when rendering to a
// SurfaceView instead.
scoped_refptr<gl::SurfaceTexture>
AndroidDeferredRenderingBackingStrategy::GetSurfaceTexture() const {
  return surface_texture_;
}
| 90 | |
| 91 uint32_t AndroidDeferredRenderingBackingStrategy::GetTextureTarget() const { | |
| 92 // If we're using a surface texture, then we need an external texture target | |
| 93 // to sample from it. If not, then we'll use 2D transparent textures to draw | |
| 94 // a transparent hole through which to see the SurfaceView. This is normally | |
| 95 // needed only for the devtools inspector, since the overlay mechanism handles | |
| 96 // it otherwise. | |
| 97 return surface_texture_ ? GL_TEXTURE_EXTERNAL_OES : GL_TEXTURE_2D; | |
| 98 } | |
| 99 | |
| 100 gfx::Size AndroidDeferredRenderingBackingStrategy::GetPictureBufferSize() | |
| 101 const { | |
| 102 // For SurfaceView, request a 1x1 2D texture to reduce memory during | |
| 103 // initialization. For SurfaceTexture, allocate a picture buffer that is the | |
| 104 // actual frame size. Note that it will be an external texture anyway, so it | |
| 105 // doesn't allocate an image of that size. However, it's still important to | |
| 106 // get the coded size right, so that VideoLayerImpl doesn't try to scale the | |
| 107 // texture when building the quad for it. | |
| 108 return surface_texture_ ? state_provider_->GetSize() : gfx::Size(1, 1); | |
| 109 } | |
| 110 | |
| 111 void AndroidDeferredRenderingBackingStrategy::SetImageForPicture( | |
| 112 const PictureBuffer& picture_buffer, | |
| 113 const scoped_refptr<gpu::gles2::GLStreamTextureImage>& image) { | |
| 114 gpu::gles2::TextureRef* texture_ref = | |
| 115 state_provider_->GetTextureForPicture(picture_buffer); | |
| 116 RETURN_IF_NULL(texture_ref); | |
| 117 | |
| 118 gpu::gles2::TextureManager* texture_manager = | |
| 119 state_provider_->GetGlDecoder()->GetContextGroup()->texture_manager(); | |
| 120 RETURN_IF_NULL(texture_manager); | |
| 121 | |
| 122 // Default to zero which will clear the stream texture service id if one was | |
| 123 // previously set. | |
| 124 GLuint stream_texture_service_id = 0; | |
| 125 if (image) { | |
| 126 if (shared_state_->surface_texture_service_id() != 0) { | |
| 127 // Override the Texture's service id, so that it will use the one that is | |
| 128 // attached to the SurfaceTexture. | |
| 129 stream_texture_service_id = shared_state_->surface_texture_service_id(); | |
| 130 } | |
| 131 | |
| 132 // Also set the parameters for the level if we're not clearing the image. | |
| 133 const gfx::Size size = state_provider_->GetSize(); | |
| 134 texture_manager->SetLevelInfo(texture_ref, GetTextureTarget(), 0, GL_RGBA, | |
| 135 size.width(), size.height(), 1, 0, GL_RGBA, | |
| 136 GL_UNSIGNED_BYTE, gfx::Rect()); | |
| 137 | |
| 138 static_cast<AVDACodecImage*>(image.get()) | |
| 139 ->set_texture(texture_ref->texture()); | |
| 140 } | |
| 141 | |
| 142 // If we're clearing the image, or setting a SurfaceTexture backed image, we | |
| 143 // set the state to UNBOUND. For SurfaceTexture images, this ensures that the | |
| 144 // implementation will call CopyTexImage, which is where AVDACodecImage | |
| 145 // updates the SurfaceTexture to the right frame. | |
| 146 auto image_state = gpu::gles2::Texture::UNBOUND; | |
| 147 // For SurfaceView we set the state to BOUND because ScheduleOverlayPlane | |
| 148 // requires it. If something tries to sample from this texture it won't work, | |
| 149 // but there's no way to sample from a SurfaceView anyway, so it doesn't | |
| 150 // matter. | |
| 151 if (image && !surface_texture_) | |
| 152 image_state = gpu::gles2::Texture::BOUND; | |
| 153 texture_manager->SetLevelStreamTextureImage(texture_ref, GetTextureTarget(), | |
| 154 0, image.get(), image_state, | |
| 155 stream_texture_service_id); | |
| 156 } | |
| 157 | |
| 158 void AndroidDeferredRenderingBackingStrategy::UseCodecBufferForPictureBuffer( | |
| 159 int32_t codec_buf_index, | |
| 160 const PictureBuffer& picture_buffer) { | |
| 161 // Make sure that the decoder is available. | |
| 162 RETURN_IF_NULL(state_provider_->GetGlDecoder()); | |
| 163 | |
| 164 // Notify the AVDACodecImage for picture_buffer that it should use the | |
| 165 // decoded buffer codec_buf_index to render this frame. | |
| 166 AVDACodecImage* avda_image = | |
| 167 shared_state_->GetImageForPicture(picture_buffer.id()); | |
| 168 RETURN_IF_NULL(avda_image); | |
| 169 | |
| 170 // Note that this is not a race, since we do not re-use a PictureBuffer | |
| 171 // until after the CC is done drawing it. | |
| 172 pictures_out_for_display_.push_back(picture_buffer.id()); | |
| 173 avda_image->set_media_codec_buffer_index(codec_buf_index); | |
| 174 avda_image->set_size(state_provider_->GetSize()); | |
| 175 | |
| 176 MaybeRenderEarly(); | |
| 177 } | |
| 178 | |
| 179 void AndroidDeferredRenderingBackingStrategy::AssignOnePictureBuffer( | |
| 180 const PictureBuffer& picture_buffer, | |
| 181 bool have_context) { | |
| 182 // Attach a GLImage to each texture that will use the surface texture. | |
| 183 // We use a refptr here in case SetImageForPicture fails. | |
| 184 scoped_refptr<gpu::gles2::GLStreamTextureImage> gl_image = | |
| 185 new AVDACodecImage(picture_buffer.id(), shared_state_, media_codec_, | |
| 186 state_provider_->GetGlDecoder(), surface_texture_); | |
| 187 SetImageForPicture(picture_buffer, gl_image); | |
| 188 | |
| 189 if (!surface_texture_ && have_context) { | |
| 190 // To make devtools work, we're using a 2D texture. Make it transparent, | |
| 191 // so that it draws a hole for the SV to show through. This is only | |
| 192 // because devtools draws and reads back, which skips overlay processing. | |
| 193 // It's unclear why devtools renders twice -- once normally, and once | |
| 194 // including a readback layer. The result is that the device screen | |
| 195 // flashes as we alternately draw the overlay hole and this texture, | |
| 196 // unless we make the texture transparent. | |
| 197 static const uint8_t rgba[] = {0, 0, 0, 0}; | |
| 198 const gfx::Size size(1, 1); | |
| 199 DCHECK_LE(1u, picture_buffer.texture_ids().size()); | |
| 200 glBindTexture(GL_TEXTURE_2D, picture_buffer.texture_ids()[0]); | |
| 201 glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, size.width(), size.height(), 0, | |
| 202 GL_RGBA, GL_UNSIGNED_BYTE, rgba); | |
| 203 } | |
| 204 } | |
| 205 | |
| 206 void AndroidDeferredRenderingBackingStrategy::ReleaseCodecBufferForPicture( | |
| 207 const PictureBuffer& picture_buffer) { | |
| 208 AVDACodecImage* avda_image = | |
| 209 shared_state_->GetImageForPicture(picture_buffer.id()); | |
| 210 RETURN_IF_NULL(avda_image); | |
| 211 avda_image->UpdateSurface(AVDACodecImage::UpdateMode::DISCARD_CODEC_BUFFER); | |
| 212 } | |
| 213 | |
| 214 void AndroidDeferredRenderingBackingStrategy::ReuseOnePictureBuffer( | |
| 215 const PictureBuffer& picture_buffer) { | |
| 216 pictures_out_for_display_.erase( | |
| 217 std::remove(pictures_out_for_display_.begin(), | |
| 218 pictures_out_for_display_.end(), picture_buffer.id()), | |
| 219 pictures_out_for_display_.end()); | |
| 220 | |
| 221 // At this point, the CC must be done with the picture. We can't really | |
| 222 // check for that here directly. it's guaranteed in gpu_video_decoder.cc, | |
| 223 // when it waits on the sync point before releasing the mailbox. That sync | |
| 224 // point is inserted by destroying the resource in VideoLayerImpl::DidDraw. | |
| 225 ReleaseCodecBufferForPicture(picture_buffer); | |
| 226 MaybeRenderEarly(); | |
| 227 } | |
| 228 | |
| 229 void AndroidDeferredRenderingBackingStrategy::ReleaseCodecBuffers( | |
| 230 const AndroidVideoDecodeAccelerator::OutputBufferMap& buffers) { | |
| 231 for (const std::pair<int, PictureBuffer>& entry : buffers) | |
| 232 ReleaseCodecBufferForPicture(entry.second); | |
| 233 } | |
| 234 | |
| 235 void AndroidDeferredRenderingBackingStrategy::MaybeRenderEarly() { | |
| 236 if (pictures_out_for_display_.empty()) | |
| 237 return; | |
| 238 | |
| 239 // See if we can consume the front buffer / render to the SurfaceView. Iterate | |
| 240 // in reverse to find the most recent front buffer. If none is found, the | |
| 241 // |front_index| will point to the beginning of the array. | |
| 242 size_t front_index = pictures_out_for_display_.size() - 1; | |
| 243 AVDACodecImage* first_renderable_image = nullptr; | |
| 244 for (int i = front_index; i >= 0; --i) { | |
| 245 const int id = pictures_out_for_display_[i]; | |
| 246 AVDACodecImage* avda_image = shared_state_->GetImageForPicture(id); | |
| 247 if (!avda_image) | |
| 248 continue; | |
| 249 | |
| 250 // Update the front buffer index as we move along to shorten the number of | |
| 251 // candidate images we look at for back buffer rendering. | |
| 252 front_index = i; | |
| 253 first_renderable_image = avda_image; | |
| 254 | |
| 255 // If we find a front buffer, stop and indicate that front buffer rendering | |
| 256 // is not possible since another image is already in the front buffer. | |
| 257 if (avda_image->was_rendered_to_front_buffer()) { | |
| 258 first_renderable_image = nullptr; | |
| 259 break; | |
| 260 } | |
| 261 } | |
| 262 | |
| 263 if (first_renderable_image) { | |
| 264 first_renderable_image->UpdateSurface( | |
| 265 AVDACodecImage::UpdateMode::RENDER_TO_FRONT_BUFFER); | |
| 266 } | |
| 267 | |
| 268 // Back buffer rendering is only available for surface textures. We'll always | |
| 269 // have at least one front buffer, so the next buffer must be the backbuffer. | |
| 270 size_t backbuffer_index = front_index + 1; | |
| 271 if (!surface_texture_ || backbuffer_index >= pictures_out_for_display_.size()) | |
| 272 return; | |
| 273 | |
| 274 // See if the back buffer is free. If so, then render the frame adjacent to | |
| 275 // the front buffer. The listing is in render order, so we can just use the | |
| 276 // first unrendered frame if there is back buffer space. | |
| 277 first_renderable_image = shared_state_->GetImageForPicture( | |
| 278 pictures_out_for_display_[backbuffer_index]); | |
| 279 if (!first_renderable_image || | |
| 280 first_renderable_image->was_rendered_to_back_buffer()) { | |
| 281 return; | |
| 282 } | |
| 283 | |
| 284 // Due to the loop in the beginning this should never be true. | |
| 285 DCHECK(!first_renderable_image->was_rendered_to_front_buffer()); | |
| 286 first_renderable_image->UpdateSurface( | |
| 287 AVDACodecImage::UpdateMode::RENDER_TO_BACK_BUFFER); | |
| 288 } | |
| 289 | |
// Records the new codec (which may be null, e.g. during cleanup) and forwards
// the change to the shared state.
void AndroidDeferredRenderingBackingStrategy::CodecChanged(
    VideoCodecBridge* codec) {
  media_codec_ = codec;
  shared_state_->CodecChanged(codec);
}
| 295 | |
// SurfaceTexture frame-available callback; signals the shared state so
// consumers waiting on a frame can proceed.
void AndroidDeferredRenderingBackingStrategy::OnFrameAvailable() {
  shared_state_->SignalFrameAvailable();
}
| 299 | |
// Returns true when frames go to a SurfaceView (no surface texture).
bool AndroidDeferredRenderingBackingStrategy::ArePicturesOverlayable() {
  // SurfaceView frames are always overlayable because that's the only way to
  // display them.
  return !surface_texture_;
}
| 305 | |
// Records |new_size| on |picture_buffer| without touching the GL texture.
void AndroidDeferredRenderingBackingStrategy::UpdatePictureBufferSize(
    PictureBuffer* picture_buffer,
    const gfx::Size& new_size) {
  // This strategy uses EGL images which manage the texture size for us. We
  // simply update the PictureBuffer meta-data and leave the texture as-is.
  picture_buffer->set_size(new_size);
}
| 313 | |
| 314 } // namespace media | |
| OLD | NEW |