Index: ui/gl/gl_image_surface_texture.cc |
diff --git a/ui/gl/gl_image_surface_texture.cc b/ui/gl/gl_image_surface_texture.cc |
new file mode 100644 |
index 0000000000000000000000000000000000000000..9aeb384f225e74a726bc62230509861410bfaf04 |
--- /dev/null |
+++ b/ui/gl/gl_image_surface_texture.cc |
@@ -0,0 +1,86 @@ |
+// Copyright 2014 The Chromium Authors. All rights reserved. |
+// Use of this source code is governed by a BSD-style license that can be |
+// found in the LICENSE file. |
+ |
+#include "ui/gl/gl_image_surface_texture.h" |
+ |
+#include "base/debug/trace_event.h" |
+#include "ui/gl/android/surface_texture.h" |
+#include "ui/gl/android/surface_texture_tracker.h" |
+#include "ui/gl/gl_bindings.h" |
+ |
+namespace gfx { |
+namespace { |
+ |
+// This is admittedly a bit ugly. SurfaceTexture API takes ownership of texture |
+// ids when AttachToGLContext() is called and will delete the texture unless |
+// DetachFromGLContext() is called with no context current. This helper class |
+// can be used to temporarily make no context current for the purpose of |
+// stealing the texture id of a given surface texture. |
+// RAII guard: saves the current EGL display, context and draw/read |
+// surfaces, makes no context current for its lifetime, and restores the |
+// saved state on destruction. Used to detach a SurfaceTexture without |
+// letting it delete the attached texture id (see comment above). |
+class ScopedNoContextCurrent { |
+ public: |
+  ScopedNoContextCurrent() |
+      : display_(eglGetCurrentDisplay()), |
+        context_(eglGetCurrentContext()), |
+        draw_surface_(eglGetCurrentSurface(EGL_DRAW)), |
+        read_surface_(eglGetCurrentSurface(EGL_READ)) { |
+    // Unbind the current context; the destructor restores the saved state. |
+    eglMakeCurrent(display_, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT); |
no sievers
2014/03/14 01:24:51
I was going to suggest using ui::ScopedMakeCurrent
reveman
2014/03/14 19:44:53
I was worried that this might have some other side
 |
+  } |
+ |
+  ~ScopedNoContextCurrent() { |
+    // Re-bind the context/surfaces that were current at construction time. |
+    eglMakeCurrent(display_, draw_surface_, read_surface_, context_); |
+  } |
+ |
+ private: |
+  // EGL state captured at construction and restored at destruction. |
+  EGLDisplay display_; |
+  EGLContext context_; |
+  EGLSurface draw_surface_; |
+  EGLSurface read_surface_; |
+}; |
+ |
+// Detaches |surface_texture| from its GL context while no context is |
+// current, so the texture id it was attached to is not deleted (see the |
+// ScopedNoContextCurrent comment above). |
+void DetachFromGLContext(SurfaceTexture* surface_texture) { |
+  // Detach with no context current to prevent SurfaceTexture from deleting |
+  // currently attached texture id. |
+  ScopedNoContextCurrent no_context_current; |
+  surface_texture->DetachFromGLContext(); |
+} |
+ |
+} // namespace |
+ |
+// Constructs an image of |size|; the backing SurfaceTexture is acquired |
+// later in Initialize(). |
+GLImageSurfaceTexture::GLImageSurfaceTexture(gfx::Size size) : size_(size) {} |
+ |
+// Detaches any acquired SurfaceTexture via Destroy(). |
+GLImageSurfaceTexture::~GLImageSurfaceTexture() { Destroy(); } |
+ |
+// Acquires the SurfaceTexture identified by |buffer.surface_texture_id| |
+// from the global SurfaceTextureTracker. Returns false if acquisition |
+// yields no surface texture. Must be called at most once (DCHECKed). |
+bool GLImageSurfaceTexture::Initialize(gfx::GpuMemoryBufferHandle buffer) { |
+  DCHECK(!surface_texture_); |
+  surface_texture_ = |
+      SurfaceTextureTracker::GetInstance()->AcquireSurfaceTexture( |
+          buffer.surface_texture_id); |
+  return !!surface_texture_; |
+} |
+ |
+// Detaches the surface texture with no context current so the attached |
+// texture id is preserved. NOTE(review): per the thread below, this detach |
+// may be unnecessary if SurfaceTexture does not delete the texture in this |
+// situation — confirm against the SurfaceTexture API before removing. |
+void GLImageSurfaceTexture::Destroy() { |
+  if (surface_texture_) |
+    DetachFromGLContext(surface_texture_.get()); |
no sievers
2014/03/14 01:24:51
Is this necessary? I *thought* it does not try to
reveman
2014/03/14 19:44:53
Yes, I think you're right. This also allows me to
 |
+} |
+ |
+// Returns the size supplied at construction. |
+gfx::Size GLImageSurfaceTexture::GetSize() { return size_; } |
+ |
+// Updates the surface texture's image into the texture currently bound to |
+// |target|, which must be GL_TEXTURE_EXTERNAL_OES (DCHECKed). The surface |
+// texture is first detached without deleting its previously attached |
+// texture id, then re-attached — presumably to the texture id bound on the |
+// current context (SurfaceTexture API semantics; confirm) — before |
+// UpdateTexImage(). Always returns true. |
+bool GLImageSurfaceTexture::BindTexImage(unsigned target) { |
+  TRACE_EVENT0("gpu", "GLImageSurfaceTexture::BindTexImage"); |
+ |
+  DCHECK(surface_texture_); |
+  // Detach from any previous context/texture so AttachToGLContext() below |
+  // can succeed on this context. |
+  DetachFromGLContext(surface_texture_.get()); |
+  DCHECK_EQ(static_cast<unsigned>(GL_TEXTURE_EXTERNAL_OES), target); |
+  surface_texture_->AttachToGLContext(); |
+  surface_texture_->UpdateTexImage(); |
+  return true; |
+} |
+ |
+// Intentionally a no-op for surface textures. |
+void GLImageSurfaceTexture::ReleaseTexImage(unsigned target) {} |
+ |
+// Intentionally a no-op for surface textures. |
+void GLImageSurfaceTexture::WillUseTexImage() {} |
+ |
+// Intentionally a no-op for surface textures. |
+void GLImageSurfaceTexture::DidUseTexImage() {} |
+ |
+} // namespace gfx |