Chromium Code Reviews

Unified diff: content/common/gpu/texture_image_transport_surface.cc

Issue 14188053: gpu: Change Produce/ConsumeTexture to allow texture sharing (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src
Patch Set: review comments Created 7 years, 7 months ago
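For context on the issue title, the shape of the interface change can be modeled roughly as follows. This is a toy model, not the real gpu::gles2::MailboxManager API: before the patch, producing a texture handed a self-contained TextureDefinition over to the mailbox (and consuming handed it back out), so a texture could live in only one place at a time; after the patch, producing registers a pointer to a live, shared Texture object, so the producing context keeps using it while other contexts consume the same texture. OldStyleMailbox, NewStyleMailbox, and ToyTexture below are invented names for illustration only.

#include <cassert>
#include <map>
#include <memory>
#include <string>
#include <utility>

struct ToyTexture {
  explicit ToyTexture(int id) : service_id(id) {}
  int service_id;
};

// Pre-patch shape (toy): the mailbox owns whatever is put into it, and
// consuming removes it again, so ownership moves back and forth.
class OldStyleMailbox {
 public:
  void Produce(const std::string& name, std::unique_ptr<ToyTexture> def) {
    slots_[name] = std::move(def);
  }
  std::unique_ptr<ToyTexture> Consume(const std::string& name) {
    auto it = slots_.find(name);
    if (it == slots_.end())
      return nullptr;
    std::unique_ptr<ToyTexture> def = std::move(it->second);
    slots_.erase(it);
    return def;
  }

 private:
  std::map<std::string, std::unique_ptr<ToyTexture>> slots_;
};

// Post-patch shape (toy): the mailbox only maps names to shared texture
// objects; producing does not give up ownership and consuming does not
// remove the entry, so several consumers can share one texture.
class NewStyleMailbox {
 public:
  bool Produce(const std::string& name, ToyTexture* texture) {
    textures_[name] = texture;
    return true;
  }
  ToyTexture* Consume(const std::string& name) {
    auto it = textures_.find(name);
    return it == textures_.end() ? nullptr : it->second;
  }

 private:
  std::map<std::string, ToyTexture*> textures_;  // not owned
};

int main() {
  // Old style: the texture lives in exactly one place at a time.
  OldStyleMailbox old_mailbox;
  old_mailbox.Produce("back", std::unique_ptr<ToyTexture>(new ToyTexture(7)));
  std::unique_ptr<ToyTexture> taken = old_mailbox.Consume("back");
  std::unique_ptr<ToyTexture> again = old_mailbox.Consume("back");
  assert(taken != nullptr);
  assert(again == nullptr);  // the first consume emptied the slot

  // New style: the mailbox maps a name to a shared texture object.
  NewStyleMailbox mailbox;
  ToyTexture backbuffer(42);
  mailbox.Produce("back", &backbuffer);
  assert(mailbox.Consume("back") == &backbuffer);
  assert(mailbox.Consume("back") == &backbuffer);  // both consumers share it
  return 0;
}

In the diff below, the shared texture comes from texture_manager->Produce(backbuffer_) in CreateBackTexture(), and the surface keeps its own reference alive through the backbuffer_ and frontbuffer_ TextureRefs rather than moving a definition through the mailbox on every swap.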
 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
 #include "content/common/gpu/texture_image_transport_surface.h"
 
 #include <string>
 #include <vector>
 
 #include "base/command_line.h"
 #include "content/common/gpu/gpu_channel.h"
 #include "content/common/gpu/gpu_channel_manager.h"
 #include "content/common/gpu/gpu_messages.h"
 #include "content/common/gpu/sync_point_manager.h"
 #include "content/public/common/content_switches.h"
 #include "gpu/command_buffer/service/context_group.h"
 #include "gpu/command_buffer/service/gpu_scheduler.h"
 #include "ui/gl/scoped_binders.h"
 
 using gpu::gles2::ContextGroup;
+using gpu::gles2::GLES2Decoder;
 using gpu::gles2::MailboxManager;
 using gpu::gles2::MailboxName;
-using gpu::gles2::TextureDefinition;
+using gpu::gles2::Texture;
 using gpu::gles2::TextureManager;
+using gpu::gles2::TextureRef;
 
 namespace content {
 
 TextureImageTransportSurface::TextureImageTransportSurface(
     GpuChannelManager* manager,
     GpuCommandBufferStub* stub,
     const gfx::GLSurfaceHandle& handle)
     : fbo_id_(0),
-      backbuffer_(CreateTextureDefinition(gfx::Size(), 0)),
+      current_size_(1, 1),
       stub_destroyed_(false),
       backbuffer_suggested_allocation_(true),
       frontbuffer_suggested_allocation_(true),
       handle_(handle),
       is_swap_buffers_pending_(false),
       did_unschedule_(false) {
   helper_.reset(new ImageTransportHelper(this,
                                          manager,
                                          stub,
                                          gfx::kNullPluginWindow));
(...skipping 40 matching lines...)
   // Ack arrives.
   DCHECK(!did_unschedule_);
   if (is_swap_buffers_pending_) {
     did_unschedule_ = true;
     helper_->SetScheduled(false);
     return true;
   }
   return false;
 }
 
-bool TextureImageTransportSurface::Resize(const gfx::Size&) {
-  return true;
-}
-
 bool TextureImageTransportSurface::IsOffscreen() {
   return true;
 }
 
-bool TextureImageTransportSurface::OnMakeCurrent(gfx::GLContext* context) {
-  if (stub_destroyed_) {
-    // Early-exit so that we don't recreate the fbo. We still want to return
-    // true, so that the context is made current and the GLES2DecoderImpl can
-    // release its own resources.
-    return true;
-  }
-
-  context_ = context;
-
+unsigned int TextureImageTransportSurface::GetBackingFrameBufferObject() {
+  DCHECK(helper_->stub()->decoder()->GetGLContext()->IsCurrent(NULL));
   if (!fbo_id_) {
     glGenFramebuffersEXT(1, &fbo_id_);
     glBindFramebufferEXT(GL_FRAMEBUFFER, fbo_id_);
-    current_size_ = gfx::Size(1, 1);
     helper_->stub()->AddDestructionObserver(this);
+    CreateBackTexture();
   }
 
-  // We could be receiving non-deferred GL commands, that is anything that does
-  // not need a framebuffer.
-  if (!backbuffer_->service_id() && !is_swap_buffers_pending_ &&
-      backbuffer_suggested_allocation_) {
-    CreateBackTexture();
-  }
-  return true;
-}
-
-unsigned int TextureImageTransportSurface::GetBackingFrameBufferObject() {
   return fbo_id_;
 }
 
 bool TextureImageTransportSurface::SetBackbufferAllocation(bool allocation) {
   DCHECK(!is_swap_buffers_pending_);
   if (backbuffer_suggested_allocation_ == allocation)
     return true;
   backbuffer_suggested_allocation_ = allocation;
 
   if (backbuffer_suggested_allocation_) {
-    DCHECK(!backbuffer_->service_id());
+    DCHECK(!backbuffer_);
     CreateBackTexture();
   } else {
     ReleaseBackTexture();
   }
 
   return true;
 }
 
 void TextureImageTransportSurface::SetFrontbufferAllocation(bool allocation) {
   if (frontbuffer_suggested_allocation_ == allocation)
     return;
   frontbuffer_suggested_allocation_ = allocation;
 
-  if (!frontbuffer_suggested_allocation_) {
-    GpuHostMsg_AcceleratedSurfaceRelease_Params params;
-    helper_->SendAcceleratedSurfaceRelease(params);
+  // If a swapbuffers is in flight, wait for the ack before releasing the front
+  // buffer:
+  // - we don't know yet which texture the browser will want to keep
+  // - we want to ensure we don't destroy a texture that is in flight before the
+  //   browser got a reference on it.
+  if (!frontbuffer_suggested_allocation_ &&
+      !is_swap_buffers_pending_ &&
+      helper_->MakeCurrent()) {
+    ReleaseFrontTexture();
   }
 }
 
 void* TextureImageTransportSurface::GetShareHandle() {
   return GetHandle();
 }
 
 void* TextureImageTransportSurface::GetDisplay() {
   return surface_.get() ? surface_->GetDisplay() : NULL;
 }
 
 void* TextureImageTransportSurface::GetConfig() {
   return surface_.get() ? surface_->GetConfig() : NULL;
 }
 
 void TextureImageTransportSurface::OnResize(gfx::Size size) {
+  DCHECK_GE(size.width(), 1);
+  DCHECK_GE(size.height(), 1);
   current_size_ = size;
   CreateBackTexture();
 }
 
 void TextureImageTransportSurface::OnWillDestroyStub() {
+  DCHECK(helper_->stub()->decoder()->GetGLContext()->IsCurrent(NULL));
   helper_->stub()->RemoveDestructionObserver(this);
 
-  GpuHostMsg_AcceleratedSurfaceRelease_Params params;
-  helper_->SendAcceleratedSurfaceRelease(params);
-
-  ReleaseBackTexture();
-
   // We are losing the stub owning us, this is our last chance to clean up the
   // resources we allocated in the stub's context.
+  ReleaseBackTexture();
+  ReleaseFrontTexture();
+
   if (fbo_id_) {
     glDeleteFramebuffersEXT(1, &fbo_id_);
     CHECK_GL_ERROR();
     fbo_id_ = 0;
   }
 
   stub_destroyed_ = true;
 }
 
 void TextureImageTransportSurface::SetLatencyInfo(
     const cc::LatencyInfo& latency_info) {
   latency_info_ = latency_info;
 }
 
 bool TextureImageTransportSurface::SwapBuffers() {
+  DCHECK(helper_->stub()->decoder()->GetGLContext()->IsCurrent(NULL));
   DCHECK(backbuffer_suggested_allocation_);
 
   if (!frontbuffer_suggested_allocation_)
     return true;
 
-  if (!backbuffer_->service_id()) {
+  if (!backbuffer_) {
     LOG(ERROR) << "Swap without valid backing.";
     return true;
   }
 
   DCHECK(backbuffer_size() == current_size_);
   GpuHostMsg_AcceleratedSurfaceBuffersSwapped_Params params;
   params.size = backbuffer_size();
   params.mailbox_name.assign(
-      reinterpret_cast<const char*>(&mailbox_name_), sizeof(mailbox_name_));
+      reinterpret_cast<const char*>(&back_mailbox_name_),
+      sizeof(back_mailbox_name_));
 
   glFlush();
-  ProduceTexture();
-
-  // Do not allow destruction while we are still waiting for a swap ACK,
-  // so we do not leak a texture in the mailbox.
-  AddRef();
 
   params.latency_info = latency_info_;
   helper_->SendAcceleratedSurfaceBuffersSwapped(params);
 
   DCHECK(!is_swap_buffers_pending_);
   is_swap_buffers_pending_ = true;
   return true;
 }
 
 bool TextureImageTransportSurface::PostSubBuffer(
     int x, int y, int width, int height) {
+  DCHECK(helper_->stub()->decoder()->GetGLContext()->IsCurrent(NULL));
   DCHECK(backbuffer_suggested_allocation_);
   if (!frontbuffer_suggested_allocation_)
     return true;
   const gfx::Rect new_damage_rect(x, y, width, height);
   DCHECK(gfx::Rect(gfx::Point(), current_size_).Contains(new_damage_rect));
 
   // An empty damage rect is a successful no-op.
   if (new_damage_rect.IsEmpty())
     return true;
 
-  if (!backbuffer_->service_id()) {
+  if (!backbuffer_) {
     LOG(ERROR) << "Swap without valid backing.";
     return true;
   }
 
   DCHECK(current_size_ == backbuffer_size());
   GpuHostMsg_AcceleratedSurfacePostSubBuffer_Params params;
   params.surface_size = backbuffer_size();
   params.x = x;
   params.y = y;
   params.width = width;
   params.height = height;
   params.mailbox_name.assign(
-      reinterpret_cast<const char*>(&mailbox_name_), sizeof(mailbox_name_));
+      reinterpret_cast<const char*>(&back_mailbox_name_),
+      sizeof(back_mailbox_name_));
 
   glFlush();
-  ProduceTexture();
-
-  // Do not allow destruction while we are still waiting for a swap ACK,
-  // so we do not leak a texture in the mailbox.
-  AddRef();
 
   params.latency_info = latency_info_;
   helper_->SendAcceleratedSurfacePostSubBuffer(params);
 
   DCHECK(!is_swap_buffers_pending_);
   is_swap_buffers_pending_ = true;
   return true;
 }
 
 std::string TextureImageTransportSurface::GetExtensions() {
   std::string extensions = gfx::GLSurface::GetExtensions();
   extensions += extensions.empty() ? "" : " ";
   extensions += "GL_CHROMIUM_front_buffer_cached ";
   extensions += "GL_CHROMIUM_post_sub_buffer";
   return extensions;
 }
 
 gfx::Size TextureImageTransportSurface::GetSize() {
-  gfx::Size size = current_size_;
-
-  // OSMesa expects a non-zero size.
-  return gfx::Size(size.width() == 0 ? 1 : size.width(),
-                   size.height() == 0 ? 1 : size.height());
+  return current_size_;
 }
 
 void* TextureImageTransportSurface::GetHandle() {
   return surface_.get() ? surface_->GetHandle() : NULL;
 }
 
 unsigned TextureImageTransportSurface::GetFormat() {
   return surface_.get() ? surface_->GetFormat() : 0;
 }
 
 void TextureImageTransportSurface::OnBufferPresented(
     const AcceleratedSurfaceMsg_BufferPresented_Params& params) {
   if (params.sync_point == 0) {
     BufferPresentedImpl(params.mailbox_name);
   } else {
     helper_->manager()->sync_point_manager()->AddSyncPointCallback(
         params.sync_point,
         base::Bind(&TextureImageTransportSurface::BufferPresentedImpl,
                    this,
                    params.mailbox_name));
   }
-
-  // Careful, we might get deleted now if we were only waiting for
-  // a final swap ACK.
-  Release();
 }
 
 void TextureImageTransportSurface::BufferPresentedImpl(
     const std::string& mailbox_name) {
-  DCHECK(!backbuffer_->service_id());
-  if (!mailbox_name.empty()) {
-    DCHECK(mailbox_name.length() == GL_MAILBOX_SIZE_CHROMIUM);
-    mailbox_name.copy(reinterpret_cast<char *>(&mailbox_name_),
-                      sizeof(MailboxName));
-    ConsumeTexture();
-  }
-
-  if (stub_destroyed_ && backbuffer_->service_id()) {
-    // TODO(sievers): Remove this after changes to the mailbox to take ownership
-    // of the service ids.
-    DCHECK(context_.get() && surface_.get());
-    uint32 service_id = backbuffer_->ReleaseServiceId();
-    if (context_->MakeCurrent(surface_))
-      glDeleteTextures(1, &service_id);
-
-    return;
-  }
-
   DCHECK(is_swap_buffers_pending_);
   is_swap_buffers_pending_ = false;
-
   // We should not have allowed the backbuffer to be discarded while the ack
   // was pending.
   DCHECK(backbuffer_suggested_allocation_);
+  DCHECK(backbuffer_);
+
+  bool swap = true;
+  if (!mailbox_name.empty()) {
+    DCHECK(mailbox_name.length() == GL_MAILBOX_SIZE_CHROMIUM);
+    if (!memcmp(mailbox_name.data(),
+                &back_mailbox_name_,
+                mailbox_name.length())) {
+      // The browser has skipped the frame to unblock the GPU process, waiting
+      // for one of the right size, and returned the back buffer, so don't swap.
+      swap = false;
+    }
+  }
+  if (swap) {
+    std::swap(backbuffer_, frontbuffer_);
+    std::swap(back_mailbox_name_, front_mailbox_name_);
+  }
 
   // We're relying on the fact that the parent context is
-  // finished with it's context when it inserts the sync point that
+  // finished with its context when it inserts the sync point that
   // triggers this callback.
   if (helper_->MakeCurrent()) {
-    if (backbuffer_size() != current_size_ || !backbuffer_->service_id())
+    if (frontbuffer_ && !frontbuffer_suggested_allocation_)
+      ReleaseFrontTexture();
+    if (!backbuffer_ || backbuffer_size() != current_size_)
       CreateBackTexture();
     else
       AttachBackTextureToFBO();
   }
 
   // Even if MakeCurrent fails, schedule anyway, to trigger the lost context
   // logic.
   if (did_unschedule_) {
     did_unschedule_ = false;
     helper_->SetScheduled(true);
   }
 }
 
 void TextureImageTransportSurface::OnResizeViewACK() {
   NOTREACHED();
 }
 
 void TextureImageTransportSurface::ReleaseBackTexture() {
-  if (!backbuffer_->service_id())
-    return;
-
-  uint32 service_id = backbuffer_->ReleaseServiceId();
-  glDeleteTextures(1, &service_id);
-  backbuffer_.reset(CreateTextureDefinition(gfx::Size(), 0));
-  mailbox_name_ = MailboxName();
+  DCHECK(helper_->stub()->decoder()->GetGLContext()->IsCurrent(NULL));
+  backbuffer_ = NULL;
+  back_mailbox_name_ = MailboxName();
   glFlush();
   CHECK_GL_ERROR();
 }
 
+void TextureImageTransportSurface::ReleaseFrontTexture() {
+  DCHECK(helper_->stub()->decoder()->GetGLContext()->IsCurrent(NULL));
+  frontbuffer_ = NULL;
+  front_mailbox_name_ = MailboxName();
+  glFlush();
+  CHECK_GL_ERROR();
+  GpuHostMsg_AcceleratedSurfaceRelease_Params params;
+  helper_->SendAcceleratedSurfaceRelease(params);
+}
+
 void TextureImageTransportSurface::CreateBackTexture() {
+  DCHECK(helper_->stub()->decoder()->GetGLContext()->IsCurrent(NULL));
   // If |is_swap_buffers_pending| we are waiting for our backbuffer
   // in the mailbox, so we shouldn't be reallocating it now.
   DCHECK(!is_swap_buffers_pending_);
 
-  if (backbuffer_->service_id() && backbuffer_size() == current_size_)
+  if (backbuffer_ && backbuffer_size() == current_size_)
     return;
 
-  uint32 service_id = backbuffer_->ReleaseServiceId();
-
   VLOG(1) << "Allocating new backbuffer texture";
 
   // On Qualcomm we couldn't resize an FBO texture past a certain
   // size, after we allocated it as 1x1. So here we simply delete
   // the previous texture on resize, to insure we don't 'run out of
   // memory'.
-  if (service_id &&
+  if (backbuffer_ &&
      helper_->stub()
          ->decoder()
          ->GetContextGroup()
          ->feature_info()
          ->workarounds()
          .delete_instead_of_resize_fbo) {
-    glDeleteTextures(1, &service_id);
-    service_id = 0;
-    mailbox_name_ = MailboxName();
+    ReleaseBackTexture();
+  }
+  GLES2Decoder* decoder = helper_->stub()->decoder();
+  TextureManager* texture_manager =
+      decoder->GetContextGroup()->texture_manager();
+  if (!backbuffer_) {
+    mailbox_manager_->GenerateMailboxName(&back_mailbox_name_);
+    GLuint service_id;
+    glGenTextures(1, &service_id);
+    backbuffer_ = TextureRef::Create(texture_manager, 0, service_id);
+    texture_manager->SetTarget(backbuffer_, GL_TEXTURE_2D);
+    Texture* texture = texture_manager->Produce(backbuffer_);
+    bool success = mailbox_manager_->ProduceTexture(
+        GL_TEXTURE_2D, back_mailbox_name_, texture);
+    DCHECK(success);
   }
 
-  if (!service_id) {
-    MailboxName new_mailbox_name;
-    MailboxName& name = mailbox_name_;
-    // This slot should be uninitialized.
-    DCHECK(!memcmp(&name, &new_mailbox_name, sizeof(MailboxName)));
-    mailbox_manager_->GenerateMailboxName(&new_mailbox_name);
-    name = new_mailbox_name;
-    glGenTextures(1, &service_id);
-  }
-
-  backbuffer_.reset(
-      CreateTextureDefinition(current_size_, service_id));
-
   {
-    gfx::ScopedTextureBinder texture_binder(GL_TEXTURE_2D, service_id);
-    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
-    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
-    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
-    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+    gfx::ScopedTextureBinder texture_binder(GL_TEXTURE_2D,
+                                            backbuffer_->service_id());
     glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA,
                  current_size_.width(), current_size_.height(), 0,
                  GL_RGBA, GL_UNSIGNED_BYTE, NULL);
+    gpu::gles2::ErrorState* error_state = decoder->GetErrorState();
+    texture_manager->SetParameter("Backbuffer", error_state, backbuffer_,
+                                  GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+    texture_manager->SetParameter("Backbuffer", error_state, backbuffer_,
+                                  GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+    texture_manager->SetParameter("Backbuffer", error_state, backbuffer_,
+                                  GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+    texture_manager->SetParameter("Backbuffer", error_state, backbuffer_,
+                                  GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+    texture_manager->SetLevelInfo(
+        backbuffer_,
+        GL_TEXTURE_2D,
+        0,
+        GL_RGBA,
+        current_size_.width(),
+        current_size_.height(),
+        1,
+        0,
+        GL_RGBA,
+        GL_UNSIGNED_BYTE,
+        true);
+    DCHECK(texture_manager->CanRender(backbuffer_));
     CHECK_GL_ERROR();
   }
 
   AttachBackTextureToFBO();
 }
 
 void TextureImageTransportSurface::AttachBackTextureToFBO() {
-  DCHECK(backbuffer_->service_id());
+  DCHECK(helper_->stub()->decoder()->GetGLContext()->IsCurrent(NULL));
+  DCHECK(backbuffer_);
   gfx::ScopedFrameBufferBinder fbo_binder(fbo_id_);
   glFramebufferTexture2DEXT(GL_FRAMEBUFFER,
                             GL_COLOR_ATTACHMENT0,
                             GL_TEXTURE_2D,
                             backbuffer_->service_id(),
                             0);
   CHECK_GL_ERROR();
 
 #ifndef NDEBUG
   GLenum status = glCheckFramebufferStatusEXT(GL_FRAMEBUFFER);
   if (status != GL_FRAMEBUFFER_COMPLETE) {
     DLOG(FATAL) << "Framebuffer incomplete: " << status;
   }
 #endif
 }
 
-TextureDefinition* TextureImageTransportSurface::CreateTextureDefinition(
-    gfx::Size size, int service_id) {
-  TextureDefinition::LevelInfo info(
-      GL_TEXTURE_2D, GL_RGBA, size.width(), size.height(), 1,
-      0, GL_RGBA, GL_UNSIGNED_BYTE, true);
-
-  TextureDefinition::LevelInfos level_infos;
-  level_infos.resize(1);
-  level_infos[0].resize(1);
-  level_infos[0][0] = info;
-  return new TextureDefinition(
-      GL_TEXTURE_2D,
-      service_id,
-      GL_LINEAR,
-      GL_LINEAR,
-      GL_CLAMP_TO_EDGE,
-      GL_CLAMP_TO_EDGE,
-      GL_NONE,
-      true,
-      false,
-      level_infos);
-}
-
-void TextureImageTransportSurface::ConsumeTexture() {
-  DCHECK(!backbuffer_->service_id());
-
-  backbuffer_.reset(mailbox_manager_->ConsumeTexture(
-      GL_TEXTURE_2D, mailbox_name_));
-  if (!backbuffer_) {
-    mailbox_name_ = MailboxName();
-    backbuffer_.reset(CreateTextureDefinition(gfx::Size(), 0));
-  }
-}
-
-void TextureImageTransportSurface::ProduceTexture() {
-  DCHECK(backbuffer_->service_id());
-  DCHECK(!backbuffer_size().IsEmpty());
-
-  // Pass NULL as |owner| here to avoid errors from glConsumeTextureCHROMIUM()
-  // when the renderer context group goes away before the RWHV handles a pending
-  // ACK. We avoid leaking a texture in the mailbox by waiting for the final ACK
-  // at which point we consume the correct texture back.
-  bool success = mailbox_manager_->ProduceTexture(
-      GL_TEXTURE_2D,
-      mailbox_name_,
-      backbuffer_.release(),
-      NULL);
-  DCHECK(success);
-  mailbox_name_ = MailboxName();
-  backbuffer_.reset(CreateTextureDefinition(gfx::Size(), 0));
-}
-
 }  // namespace content
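Editor's sketch of the new swap logic. The ack handling in BufferPresentedImpl() above is the heart of the patch: SwapBuffers() advertises the back buffer's mailbox name to the browser, and when the ack arrives the surface either swaps its back/front texture-and-mailbox pairs or, if the browser returned the very mailbox that was just sent (a skipped frame), leaves them alone. The standalone C++ model below illustrates only that handoff; ModelSurface, DummyTexture, and the mailbox strings are invented names, and plain string comparison stands in for the memcmp on MailboxName in the real code.

#include <cassert>
#include <iostream>
#include <memory>
#include <string>
#include <utility>

struct DummyTexture {
  explicit DummyTexture(int id_in) : id(id_in) {}
  int id;
};

class ModelSurface {
 public:
  ModelSurface()
      : backbuffer_(new DummyTexture(1)),
        frontbuffer_(new DummyTexture(2)),
        back_mailbox_("mailbox.back"),
        front_mailbox_("mailbox.front"),
        swap_pending_(false) {}

  // Analogue of SwapBuffers(): advertise the back buffer to the "browser".
  std::string SwapBuffers() {
    assert(!swap_pending_);
    swap_pending_ = true;
    return back_mailbox_;
  }

  // Analogue of BufferPresentedImpl(): the "browser" acks with the mailbox
  // it no longer needs (an empty string models a first-frame ack).
  void OnBufferPresented(const std::string& returned_mailbox) {
    assert(swap_pending_);
    swap_pending_ = false;

    bool swap = true;
    if (!returned_mailbox.empty() && returned_mailbox == back_mailbox_) {
      // The browser skipped the frame and handed the back buffer straight
      // back, so the front buffer is still on screen: don't swap.
      swap = false;
    }
    if (swap) {
      std::swap(backbuffer_, frontbuffer_);
      std::swap(back_mailbox_, front_mailbox_);
    }
  }

  int back_id() const { return backbuffer_->id; }
  int front_id() const { return frontbuffer_->id; }

 private:
  std::unique_ptr<DummyTexture> backbuffer_;
  std::unique_ptr<DummyTexture> frontbuffer_;
  std::string back_mailbox_;
  std::string front_mailbox_;
  bool swap_pending_;
};

int main() {
  ModelSurface surface;

  // Normal frame: the browser keeps the new buffer and returns the old front.
  surface.SwapBuffers();
  surface.OnBufferPresented("mailbox.front");
  std::cout << "after normal ack:  back=" << surface.back_id()
            << " front=" << surface.front_id() << "\n";  // buffers swapped

  // Skipped frame: the browser returns the very mailbox we just sent.
  std::string sent = surface.SwapBuffers();
  surface.OnBufferPresented(sent);
  std::cout << "after skipped ack: back=" << surface.back_id()
            << " front=" << surface.front_id() << "\n";  // buffers unchanged
  return 0;
}

Because the patch keeps a live texture per mailbox on the service side (one TextureRef each for backbuffer_ and frontbuffer_) instead of moving a TextureDefinition in and out of the mailbox around every swap, nothing is ever parked in the mailbox that could leak, which is presumably why the old ProduceTexture()/ConsumeTexture() pair and the AddRef()/Release() guard around the swap ack are deleted above.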
