Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(1184)

Side by Side Diff: content/common/gpu/texture_image_transport_surface.cc

Issue 11475017: Revert 171569 as it broke some browser_tests on win_aura. (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src/
Patch Set: Created 8 years ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
OLDNEW
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "content/common/gpu/texture_image_transport_surface.h" 5 #include "content/common/gpu/texture_image_transport_surface.h"
6 6
7 #include <string> 7 #include <string>
8 #include <vector> 8 #include <vector>
9 9
10 #include "base/command_line.h" 10 #include "base/command_line.h"
11 #include "content/common/gpu/gl_scoped_binders.h" 11 #include "content/common/gpu/gl_scoped_binders.h"
12 #include "content/common/gpu/gpu_channel.h" 12 #include "content/common/gpu/gpu_channel.h"
13 #include "content/common/gpu/gpu_channel_manager.h" 13 #include "content/common/gpu/gpu_channel_manager.h"
14 #include "content/common/gpu/gpu_messages.h" 14 #include "content/common/gpu/gpu_messages.h"
15 #include "content/common/gpu/sync_point_manager.h" 15 #include "content/common/gpu/sync_point_manager.h"
16 #include "content/public/common/content_switches.h" 16 #include "content/public/common/content_switches.h"
17 #include "gpu/command_buffer/service/context_group.h" 17 #include "gpu/command_buffer/service/context_group.h"
18 #include "gpu/command_buffer/service/gpu_scheduler.h" 18 #include "gpu/command_buffer/service/gpu_scheduler.h"
19 #include "gpu/command_buffer/service/texture_definition.h" 19 #include "gpu/command_buffer/service/texture_manager.h"
20 20
21 using gpu::gles2::ContextGroup; 21 using gpu::gles2::ContextGroup;
22 using gpu::gles2::MailboxManager;
23 using gpu::gles2::MailboxName;
24 using gpu::gles2::TextureDefinition;
25 using gpu::gles2::TextureManager; 22 using gpu::gles2::TextureManager;
23 typedef TextureManager::TextureInfo TextureInfo;
26 24
27 namespace content { 25 namespace content {
28 26
29 TextureImageTransportSurface::Texture::Texture() 27 TextureImageTransportSurface::Texture::Texture()
30 : service_id(0), 28 : client_id(0),
31 surface_handle(0) { 29 sent_to_client(false) {
32 } 30 }
33 31
34 TextureImageTransportSurface::Texture::~Texture() { 32 TextureImageTransportSurface::Texture::~Texture() {
35 } 33 }
36 34
37 TextureImageTransportSurface::TextureImageTransportSurface( 35 TextureImageTransportSurface::TextureImageTransportSurface(
38 GpuChannelManager* manager, 36 GpuChannelManager* manager,
39 GpuCommandBufferStub* stub, 37 GpuCommandBufferStub* stub,
40 const gfx::GLSurfaceHandle& handle) 38 const gfx::GLSurfaceHandle& handle)
41 : fbo_id_(0), 39 : fbo_id_(0),
40 front_(0),
42 stub_destroyed_(false), 41 stub_destroyed_(false),
43 backbuffer_suggested_allocation_(true), 42 backbuffer_suggested_allocation_(true),
44 frontbuffer_suggested_allocation_(true), 43 frontbuffer_suggested_allocation_(true),
44 frontbuffer_is_protected_(true),
45 protection_state_id_(0),
45 handle_(handle), 46 handle_(handle),
47 parent_stub_(NULL),
46 is_swap_buffers_pending_(false), 48 is_swap_buffers_pending_(false),
47 did_unschedule_(false) { 49 did_unschedule_(false),
50 did_flip_(false) {
48 helper_.reset(new ImageTransportHelper(this, 51 helper_.reset(new ImageTransportHelper(this,
49 manager, 52 manager,
50 stub, 53 stub,
51 gfx::kNullPluginWindow)); 54 gfx::kNullPluginWindow));
52 } 55 }
53 56
54 TextureImageTransportSurface::~TextureImageTransportSurface() { 57 TextureImageTransportSurface::~TextureImageTransportSurface() {
55 DCHECK(stub_destroyed_); 58 DCHECK(stub_destroyed_);
56 Destroy(); 59 Destroy();
57 } 60 }
58 61
59 bool TextureImageTransportSurface::Initialize() { 62 bool TextureImageTransportSurface::Initialize() {
60 mailbox_manager_ = 63 GpuChannelManager* manager = helper_->manager();
61 helper_->stub()->decoder()->GetContextGroup()->mailbox_manager(); 64 GpuChannel* parent_channel = manager->LookupChannel(handle_.parent_client_id);
65 if (!parent_channel)
66 return false;
62 67
63 backbuffer_.surface_handle = 1; 68 parent_stub_ = parent_channel->LookupCommandBuffer(handle_.parent_context_id);
69 if (!parent_stub_)
70 return false;
64 71
65 GpuChannelManager* manager = helper_->manager(); 72 parent_stub_->AddDestructionObserver(this);
73 TextureManager* texture_manager =
74 parent_stub_->decoder()->GetContextGroup()->texture_manager();
75 DCHECK(texture_manager);
76
77 for (int i = 0; i < 2; ++i) {
78 Texture& texture = textures_[i];
79 texture.client_id = handle_.parent_texture_id[i];
80 texture.info = texture_manager->GetTextureInfo(texture.client_id);
81 if (!texture.info)
82 return false;
83
84 if (!texture.info->target())
85 texture_manager->SetInfoTarget(texture.info, GL_TEXTURE_2D);
86 texture_manager->SetParameter(
87 texture.info, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
88 texture_manager->SetParameter(
89 texture.info, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
90 texture_manager->SetParameter(
91 texture.info, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
92 texture_manager->SetParameter(
93 texture.info, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
94 }
95
66 surface_ = manager->GetDefaultOffscreenSurface(); 96 surface_ = manager->GetDefaultOffscreenSurface();
67 if (!surface_.get()) 97 if (!surface_.get())
68 return false; 98 return false;
69 99
70 if (!helper_->Initialize()) 100 if (!helper_->Initialize())
71 return false; 101 return false;
72 102
73 GpuChannel* parent_channel = manager->LookupChannel(handle_.parent_client_id); 103 const CommandLine* command_line = CommandLine::ForCurrentProcess();
74 if (parent_channel) { 104 if (command_line->HasSwitch(switches::kUIPrioritizeInGpuProcess))
75 const CommandLine* command_line = CommandLine::ForCurrentProcess(); 105 helper_->SetPreemptByCounter(parent_channel->MessagesPendingCount());
76 if (command_line->HasSwitch(switches::kUIPrioritizeInGpuProcess))
77 helper_->SetPreemptByCounter(parent_channel->MessagesPendingCount());
78 }
79 106
80 return true; 107 return true;
81 } 108 }
82 109
83 void TextureImageTransportSurface::Destroy() { 110 void TextureImageTransportSurface::Destroy() {
111 if (parent_stub_) {
112 parent_stub_->decoder()->MakeCurrent();
113 ReleaseParentStub();
114 }
115
84 if (surface_.get()) 116 if (surface_.get())
85 surface_ = NULL; 117 surface_ = NULL;
86 118
87 helper_->Destroy(); 119 helper_->Destroy();
88 } 120 }
89 121
90 bool TextureImageTransportSurface::DeferDraws() { 122 bool TextureImageTransportSurface::DeferDraws() {
91 // The command buffer hit a draw/clear command that could clobber the 123 // The command buffer hit a draw/clear command that could clobber the
92 // texture in use by the UI compositor. If a Swap is pending, abort 124 // texture in use by the UI compositor. If a Swap is pending, abort
93 // processing of the command by returning true and unschedule until the Swap 125 // processing of the command by returning true and unschedule until the Swap
(...skipping 16 matching lines...) Expand all
110 } 142 }
111 143
112 bool TextureImageTransportSurface::OnMakeCurrent(gfx::GLContext* context) { 144 bool TextureImageTransportSurface::OnMakeCurrent(gfx::GLContext* context) {
113 if (stub_destroyed_) { 145 if (stub_destroyed_) {
114 // Early-exit so that we don't recreate the fbo. We still want to return 146 // Early-exit so that we don't recreate the fbo. We still want to return
115 // true, so that the context is made current and the GLES2DecoderImpl can 147 // true, so that the context is made current and the GLES2DecoderImpl can
116 // release its own resources. 148 // release its own resources.
117 return true; 149 return true;
118 } 150 }
119 151
120 if (!context_.get()) {
121 DCHECK(helper_->stub());
122 context_ = helper_->stub()->decoder()->GetGLContext();
123 }
124
125 if (!fbo_id_) { 152 if (!fbo_id_) {
126 glGenFramebuffersEXT(1, &fbo_id_); 153 glGenFramebuffersEXT(1, &fbo_id_);
127 glBindFramebufferEXT(GL_FRAMEBUFFER, fbo_id_); 154 glBindFramebufferEXT(GL_FRAMEBUFFER, fbo_id_);
128 current_size_ = gfx::Size(1, 1); 155 CreateBackTexture(gfx::Size(1, 1));
129 helper_->stub()->AddDestructionObserver(this);
130 }
131
132 // We could be receiving non-deferred GL commands, that is anything that does
133 // not need a framebuffer.
134 if (!backbuffer_.service_id && !is_swap_buffers_pending_ &&
135 backbuffer_suggested_allocation_) {
136 CreateBackTexture();
137 156
138 #ifndef NDEBUG 157 #ifndef NDEBUG
139 GLenum status = glCheckFramebufferStatusEXT(GL_FRAMEBUFFER); 158 GLenum status = glCheckFramebufferStatusEXT(GL_FRAMEBUFFER);
140 if (status != GL_FRAMEBUFFER_COMPLETE) { 159 if (status != GL_FRAMEBUFFER_COMPLETE) {
141 DLOG(ERROR) << "Framebuffer incomplete."; 160 DLOG(ERROR) << "Framebuffer incomplete.";
142 glDeleteFramebuffersEXT(1, &fbo_id_); 161 glDeleteFramebuffersEXT(1, &fbo_id_);
143 fbo_id_ = 0; 162 fbo_id_ = 0;
144 return false; 163 return false;
145 } 164 }
146 #endif 165 #endif
166 DCHECK(helper_->stub());
167 helper_->stub()->AddDestructionObserver(this);
147 } 168 }
169
148 return true; 170 return true;
149 } 171 }
150 172
151 unsigned int TextureImageTransportSurface::GetBackingFrameBufferObject() { 173 unsigned int TextureImageTransportSurface::GetBackingFrameBufferObject() {
152 return fbo_id_; 174 return fbo_id_;
153 } 175 }
154 176
155 void TextureImageTransportSurface::SetBackbufferAllocation(bool allocation) { 177 void TextureImageTransportSurface::SetBackbufferAllocation(bool allocation) {
156 DCHECK(!is_swap_buffers_pending_); 178 DCHECK(!is_swap_buffers_pending_);
157 if (backbuffer_suggested_allocation_ == allocation) 179 if (backbuffer_suggested_allocation_ == allocation)
158 return; 180 return;
159 backbuffer_suggested_allocation_ = allocation; 181 backbuffer_suggested_allocation_ = allocation;
160 182
183 if (!helper_->MakeCurrent())
184 return;
185
161 if (backbuffer_suggested_allocation_) { 186 if (backbuffer_suggested_allocation_) {
162 DCHECK(!backbuffer_.service_id); 187 DCHECK(!textures_[back()].info->service_id() ||
163 CreateBackTexture(); 188 !textures_[back()].sent_to_client);
189 CreateBackTexture(textures_[back()].size);
164 } else { 190 } else {
165 ReleaseBackTexture(); 191 ReleaseTexture(back());
166 } 192 }
167 } 193 }
168 194
169 void TextureImageTransportSurface::SetFrontbufferAllocation(bool allocation) { 195 void TextureImageTransportSurface::SetFrontbufferAllocation(bool allocation) {
170 if (frontbuffer_suggested_allocation_ == allocation) 196 if (frontbuffer_suggested_allocation_ == allocation)
171 return; 197 return;
172 frontbuffer_suggested_allocation_ = allocation; 198 frontbuffer_suggested_allocation_ = allocation;
199 AdjustFrontBufferAllocation();
200 }
173 201
174 if (!frontbuffer_suggested_allocation_) { 202 void TextureImageTransportSurface::AdjustFrontBufferAllocation() {
175 GpuHostMsg_AcceleratedSurfaceRelease_Params params; 203 if (!helper_->MakeCurrent())
176 helper_->SendAcceleratedSurfaceRelease(params); 204 return;
205
206 if (!frontbuffer_suggested_allocation_ && !frontbuffer_is_protected_ &&
207 textures_[front()].info->service_id()) {
208 ReleaseTexture(front());
209 if (textures_[front()].sent_to_client) {
210 GpuHostMsg_AcceleratedSurfaceRelease_Params params;
211 params.identifier = textures_[front()].client_id;
212 helper_->SendAcceleratedSurfaceRelease(params);
213 textures_[front()].sent_to_client = false;
214 }
177 } 215 }
178 } 216 }
179 217
180 void* TextureImageTransportSurface::GetShareHandle() { 218 void* TextureImageTransportSurface::GetShareHandle() {
181 return GetHandle(); 219 return GetHandle();
182 } 220 }
183 221
184 void* TextureImageTransportSurface::GetDisplay() { 222 void* TextureImageTransportSurface::GetDisplay() {
185 return surface_.get() ? surface_->GetDisplay() : NULL; 223 return surface_.get() ? surface_->GetDisplay() : NULL;
186 } 224 }
187 225
188 void* TextureImageTransportSurface::GetConfig() { 226 void* TextureImageTransportSurface::GetConfig() {
189 return surface_.get() ? surface_->GetConfig() : NULL; 227 return surface_.get() ? surface_->GetConfig() : NULL;
190 } 228 }
191 229
192 void TextureImageTransportSurface::OnResize(gfx::Size size) { 230 void TextureImageTransportSurface::OnResize(gfx::Size size) {
193 current_size_ = size; 231 CreateBackTexture(size);
194 CreateBackTexture();
195 } 232 }
196 233
197 void TextureImageTransportSurface::OnWillDestroyStub( 234 void TextureImageTransportSurface::OnWillDestroyStub(
198 GpuCommandBufferStub* stub) { 235 GpuCommandBufferStub* stub) {
199 DCHECK(stub == helper_->stub()); 236 if (stub == parent_stub_) {
200 stub->RemoveDestructionObserver(this); 237 ReleaseParentStub();
238 helper_->SetPreemptByCounter(NULL);
239 } else {
240 DCHECK(stub == helper_->stub());
241 stub->RemoveDestructionObserver(this);
201 242
 202 GpuHostMsg_AcceleratedSurfaceRelease_Params params; 243 // We are losing the stub owning us; this is our last chance to clean up the
203 helper_->SendAcceleratedSurfaceRelease(params); 244 // resources we allocated in the stub's context.
245 if (fbo_id_) {
246 glDeleteFramebuffersEXT(1, &fbo_id_);
247 CHECK_GL_ERROR();
248 fbo_id_ = 0;
249 }
204 250
205 ReleaseBackTexture(); 251 stub_destroyed_ = true;
206
 207 // We are losing the stub owning us; this is our last chance to clean up the
208 // resources we allocated in the stub's context.
209 if (fbo_id_) {
210 glDeleteFramebuffersEXT(1, &fbo_id_);
211 CHECK_GL_ERROR();
212 fbo_id_ = 0;
213 } 252 }
214
215 stub_destroyed_ = true;
216 } 253 }
217 254
218 bool TextureImageTransportSurface::SwapBuffers() { 255 bool TextureImageTransportSurface::SwapBuffers() {
219 DCHECK(backbuffer_suggested_allocation_); 256 DCHECK(backbuffer_suggested_allocation_);
220 if (!frontbuffer_suggested_allocation_) 257 if (!frontbuffer_suggested_allocation_ || !frontbuffer_is_protected_)
221 return true; 258 return true;
259 if (!parent_stub_) {
260 LOG(ERROR) << "SwapBuffers failed because no parent stub.";
261 return false;
262 }
222 263
223 glFlush(); 264 glFlush();
224 ProduceTexture(backbuffer_); 265 front_ = back();
266 previous_damage_rect_ = gfx::Rect(textures_[front()].size);
225 267
226 // Do not allow destruction while we are still waiting for a swap ACK, 268 DCHECK(textures_[front()].client_id != 0);
227 // so we do not leak a texture in the mailbox.
228 AddRef();
229 269
230 DCHECK(backbuffer_.size == current_size_);
231 GpuHostMsg_AcceleratedSurfaceBuffersSwapped_Params params; 270 GpuHostMsg_AcceleratedSurfaceBuffersSwapped_Params params;
232 params.surface_handle = backbuffer_.surface_handle; 271 params.surface_handle = textures_[front()].client_id;
233 params.size = backbuffer_.size; 272 params.size = textures_[front()].size;
273 params.protection_state_id = protection_state_id_;
274 params.skip_ack = false;
234 helper_->SendAcceleratedSurfaceBuffersSwapped(params); 275 helper_->SendAcceleratedSurfaceBuffersSwapped(params);
235 276
236 DCHECK(!is_swap_buffers_pending_); 277 DCHECK(!is_swap_buffers_pending_);
237 is_swap_buffers_pending_ = true; 278 is_swap_buffers_pending_ = true;
238 return true; 279 return true;
239 } 280 }
240 281
241 bool TextureImageTransportSurface::PostSubBuffer( 282 bool TextureImageTransportSurface::PostSubBuffer(
242 int x, int y, int width, int height) { 283 int x, int y, int width, int height) {
243 DCHECK(backbuffer_suggested_allocation_); 284 DCHECK(backbuffer_suggested_allocation_);
244 if (!frontbuffer_suggested_allocation_) 285 DCHECK(textures_[back()].info->service_id());
286 if (!frontbuffer_suggested_allocation_ || !frontbuffer_is_protected_)
245 return true; 287 return true;
288 // If we are recreating the frontbuffer with this swap, make sure we are
289 // drawing a full frame.
290 DCHECK(textures_[front()].info->service_id() ||
291 (!x && !y && gfx::Size(width, height) == textures_[back()].size));
292 if (!parent_stub_) {
293 LOG(ERROR) << "PostSubBuffer failed because no parent stub.";
294 return false;
295 }
296
246 const gfx::Rect new_damage_rect(x, y, width, height); 297 const gfx::Rect new_damage_rect(x, y, width, height);
247 DCHECK(gfx::Rect(gfx::Point(), current_size_).Contains(new_damage_rect));
248 298
249 // An empty damage rect is a successful no-op. 299 // An empty damage rect is a successful no-op.
250 if (new_damage_rect.IsEmpty()) 300 if (new_damage_rect.IsEmpty())
251 return true; 301 return true;
252 302
303 int back_texture_service_id = textures_[back()].info->service_id();
304 int front_texture_service_id = textures_[front()].info->service_id();
305
306 gfx::Size expected_size = textures_[back()].size;
307 bool surfaces_same_size = textures_[front()].size == expected_size;
308
309 if (surfaces_same_size) {
310 std::vector<gfx::Rect> regions_to_copy;
311 GetRegionsToCopy(previous_damage_rect_, new_damage_rect, &regions_to_copy);
312
313 ScopedFrameBufferBinder fbo_binder(fbo_id_);
314 glFramebufferTexture2DEXT(GL_FRAMEBUFFER,
315 GL_COLOR_ATTACHMENT0,
316 GL_TEXTURE_2D,
317 front_texture_service_id,
318 0);
319 ScopedTextureBinder texture_binder(back_texture_service_id);
320
321 for (size_t i = 0; i < regions_to_copy.size(); ++i) {
322 const gfx::Rect& region_to_copy = regions_to_copy[i];
323 if (!region_to_copy.IsEmpty()) {
324 glCopyTexSubImage2D(GL_TEXTURE_2D, 0, region_to_copy.x(),
325 region_to_copy.y(), region_to_copy.x(), region_to_copy.y(),
326 region_to_copy.width(), region_to_copy.height());
327 }
328 }
329 } else if (!surfaces_same_size && did_flip_) {
330 DCHECK(new_damage_rect == gfx::Rect(expected_size));
331 }
332
253 glFlush(); 333 glFlush();
254 ProduceTexture(backbuffer_); 334 front_ = back();
335 previous_damage_rect_ = new_damage_rect;
255 336
256 // Do not allow destruction while we are still waiting for a swap ACK, 337 DCHECK(textures_[front()].client_id);
257 // so we do not leak a texture in the mailbox.
258 AddRef();
259
260 DCHECK(current_size_ == backbuffer_.size);
261 338
262 GpuHostMsg_AcceleratedSurfacePostSubBuffer_Params params; 339 GpuHostMsg_AcceleratedSurfacePostSubBuffer_Params params;
263 params.surface_handle = backbuffer_.surface_handle; 340 params.surface_handle = textures_[front()].client_id;
264 params.surface_size = backbuffer_.size; 341 params.surface_size = textures_[front()].size;
265 params.x = x; 342 params.x = x;
266 params.y = y; 343 params.y = y;
267 params.width = width; 344 params.width = width;
268 params.height = height; 345 params.height = height;
346 params.protection_state_id = protection_state_id_;
269 helper_->SendAcceleratedSurfacePostSubBuffer(params); 347 helper_->SendAcceleratedSurfacePostSubBuffer(params);
270 348
271 DCHECK(!is_swap_buffers_pending_); 349 DCHECK(!is_swap_buffers_pending_);
272 is_swap_buffers_pending_ = true; 350 is_swap_buffers_pending_ = true;
273 return true; 351 return true;
274 } 352 }
275 353
276 std::string TextureImageTransportSurface::GetExtensions() { 354 std::string TextureImageTransportSurface::GetExtensions() {
277 std::string extensions = gfx::GLSurface::GetExtensions(); 355 std::string extensions = gfx::GLSurface::GetExtensions();
278 extensions += extensions.empty() ? "" : " "; 356 extensions += extensions.empty() ? "" : " ";
279 extensions += "GL_CHROMIUM_front_buffer_cached "; 357 extensions += "GL_CHROMIUM_front_buffer_cached ";
280 extensions += "GL_CHROMIUM_post_sub_buffer"; 358 extensions += "GL_CHROMIUM_post_sub_buffer";
281 return extensions; 359 return extensions;
282 } 360 }
283 361
284 gfx::Size TextureImageTransportSurface::GetSize() { 362 gfx::Size TextureImageTransportSurface::GetSize() {
285 gfx::Size size = current_size_; 363 gfx::Size size = textures_[back()].size;
286 364
287 // OSMesa expects a non-zero size. 365 // OSMesa expects a non-zero size.
288 return gfx::Size(size.width() == 0 ? 1 : size.width(), 366 return gfx::Size(size.width() == 0 ? 1 : size.width(),
289 size.height() == 0 ? 1 : size.height()); 367 size.height() == 0 ? 1 : size.height());
290 } 368 }
291 369
292 void* TextureImageTransportSurface::GetHandle() { 370 void* TextureImageTransportSurface::GetHandle() {
293 return surface_.get() ? surface_->GetHandle() : NULL; 371 return surface_.get() ? surface_->GetHandle() : NULL;
294 } 372 }
295 373
296 unsigned TextureImageTransportSurface::GetFormat() { 374 unsigned TextureImageTransportSurface::GetFormat() {
297 return surface_.get() ? surface_->GetFormat() : 0; 375 return surface_.get() ? surface_->GetFormat() : 0;
298 } 376 }
299 377
300 void TextureImageTransportSurface::OnBufferPresented(uint64 surface_handle, 378 void TextureImageTransportSurface::OnSetFrontSurfaceIsProtected(
379 bool is_protected, uint32 protection_state_id) {
380 protection_state_id_ = protection_state_id;
381 if (frontbuffer_is_protected_ == is_protected)
382 return;
383 frontbuffer_is_protected_ = is_protected;
384 AdjustFrontBufferAllocation();
385
386 // If surface is set to protected, and we haven't actually released it yet,
387 // we can set the ui surface handle now just by sending a swap message.
388 if (is_protected && textures_[front()].info->service_id() &&
389 textures_[front()].sent_to_client) {
390 GpuHostMsg_AcceleratedSurfaceBuffersSwapped_Params params;
391 params.surface_handle = textures_[front()].client_id;
392 params.size = textures_[front()].size;
393 params.protection_state_id = protection_state_id_;
394 params.skip_ack = true;
395 helper_->SendAcceleratedSurfaceBuffersSwapped(params);
396 }
397 }
398
399 void TextureImageTransportSurface::OnBufferPresented(bool presented,
301 uint32 sync_point) { 400 uint32 sync_point) {
302 if (sync_point == 0) { 401 if (sync_point == 0) {
303 BufferPresentedImpl(surface_handle); 402 BufferPresentedImpl(presented);
304 } else { 403 } else {
305 helper_->manager()->sync_point_manager()->AddSyncPointCallback( 404 helper_->manager()->sync_point_manager()->AddSyncPointCallback(
306 sync_point, 405 sync_point,
307 base::Bind(&TextureImageTransportSurface::BufferPresentedImpl, 406 base::Bind(&TextureImageTransportSurface::BufferPresentedImpl,
308 this, 407 this->AsWeakPtr(),
309 surface_handle)); 408 presented));
310 } 409 }
311
312 // Careful, we might get deleted now if we were only waiting for
313 // a final swap ACK.
314 Release();
315 } 410 }
316 411
317 void TextureImageTransportSurface::BufferPresentedImpl(uint64 surface_handle) { 412 void TextureImageTransportSurface::BufferPresentedImpl(bool presented) {
318 DCHECK(!backbuffer_.service_id);
319 if (surface_handle) {
320 DCHECK(surface_handle == 1 || surface_handle == 2);
321 backbuffer_.surface_handle = surface_handle;
322 ConsumeTexture(backbuffer_);
323 } else {
324 // We didn't get back a texture, so allocate 'the other' buffer.
325 backbuffer_.surface_handle = (backbuffer_.surface_handle == 1) ? 2 : 1;
326 mailbox_name(backbuffer_.surface_handle) = MailboxName();
327 }
328
329 if (stub_destroyed_ && backbuffer_.service_id) {
330 // TODO(sievers): Remove this after changes to the mailbox to take ownership
331 // of the service ids.
332 DCHECK(context_.get() && surface_.get());
333 if (context_->MakeCurrent(surface_.get()))
334 glDeleteTextures(1, &backbuffer_.service_id);
335
336 return;
337 }
338
339 DCHECK(is_swap_buffers_pending_); 413 DCHECK(is_swap_buffers_pending_);
340 is_swap_buffers_pending_ = false; 414 is_swap_buffers_pending_ = false;
341 415
342 // We should not have allowed the backbuffer to be discarded while the ack 416 if (presented) {
343 // was pending. 417 // If we had not flipped, the two frame damage tracking is inconsistent.
344 DCHECK(backbuffer_suggested_allocation_); 418 // So conservatively take the whole frame.
419 if (!did_flip_)
420 previous_damage_rect_ = gfx::Rect(textures_[front()].size);
421 } else {
422 front_ = back();
423 previous_damage_rect_ = gfx::Rect(0, 0, 0, 0);
424 }
425
426 did_flip_ = presented;
345 427
346 // We're relying on the fact that the parent context is 428 // We're relying on the fact that the parent context is
 347 // finished with its context when it inserts the sync point that 429 // finished with its context when it inserts the sync point that
348 // triggers this callback. 430 // triggers this callback.
349 if (helper_->MakeCurrent()) { 431 if (helper_->MakeCurrent()) {
350 if (backbuffer_.size != current_size_ || !backbuffer_.service_id) 432 if ((presented && textures_[front()].size != textures_[back()].size) ||
351 CreateBackTexture(); 433 !textures_[back()].info->service_id() ||
352 else 434 !textures_[back()].sent_to_client) {
435 // We may get an ACK from a stale swap just to reschedule. In that case,
436 // we may not have a backbuffer suggestion and should not recreate one.
437 if (backbuffer_suggested_allocation_)
438 CreateBackTexture(textures_[front()].size);
439 } else {
353 AttachBackTextureToFBO(); 440 AttachBackTextureToFBO();
441 }
354 } 442 }
355 443
356 // Even if MakeCurrent fails, schedule anyway, to trigger the lost context 444 // Even if MakeCurrent fails, schedule anyway, to trigger the lost context
357 // logic. 445 // logic.
358 if (did_unschedule_) { 446 if (did_unschedule_) {
359 did_unschedule_ = false; 447 did_unschedule_ = false;
360 helper_->SetScheduled(true); 448 helper_->SetScheduled(true);
361 } 449 }
362 } 450 }
363 451
364 void TextureImageTransportSurface::OnResizeViewACK() { 452 void TextureImageTransportSurface::OnResizeViewACK() {
365 NOTREACHED(); 453 NOTREACHED();
366 } 454 }
367 455
368 void TextureImageTransportSurface::ReleaseBackTexture() { 456 void TextureImageTransportSurface::ReleaseTexture(int id) {
369 if (!backbuffer_.service_id) 457 if (!parent_stub_)
370 return; 458 return;
459 Texture& texture = textures_[id];
460 TextureInfo* info = texture.info;
461 DCHECK(info);
371 462
372 glDeleteTextures(1, &backbuffer_.service_id); 463 GLuint service_id = info->service_id();
373 backbuffer_.service_id = 0; 464 if (!service_id)
374 mailbox_name(backbuffer_.surface_handle) = MailboxName(); 465 return;
466 info->SetServiceId(0);
467
468 {
469 ScopedFrameBufferBinder fbo_binder(fbo_id_);
470 glDeleteTextures(1, &service_id);
471 }
375 glFlush(); 472 glFlush();
376 CHECK_GL_ERROR(); 473 CHECK_GL_ERROR();
377 } 474 }
378 475
379 void TextureImageTransportSurface::CreateBackTexture() { 476 void TextureImageTransportSurface::CreateBackTexture(const gfx::Size& size) {
380 // If |is_swap_buffers_pending| we are waiting for our backbuffer 477 if (!parent_stub_)
381 // in the mailbox, so we shouldn't be reallocating it now. 478 return;
382 DCHECK(!is_swap_buffers_pending_); 479 Texture& texture = textures_[back()];
480 TextureInfo* info = texture.info;
481 DCHECK(info);
383 482
384 if (backbuffer_.service_id && backbuffer_.size == current_size_) 483 GLuint service_id = info->service_id();
484
485 if (service_id && texture.size == size && texture.sent_to_client)
385 return; 486 return;
386 487
387 if (!backbuffer_.service_id) { 488 if (!service_id) {
388 MailboxName new_mailbox_name; 489 glGenTextures(1, &service_id);
389 MailboxName& name = mailbox_name(backbuffer_.surface_handle); 490 info->SetServiceId(service_id);
390 // This slot should be uninitialized.
391 DCHECK(!memcmp(&name, &new_mailbox_name, sizeof(MailboxName)));
392 mailbox_manager_->GenerateMailboxName(&new_mailbox_name);
393 name = new_mailbox_name;
394 glGenTextures(1, &backbuffer_.service_id);
395 } 491 }
396 492
397 backbuffer_.size = current_size_; 493 if (size != texture.size) {
494 texture.size = size;
495 TextureManager* texture_manager =
496 parent_stub_->decoder()->GetContextGroup()->texture_manager();
497 texture_manager->SetLevelInfo(
498 info,
499 GL_TEXTURE_2D,
500 0,
501 GL_RGBA,
502 size.width(),
503 size.height(),
504 1,
505 0,
506 GL_RGBA,
507 GL_UNSIGNED_BYTE,
508 true);
509 }
398 510
399 { 511 {
400 ScopedTextureBinder texture_binder(backbuffer_.service_id); 512 ScopedTextureBinder texture_binder(service_id);
401 glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); 513 glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
402 glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); 514 glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
403 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); 515 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
404 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); 516 glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
405 glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, 517 glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA,
406 current_size_.width(), current_size_.height(), 0, 518 size.width(), size.height(), 0,
407 GL_RGBA, GL_UNSIGNED_BYTE, NULL); 519 GL_RGBA, GL_UNSIGNED_BYTE, NULL);
408 CHECK_GL_ERROR(); 520 CHECK_GL_ERROR();
409 } 521 }
410 522
411 AttachBackTextureToFBO(); 523 AttachBackTextureToFBO();
412 524
413 const MailboxName& name = mailbox_name(backbuffer_.surface_handle);
414
415 GpuHostMsg_AcceleratedSurfaceNew_Params params; 525 GpuHostMsg_AcceleratedSurfaceNew_Params params;
416 params.width = current_size_.width(); 526 params.width = size.width();
417 params.height = current_size_.height(); 527 params.height = size.height();
418 params.surface_handle = backbuffer_.surface_handle; 528 params.surface_handle = texture.client_id;
419 params.mailbox_name.append(
420 reinterpret_cast<const char*>(&name), sizeof(name));
421 helper_->SendAcceleratedSurfaceNew(params); 529 helper_->SendAcceleratedSurfaceNew(params);
530 texture.sent_to_client = true;
422 } 531 }
423 532
424 void TextureImageTransportSurface::AttachBackTextureToFBO() { 533 void TextureImageTransportSurface::AttachBackTextureToFBO() {
425 DCHECK(backbuffer_.service_id); 534 if (!parent_stub_)
535 return;
536 TextureInfo* info = textures_[back()].info;
537 DCHECK(info);
538
426 ScopedFrameBufferBinder fbo_binder(fbo_id_); 539 ScopedFrameBufferBinder fbo_binder(fbo_id_);
427 glFramebufferTexture2DEXT(GL_FRAMEBUFFER, 540 glFramebufferTexture2DEXT(GL_FRAMEBUFFER,
428 GL_COLOR_ATTACHMENT0, 541 GL_COLOR_ATTACHMENT0,
429 GL_TEXTURE_2D, 542 GL_TEXTURE_2D,
430 backbuffer_.service_id, 543 info->service_id(),
431 0); 544 0);
432 glFlush(); 545 glFlush();
433 CHECK_GL_ERROR(); 546 CHECK_GL_ERROR();
434 547
435 #ifndef NDEBUG 548 #ifndef NDEBUG
436 GLenum status = glCheckFramebufferStatusEXT(GL_FRAMEBUFFER); 549 GLenum status = glCheckFramebufferStatusEXT(GL_FRAMEBUFFER);
437 if (status != GL_FRAMEBUFFER_COMPLETE) { 550 if (status != GL_FRAMEBUFFER_COMPLETE) {
438 DLOG(FATAL) << "Framebuffer incomplete: " << status; 551 DLOG(ERROR) << "Framebuffer incomplete.";
439 } 552 }
440 #endif 553 #endif
441 } 554 }
442 555
443 void TextureImageTransportSurface::ConsumeTexture(Texture& texture) { 556 void TextureImageTransportSurface::ReleaseParentStub() {
444 DCHECK(!texture.service_id); 557 DCHECK(parent_stub_);
445 DCHECK(texture.surface_handle == 1 || texture.surface_handle == 2); 558 parent_stub_->RemoveDestructionObserver(this);
446 559 for (int i = 0; i < 2; ++i) {
447 scoped_ptr<TextureDefinition> definition(mailbox_manager_->ConsumeTexture( 560 Texture& texture = textures_[i];
448 GL_TEXTURE_2D, mailbox_name(texture.surface_handle))); 561 texture.info = NULL;
449 if (definition.get()) { 562 if (!texture.sent_to_client)
450 texture.service_id = definition->ReleaseServiceId(); 563 continue;
451 texture.size = gfx::Size(definition->level_infos()[0][0].width, 564 GpuHostMsg_AcceleratedSurfaceRelease_Params params;
452 definition->level_infos()[0][0].height); 565 params.identifier = texture.client_id;
566 helper_->SendAcceleratedSurfaceRelease(params);
453 } 567 }
454 } 568 parent_stub_ = NULL;
455
456 void TextureImageTransportSurface::ProduceTexture(Texture& texture) {
457 DCHECK(texture.service_id);
458 DCHECK(texture.surface_handle == 1 || texture.surface_handle == 2);
459 TextureManager* texture_manager =
460 helper_->stub()->decoder()->GetContextGroup()->texture_manager();
461 DCHECK(texture.size.width() > 0 && texture.size.height() > 0);
462 TextureDefinition::LevelInfo info(
463 GL_TEXTURE_2D, GL_RGBA, texture.size.width(), texture.size.height(), 1,
464 0, GL_RGBA, GL_UNSIGNED_BYTE, true);
465
466 TextureDefinition::LevelInfos level_infos;
467 level_infos.resize(1);
468 level_infos[0].resize(texture_manager->MaxLevelsForTarget(GL_TEXTURE_2D));
469 level_infos[0][0] = info;
470 scoped_ptr<TextureDefinition> definition(new TextureDefinition(
471 GL_TEXTURE_2D,
472 texture.service_id,
473 GL_LINEAR,
474 GL_LINEAR,
475 GL_CLAMP_TO_EDGE,
476 GL_CLAMP_TO_EDGE,
477 GL_NONE,
478 true,
479 level_infos));
480 // Pass NULL as |owner| here to avoid errors from glConsumeTextureCHROMIUM()
481 // when the renderer context group goes away before the RWHV handles a pending
482 // ACK. We avoid leaking a texture in the mailbox by waiting for the final ACK
483 // at which point we consume the correct texture back.
484 mailbox_manager_->ProduceTexture(
485 GL_TEXTURE_2D,
486 mailbox_name(texture.surface_handle),
487 definition.release(),
488 NULL);
489 texture.service_id = 0;
490 } 569 }
491 570
492 } // namespace content 571 } // namespace content
OLDNEW
« no previous file with comments | « content/common/gpu/texture_image_transport_surface.h ('k') | content/port/browser/render_widget_host_view_port.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698