OLD | NEW |
1 // Copyright 2012 The Chromium Authors. All rights reserved. | 1 // Copyright 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "cc/layers/delegated_renderer_layer_impl.h" | 5 #include "cc/layers/delegated_renderer_layer_impl.h" |
6 | 6 |
7 #include <algorithm> | 7 #include <algorithm> |
8 #include <utility> | 8 #include <utility> |
9 | 9 |
10 #include "base/bind.h" | 10 #include "base/bind.h" |
(...skipping 133 matching lines...)
144 // Save the new frame's resources, but don't give them to the ResourceProvider | 144 // Save the new frame's resources, but don't give them to the ResourceProvider |
145 // until they are active, since the resources on the active tree will still be | 145 // until they are active, since the resources on the active tree will still be |
146 // used and we don't want to return them early. | 146 // used and we don't want to return them early. |
147 resources_.swap(resources_in_frame); | 147 resources_.swap(resources_in_frame); |
148 TakeOwnershipOfResourcesIfOnActiveTree(resources_); | 148 TakeOwnershipOfResourcesIfOnActiveTree(resources_); |
149 | 149 |
150 inverse_device_scale_factor_ = 1.0f / frame_data->device_scale_factor; | 150 inverse_device_scale_factor_ = 1.0f / frame_data->device_scale_factor; |
151 // Display size is already set so we can compute what the damage rect | 151 // Display size is already set so we can compute what the damage rect |
152 // will be in layer space. The damage may exceed the visible portion of | 152 // will be in layer space. The damage may exceed the visible portion of |
153 // the frame, so intersect the damage to the layer's bounds. | 153 // the frame, so intersect the damage to the layer's bounds. |
154 RenderPass* new_root_pass = render_pass_list.back(); | 154 RenderPass* new_root_pass = render_pass_list.back().get(); |
155 gfx::Size frame_size = new_root_pass->output_rect.size(); | 155 gfx::Size frame_size = new_root_pass->output_rect.size(); |
156 gfx::Rect damage_in_layer = | 156 gfx::Rect damage_in_layer = |
157 gfx::ScaleToEnclosingRect(damage_in_frame, inverse_device_scale_factor_); | 157 gfx::ScaleToEnclosingRect(damage_in_frame, inverse_device_scale_factor_); |
158 SetUpdateRect(gfx::IntersectRects( | 158 SetUpdateRect(gfx::IntersectRects( |
159 gfx::UnionRects(update_rect(), damage_in_layer), gfx::Rect(bounds()))); | 159 gfx::UnionRects(update_rect(), damage_in_layer), gfx::Rect(bounds()))); |
160 | 160 |
161 SetRenderPasses(&render_pass_list); | 161 SetRenderPasses(&render_pass_list); |
162 have_render_passes_to_push_ = true; | 162 have_render_passes_to_push_ = true; |
163 } | 163 } |
164 | 164 |
165 void DelegatedRendererLayerImpl::TakeOwnershipOfResourcesIfOnActiveTree( | 165 void DelegatedRendererLayerImpl::TakeOwnershipOfResourcesIfOnActiveTree( |
166 const ResourceProvider::ResourceIdSet& resources) { | 166 const ResourceProvider::ResourceIdSet& resources) { |
167 DCHECK(child_id_); | 167 DCHECK(child_id_); |
168 if (!layer_tree_impl()->IsActiveTree()) | 168 if (!layer_tree_impl()->IsActiveTree()) |
169 return; | 169 return; |
170 layer_tree_impl()->resource_provider()->DeclareUsedResourcesFromChild( | 170 layer_tree_impl()->resource_provider()->DeclareUsedResourcesFromChild( |
171 child_id_, resources); | 171 child_id_, resources); |
172 } | 172 } |
173 | 173 |
174 void DelegatedRendererLayerImpl::SetRenderPasses( | 174 void DelegatedRendererLayerImpl::SetRenderPasses( |
175 RenderPassList* render_passes_in_draw_order) { | 175 RenderPassList* render_passes_in_draw_order) { |
176 ClearRenderPasses(); | 176 ClearRenderPasses(); |
177 | 177 |
178 for (size_t i = 0; i < render_passes_in_draw_order->size(); ++i) { | 178 for (size_t i = 0; i < render_passes_in_draw_order->size(); ++i) { |
179 RenderPassList::iterator to_take = | 179 RenderPassList::iterator to_take = |
180 render_passes_in_draw_order->begin() + i; | 180 render_passes_in_draw_order->begin() + i; |
181 render_passes_index_by_id_.insert( | 181 render_passes_index_by_id_.insert( |
182 RenderPassToIndexMap::value_type((*to_take)->id, i)); | 182 RenderPassToIndexMap::value_type((*to_take)->id, i)); |
183 scoped_ptr<RenderPass> taken_render_pass = | 183 render_passes_in_draw_order_.push_back(to_take->Pass()); |
184 render_passes_in_draw_order->take(to_take); | |
185 render_passes_in_draw_order_.push_back(taken_render_pass.Pass()); | |
186 } | 184 } |
187 | 185 |
188 // Give back an empty array instead of nulls. | 186 // Give back an empty array instead of nulls. |
189 render_passes_in_draw_order->clear(); | 187 render_passes_in_draw_order->clear(); |
190 | 188 |
191 // The render passes given here become part of the RenderSurfaceLayerList, so | 189 // The render passes given here become part of the RenderSurfaceLayerList, so |
192 // changing them requires recomputing the RenderSurfaceLayerList. | 190 // changing them requires recomputing the RenderSurfaceLayerList. |
193 layer_tree_impl()->set_needs_update_draw_properties(); | 191 layer_tree_impl()->set_needs_update_draw_properties(); |
194 } | 192 } |
195 | 193 |
(...skipping 43 matching lines...)
239 *output_render_pass_id = | 237 *output_render_pass_id = |
240 RenderPassId(id(), IndexToId(delegated_render_pass_index)); | 238 RenderPassId(id(), IndexToId(delegated_render_pass_index)); |
241 return true; | 239 return true; |
242 } | 240 } |
243 | 241 |
244 void DelegatedRendererLayerImpl::AppendContributingRenderPasses( | 242 void DelegatedRendererLayerImpl::AppendContributingRenderPasses( |
245 RenderPassSink* render_pass_sink) { | 243 RenderPassSink* render_pass_sink) { |
246 DCHECK(HasContributingDelegatedRenderPasses()); | 244 DCHECK(HasContributingDelegatedRenderPasses()); |
247 | 245 |
248 const RenderPass* root_delegated_render_pass = | 246 const RenderPass* root_delegated_render_pass = |
249 render_passes_in_draw_order_.back(); | 247 render_passes_in_draw_order_.back().get(); |
250 gfx::Size frame_size = root_delegated_render_pass->output_rect.size(); | 248 gfx::Size frame_size = root_delegated_render_pass->output_rect.size(); |
251 gfx::Transform delegated_frame_to_root_transform = screen_space_transform(); | 249 gfx::Transform delegated_frame_to_root_transform = screen_space_transform(); |
252 delegated_frame_to_root_transform.Scale(inverse_device_scale_factor_, | 250 delegated_frame_to_root_transform.Scale(inverse_device_scale_factor_, |
253 inverse_device_scale_factor_); | 251 inverse_device_scale_factor_); |
254 | 252 |
255 for (size_t i = 0; i < render_passes_in_draw_order_.size() - 1; ++i) { | 253 for (size_t i = 0; i < render_passes_in_draw_order_.size() - 1; ++i) { |
256 RenderPassId output_render_pass_id; | 254 RenderPassId output_render_pass_id; |
257 bool present = | 255 bool present = |
258 ConvertDelegatedRenderPassId(render_passes_in_draw_order_[i]->id, | 256 ConvertDelegatedRenderPassId(render_passes_in_draw_order_[i]->id, |
259 &output_render_pass_id); | 257 &output_render_pass_id); |
(...skipping 23 matching lines...)
283 AppendQuadsData* append_quads_data) { | 281 AppendQuadsData* append_quads_data) { |
284 AppendRainbowDebugBorder(render_pass); | 282 AppendRainbowDebugBorder(render_pass); |
285 | 283 |
286 // This list will be empty after a lost context until a new frame arrives. | 284 // This list will be empty after a lost context until a new frame arrives. |
287 if (render_passes_in_draw_order_.empty()) | 285 if (render_passes_in_draw_order_.empty()) |
288 return; | 286 return; |
289 | 287 |
290 RenderPassId target_render_pass_id = render_pass->id; | 288 RenderPassId target_render_pass_id = render_pass->id; |
291 | 289 |
292 const RenderPass* root_delegated_render_pass = | 290 const RenderPass* root_delegated_render_pass = |
293 render_passes_in_draw_order_.back(); | 291 render_passes_in_draw_order_.back().get(); |
294 | 292 |
295 DCHECK(root_delegated_render_pass->output_rect.origin().IsOrigin()); | 293 DCHECK(root_delegated_render_pass->output_rect.origin().IsOrigin()); |
296 gfx::Size frame_size = root_delegated_render_pass->output_rect.size(); | 294 gfx::Size frame_size = root_delegated_render_pass->output_rect.size(); |
297 | 295 |
298 // If the index of the RenderPassId is 0, then it is a RenderPass generated | 296 // If the index of the RenderPassId is 0, then it is a RenderPass generated |
299 // for a layer in this compositor, not the delegating renderer. Then we want | 297 // for a layer in this compositor, not the delegating renderer. Then we want |
300 // to merge our root RenderPass with the target RenderPass. Otherwise, it is | 298 // to merge our root RenderPass with the target RenderPass. Otherwise, it is |
301 // some RenderPass which we added from the delegating renderer. | 299 // some RenderPass which we added from the delegating renderer. |
302 bool should_merge_root_render_pass_with_target = !target_render_pass_id.index; | 300 bool should_merge_root_render_pass_with_target = !target_render_pass_id.index; |
303 if (should_merge_root_render_pass_with_target) { | 301 if (should_merge_root_render_pass_with_target) { |
304 // Verify that the RenderPass we are appending to is created by our | 302 // Verify that the RenderPass we are appending to is created by our |
305 // render_target. | 303 // render_target. |
306 DCHECK(target_render_pass_id.layer_id == render_target()->id()); | 304 DCHECK(target_render_pass_id.layer_id == render_target()->id()); |
307 | 305 |
308 AppendRenderPassQuads(render_pass, | 306 AppendRenderPassQuads(render_pass, |
309 root_delegated_render_pass, | 307 root_delegated_render_pass, |
310 frame_size); | 308 frame_size); |
311 } else { | 309 } else { |
312 // Verify that the RenderPass we are appending to was created by us. | 310 // Verify that the RenderPass we are appending to was created by us. |
313 DCHECK(target_render_pass_id.layer_id == id()); | 311 DCHECK(target_render_pass_id.layer_id == id()); |
314 | 312 |
315 size_t render_pass_index = IdToIndex(target_render_pass_id.index); | 313 size_t render_pass_index = IdToIndex(target_render_pass_id.index); |
316 const RenderPass* delegated_render_pass = | 314 const RenderPass* delegated_render_pass = |
317 render_passes_in_draw_order_[render_pass_index]; | 315 render_passes_in_draw_order_[render_pass_index].get(); |
318 AppendRenderPassQuads(render_pass, | 316 AppendRenderPassQuads(render_pass, |
319 delegated_render_pass, | 317 delegated_render_pass, |
320 frame_size); | 318 frame_size); |
321 } | 319 } |
322 } | 320 } |
323 | 321 |
324 void DelegatedRendererLayerImpl::AppendRainbowDebugBorder( | 322 void DelegatedRendererLayerImpl::AppendRainbowDebugBorder( |
325 RenderPass* render_pass) { | 323 RenderPass* render_pass) { |
326 if (!ShowDebugBorders()) | 324 if (!ShowDebugBorders()) |
327 return; | 325 return; |
(...skipping 85 matching lines...)
413 RenderPass* render_pass, | 411 RenderPass* render_pass, |
414 const RenderPass* delegated_render_pass, | 412 const RenderPass* delegated_render_pass, |
415 const gfx::Size& frame_size) const { | 413 const gfx::Size& frame_size) const { |
416 const SharedQuadState* delegated_shared_quad_state = nullptr; | 414 const SharedQuadState* delegated_shared_quad_state = nullptr; |
417 SharedQuadState* output_shared_quad_state = nullptr; | 415 SharedQuadState* output_shared_quad_state = nullptr; |
418 | 416 |
419 gfx::Transform delegated_frame_to_target_transform = draw_transform(); | 417 gfx::Transform delegated_frame_to_target_transform = draw_transform(); |
420 delegated_frame_to_target_transform.Scale(inverse_device_scale_factor_, | 418 delegated_frame_to_target_transform.Scale(inverse_device_scale_factor_, |
421 inverse_device_scale_factor_); | 419 inverse_device_scale_factor_); |
422 bool is_root_delegated_render_pass = | 420 bool is_root_delegated_render_pass = |
423 delegated_render_pass == render_passes_in_draw_order_.back(); | 421 delegated_render_pass == render_passes_in_draw_order_.back().get(); |
424 for (const auto& delegated_quad : delegated_render_pass->quad_list) { | 422 for (const auto& delegated_quad : delegated_render_pass->quad_list) { |
425 if (delegated_quad->shared_quad_state != delegated_shared_quad_state) { | 423 if (delegated_quad->shared_quad_state != delegated_shared_quad_state) { |
426 delegated_shared_quad_state = delegated_quad->shared_quad_state; | 424 delegated_shared_quad_state = delegated_quad->shared_quad_state; |
427 output_shared_quad_state = render_pass->CreateAndAppendSharedQuadState(); | 425 output_shared_quad_state = render_pass->CreateAndAppendSharedQuadState(); |
428 output_shared_quad_state->CopyFrom(delegated_shared_quad_state); | 426 output_shared_quad_state->CopyFrom(delegated_shared_quad_state); |
429 | 427 |
430 if (is_root_delegated_render_pass) { | 428 if (is_root_delegated_render_pass) { |
431 output_shared_quad_state->quad_to_target_transform.ConcatTransform( | 429 output_shared_quad_state->quad_to_target_transform.ConcatTransform( |
432 delegated_frame_to_target_transform); | 430 delegated_frame_to_target_transform); |
433 | 431 |
(...skipping 80 matching lines...)
514 if (own_child_id_) { | 512 if (own_child_id_) { |
515 ResourceProvider* provider = layer_tree_impl()->resource_provider(); | 513 ResourceProvider* provider = layer_tree_impl()->resource_provider(); |
516 provider->DestroyChild(child_id_); | 514 provider->DestroyChild(child_id_); |
517 } | 515 } |
518 | 516 |
519 resources_.clear(); | 517 resources_.clear(); |
520 child_id_ = 0; | 518 child_id_ = 0; |
521 } | 519 } |
522 | 520 |
523 } // namespace cc | 521 } // namespace cc |
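
Note: the change that recurs throughout this diff is mechanical. RenderPass objects in render_passes_in_draw_order_ are now reached through .get() on their owning smart pointer, and ownership is transferred with Pass() instead of being pulled out with ScopedPtrVector::take(). Below is a minimal standalone sketch of that ownership pattern, assuming the pass list holds unique-ownership pointers; it uses std::unique_ptr and std::move in place of cc's scoped_ptr and Pass(), and the name TakePasses is illustrative only, not a cc API.

#include <memory>
#include <utility>
#include <vector>

struct RenderPass { int id = 0; };

// Assumed shape of the pass list: a vector of owning pointers.
using RenderPassList = std::vector<std::unique_ptr<RenderPass>>;

// Move every pass out of |in_draw_order| and into |out|, then hand the
// caller back an empty list instead of a list of nulls, mirroring what
// SetRenderPasses() does in the diff above.
void TakePasses(RenderPassList* in_draw_order, RenderPassList* out) {
  for (auto it = in_draw_order->begin(); it != in_draw_order->end(); ++it)
    out->push_back(std::move(*it));
  in_draw_order->clear();
}

int main() {
  RenderPassList frame_passes;
  frame_passes.push_back(std::unique_ptr<RenderPass>(new RenderPass));

  RenderPassList stored;
  TakePasses(&frame_passes, &stored);

  // Raw, non-owning access to the root pass mirrors the new
  // render_passes_in_draw_order_.back().get() calls in the diff.
  RenderPass* root_pass = stored.back().get();
  return root_pass->id;
}

The diff handles the moved-from slots the same way: after transferring each pass, SetRenderPasses() clears render_passes_in_draw_order so the caller gets back an empty list rather than a list of nulls.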