Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(451)

Side by Side Diff: runtime/vm/heap.cc

Issue 1212943010: Safer interface for heap iteration. (Closed) Base URL: git@github.com:dart-lang/sdk.git@master
Patch Set: Created 5 years, 5 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a 2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file. 3 // BSD-style license that can be found in the LICENSE file.
4 4
5 #include "vm/heap.h" 5 #include "vm/heap.h"
6 6
7 #include "platform/assert.h" 7 #include "platform/assert.h"
8 #include "platform/utils.h" 8 #include "platform/utils.h"
9 #include "vm/flags.h" 9 #include "vm/flags.h"
10 #include "vm/isolate.h" 10 #include "vm/isolate.h"
(...skipping 201 matching lines...) Expand 10 before | Expand all | Expand 10 after
212 return old_space_->Contains(addr, HeapPage::kExecutable); 212 return old_space_->Contains(addr, HeapPage::kExecutable);
213 } 213 }
214 214
215 215
216 void Heap::VisitObjects(ObjectVisitor* visitor) const { 216 void Heap::VisitObjects(ObjectVisitor* visitor) const {
217 new_space_->VisitObjects(visitor); 217 new_space_->VisitObjects(visitor);
218 old_space_->VisitObjects(visitor); 218 old_space_->VisitObjects(visitor);
219 } 219 }
220 220
221 221
// Blocks until the old space is quiescent (no concurrent sweeper task),
// then claims exclusive task ownership of it for the scope's lifetime so
// iteration can proceed safely. RAII: released in the destructor.
222 HeapIterationScope::HeapIterationScope()
223 : StackResource(Thread::Current()->isolate()),
224 old_space_(isolate()->heap()->old_space()) {
225 // It's not yet safe to iterate over a paged space while it's concurrently
226 // sweeping, so wait for any such task to complete first.
227 MonitorLocker ml(old_space_->tasks_lock());
228 while (old_space_->tasks() > 0) {
229 ml.Wait();
230 }
// Claim the space: tasks() == 1 makes any other task (GC or a second
// HeapIterationScope) block in the Wait() loop above.
// NOTE(review): a *nested* HeapIterationScope on the same isolate would
// deadlock here waiting for tasks() to return to 0 — confirm callers
// never nest these scopes (raised by siva below).
231 old_space_->set_tasks(1);
siva 2015/07/07 18:10:24 How do we ensure that people don't end up with nes
koda 2015/07/07 22:46:39 Done.
232 }
233
234
235 HeapIterationScope::~HeapIterationScope() {
236 old_space_->set_tasks(0);
siva 2015/07/07 18:10:24 Doesn't this need to be set under a lock as well?
koda 2015/07/07 22:46:39 Done.
237 }
238
239
240 void Heap::IterateObjects(ObjectVisitor* visitor) const {
241 // The visitor must not allocate from the heap.
242 NoSafepointScope no_safepoint_scope_;
243 new_space_->VisitObjects(visitor);
244 IterateOldObjects(visitor);
245 }
246
247
// Visits every object in old space. The HeapIterationScope waits for and
// excludes concurrent old-space tasks (e.g. sweeping) for the duration of
// the walk, so the visitor sees a stable page list.
248 void Heap::IterateOldObjects(ObjectVisitor* visitor) const {
249 HeapIterationScope heap_iteration_scope;
250 old_space_->VisitObjects(visitor);
251 }
252
253
222 void Heap::VisitObjectPointers(ObjectPointerVisitor* visitor) const { 254 void Heap::VisitObjectPointers(ObjectPointerVisitor* visitor) const {
223 new_space_->VisitObjectPointers(visitor); 255 new_space_->VisitObjectPointers(visitor);
224 old_space_->VisitObjectPointers(visitor); 256 old_space_->VisitObjectPointers(visitor);
225 } 257 }
226 258
227 259
228 RawInstructions* Heap::FindObjectInCodeSpace(FindObjectVisitor* visitor) const { 260 RawInstructions* Heap::FindObjectInCodeSpace(FindObjectVisitor* visitor) const {
229 // Only executable pages can have RawInstructions objects. 261 // Only executable pages can have RawInstructions objects.
230 RawObject* raw_obj = old_space_->FindObject(visitor, HeapPage::kExecutable); 262 RawObject* raw_obj = old_space_->FindObject(visitor, HeapPage::kExecutable);
231 ASSERT((raw_obj == Object::null()) || 263 ASSERT((raw_obj == Object::null()) ||
232 (raw_obj->GetClassId() == kInstructionsCid)); 264 (raw_obj->GetClassId() == kInstructionsCid));
233 return reinterpret_cast<RawInstructions*>(raw_obj); 265 return reinterpret_cast<RawInstructions*>(raw_obj);
234 } 266 }
235 267
236 268
237 RawObject* Heap::FindOldObject(FindObjectVisitor* visitor) const { 269 RawObject* Heap::FindOldObject(FindObjectVisitor* visitor) const {
238 // Wait for any concurrent GC tasks to finish before walking. 270 HeapIterationScope heap_iteration_scope;
239 MonitorLocker ml(old_space_->tasks_lock());
240 while (old_space_->tasks() > 0) {
241 ml.Wait();
242 }
243 return old_space_->FindObject(visitor, HeapPage::kData); 271 return old_space_->FindObject(visitor, HeapPage::kData);
244 } 272 }
245 273
246 274
247 RawObject* Heap::FindNewObject(FindObjectVisitor* visitor) const { 275 RawObject* Heap::FindNewObject(FindObjectVisitor* visitor) const {
248 return new_space_->FindObject(visitor); 276 return new_space_->FindObject(visitor);
249 } 277 }
250 278
251 279
252 RawObject* Heap::FindObject(FindObjectVisitor* visitor) const { 280 RawObject* Heap::FindObject(FindObjectVisitor* visitor) const {
253 ASSERT(isolate()->no_safepoint_scope_depth() != 0); 281 // The visitor must not allocate from the heap.
282 NoSafepointScope no_safepoint_scope;
254 RawObject* raw_obj = FindNewObject(visitor); 283 RawObject* raw_obj = FindNewObject(visitor);
255 if (raw_obj != Object::null()) { 284 if (raw_obj != Object::null()) {
256 return raw_obj; 285 return raw_obj;
257 } 286 }
258 raw_obj = FindOldObject(visitor); 287 raw_obj = FindOldObject(visitor);
259 if (raw_obj != Object::null()) { 288 if (raw_obj != Object::null()) {
260 return raw_obj; 289 return raw_obj;
261 } 290 }
262 raw_obj = FindObjectInCodeSpace(visitor); 291 raw_obj = FindObjectInCodeSpace(visitor);
263 return raw_obj; 292 return raw_obj;
(...skipping 218 matching lines...) Expand 10 before | Expand all | Expand 10 after
482 // VM isolate heap is premarked. 511 // VM isolate heap is premarked.
483 VerifyObjectVisitor vm_object_visitor( 512 VerifyObjectVisitor vm_object_visitor(
484 isolate(), allocated_set, kRequireMarked); 513 isolate(), allocated_set, kRequireMarked);
485 vm_isolate->heap()->VisitObjects(&vm_object_visitor); 514 vm_isolate->heap()->VisitObjects(&vm_object_visitor);
486 } 515 }
487 return allocated_set; 516 return allocated_set;
488 } 517 }
489 518
490 519
491 bool Heap::Verify(MarkExpectation mark_expectation) const { 520
// Public entry point: takes a HeapIterationScope (excluding concurrent
// old-space tasks) and delegates the actual walk to VerifyGC. Always
// returns true so it can be used inside an ASSERT.
521 HeapIterationScope heap_iteration_scope;
522 return VerifyGC(mark_expectation);
523 }
524
525
526 bool Heap::VerifyGC(MarkExpectation mark_expectation) const {
// Builds the set of all allocated objects (checking mark bits per
// |mark_expectation|) and then verifies via VerifyPointersVisitor that
// every visited pointer lands in that set. NOTE(review): assumes the
// caller (e.g. Heap::Verify) already holds a HeapIterationScope —
// confirm no caller invokes this during concurrent sweeping.
492 ObjectSet* allocated_set = CreateAllocatedObjectSet(mark_expectation); 527 ObjectSet* allocated_set = CreateAllocatedObjectSet(mark_expectation);
493 VerifyPointersVisitor visitor(isolate(), allocated_set); 528 VerifyPointersVisitor visitor(isolate(), allocated_set);
494 VisitObjectPointers(&visitor); 529 VisitObjectPointers(&visitor);
495 delete allocated_set; 530 delete allocated_set;
496 // Only returning a value so that Heap::Validate can be called from an ASSERT. 531 // Only returning a value so that Heap::Validate can be called from an ASSERT.
497 return true; 532 return true;
498 } 533 }
499 534
500 535
501 void Heap::PrintSizes() const { 536 void Heap::PrintSizes() const {
(...skipping 212 matching lines...) Expand 10 before | Expand all | Expand 10 after
714 heap->DisableGrowthControl(); 749 heap->DisableGrowthControl();
715 } 750 }
716 751
717 752
718 NoHeapGrowthControlScope::~NoHeapGrowthControlScope() { 753 NoHeapGrowthControlScope::~NoHeapGrowthControlScope() {
719 Heap* heap = reinterpret_cast<Isolate*>(isolate())->heap(); 754 Heap* heap = reinterpret_cast<Isolate*>(isolate())->heap();
720 heap->SetGrowthControlState(current_growth_controller_state_); 755 heap->SetGrowthControlState(current_growth_controller_state_);
721 } 756 }
722 757
723 } // namespace dart 758 } // namespace dart
OLDNEW
« runtime/vm/heap.h ('K') | « runtime/vm/heap.h ('k') | runtime/vm/isolate.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698