Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(127)

Side by Side Diff: src/heap.cc

Issue 7044082: Minor cleanup of StoreBuffer related heap iteration methods. (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/gc
Patch Set: Created 9 years, 6 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
OLDNEW
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 974 matching lines...) Expand 10 before | Expand all | Expand 10 after
985 current_page_->set_scan_on_scavenge(true); 985 current_page_->set_scan_on_scavenge(true);
986 ASSERT(start_of_current_page_ != store_buffer_->Top()); 986 ASSERT(start_of_current_page_ != store_buffer_->Top());
987 store_buffer_->SetTop(start_of_current_page_); 987 store_buffer_->SetTop(start_of_current_page_);
988 } 988 }
989 } else { 989 } else {
990 UNREACHABLE(); 990 UNREACHABLE();
991 } 991 }
992 } 992 }
993 993
994 994
995 static void ScavengeObjectAndMarkSlot(HeapObject** p, HeapObject* object) {
996 Heap::ScavengeObject(p, object);
997
998 // TODO(gc) ISOLATES MERGE
999 if (HEAP->InNewSpace(*p)) {
1000 ASSERT(HEAP->InToSpace(*p));
1001 HEAP->store_buffer()->EnterDirectlyIntoStoreBuffer(
1002 reinterpret_cast<Address>(p));
1003 }
1004 }
1005
1006
1007 void Heap::Scavenge() { 995 void Heap::Scavenge() {
1008 #ifdef DEBUG 996 #ifdef DEBUG
1009 if (FLAG_enable_slow_asserts) VerifyNonPointerSpacePointers(); 997 if (FLAG_enable_slow_asserts) VerifyNonPointerSpacePointers();
1010 #endif 998 #endif
1011 999
1012 gc_state_ = SCAVENGE; 1000 gc_state_ = SCAVENGE;
1013 1001
1014 // Implements Cheney's copying algorithm 1002 // Implements Cheney's copying algorithm
1015 LOG(isolate_, ResourceEvent("scavenge", "begin")); 1003 LOG(isolate_, ResourceEvent("scavenge", "begin"));
1016 1004
(...skipping 43 matching lines...) Expand 10 before | Expand all | Expand 10 after
1060 1048
1061 ScavengeVisitor scavenge_visitor(this); 1049 ScavengeVisitor scavenge_visitor(this);
1062 // Copy roots. 1050 // Copy roots.
1063 IterateRoots(&scavenge_visitor, VISIT_ALL_IN_SCAVENGE); 1051 IterateRoots(&scavenge_visitor, VISIT_ALL_IN_SCAVENGE);
1064 1052
1065 // Copy objects reachable from the old generation. 1053 // Copy objects reachable from the old generation.
1066 { 1054 {
1067 StoreBufferRebuildScope scope(this, 1055 StoreBufferRebuildScope scope(this,
1068 store_buffer(), 1056 store_buffer(),
1069 &ScavengeStoreBufferCallback); 1057 &ScavengeStoreBufferCallback);
1070 store_buffer()->IteratePointersToNewSpace(&ScavengeObjectAndMarkSlot); 1058 store_buffer()->IteratePointersToNewSpace(&ScavengeObject);
1071 } 1059 }
1072 1060
1073 // Copy objects reachable from cells by scavenging cell values directly. 1061 // Copy objects reachable from cells by scavenging cell values directly.
1074 HeapObjectIterator cell_iterator(cell_space_); 1062 HeapObjectIterator cell_iterator(cell_space_);
1075 for (HeapObject* cell = cell_iterator.Next(); 1063 for (HeapObject* cell = cell_iterator.Next();
1076 cell != NULL; cell = cell_iterator.Next()) { 1064 cell != NULL; cell = cell_iterator.Next()) {
1077 if (cell->IsJSGlobalPropertyCell()) { 1065 if (cell->IsJSGlobalPropertyCell()) {
1078 Address value_address = 1066 Address value_address =
1079 reinterpret_cast<Address>(cell) + 1067 reinterpret_cast<Address>(cell) +
1080 (JSGlobalPropertyCell::kValueOffset - kHeapObjectTag); 1068 (JSGlobalPropertyCell::kValueOffset - kHeapObjectTag);
(...skipping 3063 matching lines...) Expand 10 before | Expand all | Expand 10 after
4144 return cell_space_->Contains(addr); 4132 return cell_space_->Contains(addr);
4145 case LO_SPACE: 4133 case LO_SPACE:
4146 return lo_space_->SlowContains(addr); 4134 return lo_space_->SlowContains(addr);
4147 } 4135 }
4148 4136
4149 return false; 4137 return false;
4150 } 4138 }
4151 4139
4152 4140
4153 #ifdef DEBUG 4141 #ifdef DEBUG
// Verification-only slot callback: does not scavenge anything.
// When we are not in GC the Heap::InNewSpace() predicate
// checks that pointers which satisfy predicate point into
// the active semispace, which is the whole point of calling it here.
static void DummyScavengePointer(HeapObject** p, HeapObject* o) {
  // TODO(gc) ISOLATES MERGE
  HEAP->InNewSpace(*p);
}
4161
4162
4163 static void VerifyPointers(
4164 PagedSpace* space,
4165 PointerRegionCallback visit_pointer_region) {
4166 PageIterator it(space);
4167
4168 while (it.has_next()) {
4169 Page* page = it.next();
4170 HEAP->IteratePointersOnPage(reinterpret_cast<PagedSpace*>(page->owner()),
4171 visit_pointer_region,
4172 &DummyScavengePointer,
4173 page);
4174 }
4175 }
4176
4177
4178 static void VerifyPointers(LargeObjectSpace* space) {
4179 LargeObjectIterator it(space);
4180 for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) {
4181 if (object->IsFixedArray()) {
4182 Address slot_address = object->address();
4183 Address end = object->address() + object->Size();
4184
4185 while (slot_address < end) {
4186 HeapObject** slot = reinterpret_cast<HeapObject**>(slot_address);
4187 // When we are not in GC the Heap::InNewSpace() predicate
4188 // checks that pointers which satisfy predicate point into
4189 // the active semispace.
4190 HEAP->InNewSpace(*slot);
4191 slot_address += kPointerSize;
4192 }
4193 }
4194 }
4195 }
4196
4197
4198 void Heap::Verify() { 4142 void Heap::Verify() {
4199 ASSERT(HasBeenSetup()); 4143 ASSERT(HasBeenSetup());
4200 4144
4201 store_buffer()->Verify(); 4145 store_buffer()->Verify();
4202 4146
4203 VerifyPointersVisitor visitor; 4147 VerifyPointersVisitor visitor;
4204 IterateRoots(&visitor, VISIT_ONLY_STRONG); 4148 IterateRoots(&visitor, VISIT_ONLY_STRONG);
4205 4149
4206 new_space_.Verify(); 4150 new_space_.Verify();
4207 4151
4208 old_pointer_space_->Verify(&visitor); 4152 old_pointer_space_->Verify(&visitor);
4209 map_space_->Verify(&visitor); 4153 map_space_->Verify(&visitor);
4210 4154
4211 VerifyPointers(old_pointer_space_, &IteratePointersToNewSpace);
4212 VerifyPointers(map_space_, &IteratePointersFromMapsToNewSpace);
4213 VerifyPointers(lo_space_);
4214
4215 VerifyPointersVisitor no_dirty_regions_visitor; 4155 VerifyPointersVisitor no_dirty_regions_visitor;
4216 old_data_space_->Verify(&no_dirty_regions_visitor); 4156 old_data_space_->Verify(&no_dirty_regions_visitor);
4217 code_space_->Verify(&no_dirty_regions_visitor); 4157 code_space_->Verify(&no_dirty_regions_visitor);
4218 cell_space_->Verify(&no_dirty_regions_visitor); 4158 cell_space_->Verify(&no_dirty_regions_visitor);
4219 4159
4220 lo_space_->Verify(); 4160 lo_space_->Verify();
4221 } 4161 }
4222 #endif // DEBUG 4162 #endif // DEBUG
4223 4163
4224 4164
(...skipping 92 matching lines...) Expand 10 before | Expand all | Expand 10 after
4317 ASSERT(reinterpret_cast<Object*>(kFromSpaceZapValue)->IsFailure()); 4257 ASSERT(reinterpret_cast<Object*>(kFromSpaceZapValue)->IsFailure());
4318 for (Address a = new_space_.FromSpaceLow(); 4258 for (Address a = new_space_.FromSpaceLow();
4319 a < new_space_.FromSpaceHigh(); 4259 a < new_space_.FromSpaceHigh();
4320 a += kPointerSize) { 4260 a += kPointerSize) {
4321 Memory::Address_at(a) = kFromSpaceZapValue; 4261 Memory::Address_at(a) = kFromSpaceZapValue;
4322 } 4262 }
4323 } 4263 }
4324 #endif // DEBUG 4264 #endif // DEBUG
4325 4265
4326 4266
4327 void Heap::IteratePointersToNewSpace(Heap* heap,
4328 Address start,
4329 Address end,
4330 ObjectSlotCallback copy_object_func) {
4331 for (Address slot_address = start;
4332 slot_address < end;
4333 slot_address += kPointerSize) {
4334 Object** slot = reinterpret_cast<Object**>(slot_address);
4335 if (heap->InNewSpace(*slot)) {
4336 HeapObject* object = reinterpret_cast<HeapObject*>(*slot);
4337 ASSERT(object->IsHeapObject());
4338 copy_object_func(reinterpret_cast<HeapObject**>(slot), object);
4339 }
4340 }
4341 }
4342
4343
4344 // Compute start address of the first map following given addr.
4345 static inline Address MapStartAlign(Address addr) {
4346 Address page = Page::FromAddress(addr)->ObjectAreaStart();
4347 return page + (((addr - page) + (Map::kSize - 1)) / Map::kSize * Map::kSize);
4348 }
4349
4350
4351 // Compute end address of the first map preceding given addr.
4352 static inline Address MapEndAlign(Address addr) {
4353 Address page = Page::FromAllocationTop(addr)->ObjectAreaStart();
4354 return page + ((addr - page) / Map::kSize * Map::kSize);
4355 }
4356
4357
4358 static void IteratePointersToNewSpaceInMaps(
4359 Heap* heap,
4360 Address start,
4361 Address end,
4362 ObjectSlotCallback copy_object_func) {
4363 ASSERT(MapStartAlign(start) == start);
4364 ASSERT(MapEndAlign(end) == end);
4365
4366 Address map_address = start;
4367 while (map_address < end) {
4368 ASSERT(!heap->InNewSpace(Memory::Object_at(map_address)));
4369 ASSERT(Memory::Object_at(map_address)->IsMap());
4370
4371 Address pointer_fields_start = map_address + Map::kPointerFieldsBeginOffset;
4372 Address pointer_fields_end = map_address + Map::kPointerFieldsEndOffset;
4373
4374 Heap::IteratePointersToNewSpace(heap,
4375 pointer_fields_start,
4376 pointer_fields_end,
4377 copy_object_func);
4378 map_address += Map::kSize;
4379 }
4380 }
4381
4382
4383 void Heap::IteratePointersFromMapsToNewSpace(
4384 Heap* heap,
4385 Address start,
4386 Address end,
4387 ObjectSlotCallback copy_object_func) {
4388 Address map_aligned_start = MapStartAlign(start);
4389 Address map_aligned_end = MapEndAlign(end);
4390
4391 ASSERT(map_aligned_start == start);
4392 ASSERT(map_aligned_end == end);
4393
4394 IteratePointersToNewSpaceInMaps(heap,
4395 map_aligned_start,
4396 map_aligned_end,
4397 copy_object_func);
4398 }
4399
4400
4401 void Heap::IterateAndMarkPointersToFromSpace(Address start, 4267 void Heap::IterateAndMarkPointersToFromSpace(Address start,
4402 Address end, 4268 Address end,
4403 ObjectSlotCallback callback) { 4269 ObjectSlotCallback callback) {
4404 Address slot_address = start; 4270 Address slot_address = start;
4405 while (slot_address < end) { 4271 while (slot_address < end) {
4406 Object** slot = reinterpret_cast<Object**>(slot_address); 4272 Object** slot = reinterpret_cast<Object**>(slot_address);
4407 Object* object = *slot; 4273 Object* object = *slot;
4408 // If the store buffer becomes overfull we mark pages as being exempt from 4274 // If the store buffer becomes overfull we mark pages as being exempt from
4409 // the store buffer. These pages are scanned to find pointers that point 4275 // the store buffer. These pages are scanned to find pointers that point
4410 // to the new space. In that case we may hit newly promoted objects and 4276 // to the new space. In that case we may hit newly promoted objects and
(...skipping 161 matching lines...) Expand 10 before | Expand all | Expand 10 after
4572 NULL, 4438 NULL,
4573 NULL); 4439 NULL);
4574 } 4440 }
4575 } 4441 }
4576 } 4442 }
4577 4443
4578 4444
4579 #endif 4445 #endif
4580 4446
4581 4447
4582 // This function iterates over all the pointers in a paged space in the heap,
4583 // looking for pointers into new space. Within the pages there may be dead
4584 // objects that have not been overwritten by free spaces or fillers because of
4585 // lazy sweeping. These dead objects may not contain pointers to new space.
4586 // The garbage areas that have been swept properly (these will normally be the
4587 // large ones) will be marked with free space and filler map words. In
4588 // addition any area that has never been used at all for object allocation must
4589 // be marked with a free space or filler. Because the free space and filler
4590 // maps do not move we can always recognize these even after a compaction.
4591 // Normal objects like FixedArrays and JSObjects should not contain references
4592 // to these maps. The special garbage section (see comment in spaces.h) is
4593 // skipped since it can contain absolutely anything. Any objects that are
4594 // allocated during iteration may or may not be visited by the iteration, but
4595 // they will not be partially visited.
4596 void Heap::IteratePointers(
4597 PagedSpace* space,
4598 PointerRegionCallback visit_pointer_region,
4599 ObjectSlotCallback copy_object_func) {
4600
4601 PageIterator pages(space);
4602
4603 while (pages.has_next()) {
4604 Page* page = pages.next();
4605 IteratePointersOnPage(space, visit_pointer_region, copy_object_func, page);
4606 }
4607 }
4608
4609
// Scans a single page of |space| for pointers into new space, calling
// |visit_pointer_region| on each maximal run of words that can contain live
// pointers.  Free space, fillers and the special garbage section between
// space->top() and space->limit() are skipped; see the comment on
// Heap::IteratePointers for the full rationale.
void Heap::IteratePointersOnPage(
    PagedSpace* space,
    PointerRegionCallback visit_pointer_region,
    ObjectSlotCallback copy_object_func,
    Page* page) {
  Address visitable_start = page->ObjectAreaStart();
  Address end_of_page = page->ObjectAreaEnd();

  // [visitable_start, visitable_end) is the run of words accumulated so far
  // that still has to be handed to visit_pointer_region.
  Address visitable_end = visitable_start;

  // TODO(gc) ISOLATES
  Object* free_space_map = HEAP->free_space_map();
  Object* two_pointer_filler_map = HEAP->two_pointer_filler_map();

  while (visitable_end < end_of_page) {
    Object* o = *reinterpret_cast<Object**>(visitable_end);
    // Skip fillers but not things that look like fillers in the special
    // garbage section which can contain anything.
    if (o == free_space_map ||
        o == two_pointer_filler_map ||
        visitable_end == space->top()) {
      if (visitable_start != visitable_end) {
        // After calling this the special garbage section may have moved.
        visit_pointer_region(HEAP,
                             visitable_start,
                             visitable_end,
                             copy_object_func);
        // The callback may have allocated, moving top()/limit(); if the
        // current position is now inside the special garbage section, jump
        // straight past it.
        if (visitable_end >= space->top() && visitable_end < space->limit()) {
          visitable_end = space->limit();
          visitable_start = visitable_end;
          continue;
        }
      }
      if (visitable_end == space->top() && visitable_end != space->limit()) {
        // We are at the start of the special garbage section; resume
        // scanning after it.
        visitable_start = visitable_end = space->limit();
      } else {
        // At this point we are either at the start of a filler or we are at
        // the point where the space->top() used to be before the
        // visit_pointer_region call above.  Either way we can skip the
        // object at the current spot:  We don't promise to visit objects
        // allocated during heap traversal, and if space->top() moved then it
        // must be because an object was allocated at this point.
        visitable_start =
            visitable_end + HeapObject::FromAddress(visitable_end)->Size();
        visitable_end = visitable_start;
      }
    } else {
      // Ordinary word: extend the current visitable run by one slot.
      ASSERT(o != free_space_map);
      ASSERT(o != two_pointer_filler_map);
      ASSERT(visitable_end < space->top() || visitable_end >= space->limit());
      visitable_end += kPointerSize;
    }
  }
  ASSERT(visitable_end == end_of_page);
  // Flush the final accumulated run, if any.
  if (visitable_start != visitable_end) {
    visit_pointer_region(HEAP,
                         visitable_start,
                         visitable_end,
                         copy_object_func);
  }
}
4671
4672
4673 void Heap::IterateRoots(ObjectVisitor* v, VisitMode mode) { 4448 void Heap::IterateRoots(ObjectVisitor* v, VisitMode mode) {
4674 IterateStrongRoots(v, mode); 4449 IterateStrongRoots(v, mode);
4675 IterateWeakRoots(v, mode); 4450 IterateWeakRoots(v, mode);
4676 } 4451 }
4677 4452
4678 4453
4679 void Heap::IterateWeakRoots(ObjectVisitor* v, VisitMode mode) { 4454 void Heap::IterateWeakRoots(ObjectVisitor* v, VisitMode mode) {
4680 v->VisitPointer(reinterpret_cast<Object**>(&roots_[kSymbolTableRootIndex])); 4455 v->VisitPointer(reinterpret_cast<Object**>(&roots_[kSymbolTableRootIndex]));
4681 v->Synchronize("symbol_table"); 4456 v->Synchronize("symbol_table");
4682 if (mode != VISIT_ALL_IN_SCAVENGE && 4457 if (mode != VISIT_ALL_IN_SCAVENGE &&
(...skipping 1324 matching lines...) Expand 10 before | Expand all | Expand 10 after
6007 } 5782 }
6008 5783
6009 5784
6010 void ExternalStringTable::TearDown() { 5785 void ExternalStringTable::TearDown() {
6011 new_space_strings_.Free(); 5786 new_space_strings_.Free();
6012 old_space_strings_.Free(); 5787 old_space_strings_.Free();
6013 } 5788 }
6014 5789
6015 5790
6016 } } // namespace v8::internal 5791 } } // namespace v8::internal
OLDNEW
« no previous file with comments | « src/heap.h ('k') | src/mark-compact.cc » ('j') | src/store-buffer.h » ('J')

Powered by Google App Engine
This is Rietveld 408576698