Chromium Code Reviews

Unified diff: src/mark-compact.cc

Issue 6542047: Basic implementation of incremental marking. (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/gc
Patch Set: Created 9 years, 10 months ago
// Copyright 2006-2008 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
(...skipping 72 matching lines...)


void Marking::TearDown() {
  if (new_space_bitmap_ != NULL) {
    BitmapStorageDescriptor::Free(new_space_bitmap_);
    new_space_bitmap_ = NULL;
  }
}


+#ifdef DEBUG
+class VerifyMarkingVisitor: public ObjectVisitor {
+ public:
+  void VisitPointers(Object** start, Object** end) {
+    for (Object** current = start; current < end; current++) {
+      if ((*current)->IsHeapObject()) {
+        HeapObject* object = HeapObject::cast(*current);
+        ASSERT(Marking::IsMarked(object));
+      }
+    }
+  }
+};
+
Erik Corry 2011/02/22 12:27:19 2 blank lines here and several places.
Vyacheslav Egorov (Chromium) 2011/02/23 14:31:46 Done.
+static void VerifyMarking(Address bottom, Address top) {
+  VerifyMarkingVisitor visitor;
+  HeapObject* object;
+
+  for (Address current = bottom;
+       current < top;
+       current += object->Size()) {
+    object = HeapObject::FromAddress(current);
+    if (Marking::IsMarked(object)) object->Iterate(&visitor);
+  }
+
+}
+
+static void VerifyMarking(Page* p) {
+  VerifyMarking(p->ObjectAreaStart(), p->AllocationTop());
+}
+
+static void VerifyMarking(NewSpace* space) {
+  VerifyMarking(space->bottom(), space->top());
+}
+
+static void VerifyMarking(PagedSpace* space) {
+  PageIterator it(space, PageIterator::PAGES_IN_USE);
+
+  while (it.has_next()) {
+    VerifyMarking(it.next());
+  }
+}
+
+static void VerifyMarking() {
+  VerifyMarking(Heap::old_pointer_space());
+  VerifyMarking(Heap::old_data_space());
+  VerifyMarking(Heap::code_space());
+  VerifyMarking(Heap::cell_space());
+  VerifyMarking(Heap::map_space());
+  VerifyMarking(Heap::new_space());
+
+  VerifyMarkingVisitor visitor;
+  Heap::IterateStrongRoots(&visitor, VISIT_ONLY_STRONG);
+}
+#endif
+
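For orientation: the debug-only VerifyMarking pass added above walks each space linearly and checks the marker's core invariant, namely that every pointer held by a marked object points at a marked object. A minimal self-contained sketch of that invariant over a toy heap (all types here are invented for illustration; none are V8's):

#include <cassert>
#include <cstddef>
#include <vector>

// Toy model: each object has a mark bit and the indices of the objects
// it references.
struct ToyObject {
  bool marked = false;
  std::vector<size_t> refs;
};

// VerifyMarking-style walk: a marked (live) object may only reference
// marked objects; anything else means the marker missed a reachable object.
void VerifyToyMarking(const std::vector<ToyObject>& heap) {
  for (const ToyObject& obj : heap) {
    if (!obj.marked) continue;  // dead objects are skipped, as in the patch
    for (size_t target : obj.refs)
      assert(heap[target].marked && "marked object points to unmarked one");
  }
}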
void MarkCompactCollector::CollectGarbage() {
  // Make sure that Prepare() has been called. The individual steps below will
  // update the state as they proceed.
  ASSERT(state_ == PREPARE_GC);

  // Prepare has selected whether to compact the old generation or not.
  // Tell the tracer.
-  if (IsCompacting()) tracer_->set_is_compacting();
+  // if (IsCompacting()) tracer_->set_is_compacting();
Erik Corry 2011/02/22 12:27:19 Commented code.
Vyacheslav Egorov (Chromium) 2011/02/23 14:31:46 Done.

-  MarkLiveObjects();
+  if (IncrementalMarking::state() == IncrementalMarking::STOPPED) {
+    MarkLiveObjects();
+  } else {
+    {
+      GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_MARK);
+      IncrementalMarking::Finalize();
+      ASSERT(IncrementalMarking::state() == IncrementalMarking::STOPPED);
+    }
+    MarkLiveObjects();
+  }
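The branch above is the pivot of the patch: if the incremental marker never ran, the pause does a full atomic mark as before; if it was in progress, Finalize() first drains the outstanding work so the pause only completes marking instead of redoing it. A hedged sketch of the state machine this implies, using just the two states the patch names (the bodies are placeholders, not V8's implementation):

// Illustrative incremental-marking state machine.
class IncrementalMarkingSketch {
 public:
  enum State { STOPPED, MARKING };

  State state() const { return state_; }

  // Starts marking; further work happens in small mutator-interleaved steps.
  void Start() { state_ = MARKING; }

  // Called from the mark-compact pause: drain remaining grey objects so
  // the collector takes over a consistent, fully marked heap.
  void Finalize() {
    // ... process the rest of the marking stack ...
    state_ = STOPPED;
  }

 private:
  State state_ = STOPPED;
};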

-  if (FLAG_collect_maps) ClearNonLiveTransitions();
+#ifdef DEBUG
+  VerifyMarking();
+#endif
+  // if (FLAG_collect_maps) ClearNonLiveTransitions();

  SweepSpaces();

  PcToCodeCache::FlushPcToCodeCache();

  Finish();

  // Check that swept all marked objects and
  // null out the GC tracer.
  // TODO(gc) does not work with conservative sweeping.
(...skipping 32 matching lines...)
}

static void ClearMarkbits() {
  // We are sweeping code and map spaces precisely so clearing is not required.
  ClearMarkbits(Heap::old_pointer_space());
  ClearMarkbits(Heap::old_data_space());
  ClearMarkbits(Heap::cell_space());
}


+void Marking::TransferMark(Address old_start, Address new_start) {
+  if (IncrementalMarking::state() == IncrementalMarking::MARKING) {
+    if (IncrementalMarking::IsBlack(HeapObject::FromAddress(old_start))) {
+      IncrementalMarking::MarkBlack(HeapObject::FromAddress(new_start));
+    } else if (IncrementalMarking::IsGrey(HeapObject::FromAddress(old_start))) {
+      IncrementalMarking::WhiteToGrey(HeapObject::FromAddress(new_start));
+      // TODO(gc): if we shift a huge array in the loop we might end up
+      // pushing too much to the marking stack. Maybe we should check one
+      // or two elements on top of it to see whether they are equal to
+      // old_start.
Erik Corry 2011/02/22 12:27:19 Or perhaps we should compact the marking stack by
Vyacheslav Egorov (Chromium) 2011/02/23 14:31:46 We will think about it.
+    }
+  } else {
+    if (Heap::InNewSpace(old_start) ||
+        Page::FromAddress(old_start)->IsFlagSet(Page::IS_CONTINUOUS) ||
+        !IsMarked(old_start)) {
+      return;
+    }
+    SetMark(new_start);
+  }
+}
+
+
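TransferMark keeps an object's tri-color state consistent when its payload moves to a new address (for example when an array is trimmed in place): black stays black, grey is re-greyed at the new address and pushed so its fields get rescanned, and white needs nothing. A self-contained sketch of that rule under the usual tri-color model (types invented for illustration):

#include <vector>

// white = unreached, grey = reached but unscanned, black = fully scanned.
enum Color { WHITE, GREY, BLACK };
struct ObjSketch { Color color = WHITE; };

void TransferColor(const ObjSketch& old_obj, ObjSketch* new_obj,
                   std::vector<ObjSketch*>* marking_stack) {
  if (old_obj.color == BLACK) {
    new_obj->color = BLACK;             // already scanned: nothing to redo
  } else if (old_obj.color == GREY) {
    new_obj->color = GREY;              // fields still need scanning...
    marking_stack->push_back(new_obj);  // ...so revisit at the new address
  }
}

The TODO in the patch notes the cost of the grey case: repeatedly shifting a huge array can push the same region onto the marking stack many times.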
void MarkCompactCollector::Prepare(GCTracer* tracer) {
  FLAG_flush_code = false;
  FLAG_always_compact = false;
  FLAG_never_compact = true;

+  // Disable collection of maps if incremental marking is enabled.
+  // TODO(gc) improve maps collection algorithm to work with incremental
+  // marking.
+  if (FLAG_incremental_marking) FLAG_collect_maps = false;
+
  // Rather than passing the tracer around we stash it in a static member
  // variable.
  tracer_ = tracer;

#ifdef DEBUG
  ASSERT(state_ == IDLE);
  state_ = PREPARE_GC;
#endif
  ASSERT(!FLAG_always_compact || !FLAG_never_compact);

  compacting_collection_ =
      FLAG_always_compact || force_compaction_ || compact_on_next_gc_;
  compact_on_next_gc_ = false;

  if (FLAG_never_compact) compacting_collection_ = false;
-  if (!Heap::map_space()->MapPointersEncodable())
+  if (!Heap::map_space()->MapPointersEncodable()) {
    compacting_collection_ = false;
+  }
  if (FLAG_collect_maps) CreateBackPointers();
#ifdef ENABLE_GDB_JIT_INTERFACE
  if (FLAG_gdbjit) {
    // If GDBJIT interface is active disable compaction.
    compacting_collection_ = false;
  }
#endif

  PagedSpaces spaces;
  for (PagedSpace* space = spaces.next();
       space != NULL; space = spaces.next()) {
    space->PrepareForMarkCompact(compacting_collection_);
  }
+  if (IncrementalMarking::state() == IncrementalMarking::STOPPED) {
    Address new_space_top = Heap::new_space()->top();
    Address new_space_bottom = Heap::new_space()->bottom();

    Marking::ClearRange(new_space_bottom,
                        static_cast<int>(new_space_top - new_space_bottom));

    ClearMarkbits();
+#ifdef DEBUG
+    VerifyMarkbitsAreClean();
+#endif
+  }
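Note the new guard: mark bits are cleared only when incremental marking is STOPPED, because marks accumulated by a running incremental marker must survive into the pause. For a feel of what a ClearRange over a mark bitmap involves, here is a rough sketch assuming one mark bit per pointer-sized word (the layout is an assumption, not necessarily this branch's):

#include <cstddef>
#include <cstdint>

// Zero the mark bits covering words [start_word, start_word + word_count).
void ClearRangeSketch(uint8_t* bitmap, size_t start_word, size_t word_count) {
  for (size_t i = start_word; i < start_word + word_count; i++) {
    bitmap[i >> 3] &= static_cast<uint8_t>(~(1u << (i & 7)));
  }
}

A real implementation would clear whole bytes or words in the middle of the range and only mask at the edges.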

#ifdef DEBUG
-  VerifyMarkbitsAreClean();
-
  live_bytes_ = 0;
  live_young_objects_size_ = 0;
  live_old_pointer_objects_size_ = 0;
  live_old_data_objects_size_ = 0;
  live_code_objects_size_ = 0;
  live_map_objects_size_ = 0;
  live_cell_objects_size_ = 0;
#endif
}
(...skipping 324 matching lines...)
    MarkCompactCollector::MarkObject(code);
  }
}

static void VisitGlobalPropertyCell(RelocInfo* rinfo) {
  ASSERT(rinfo->rmode() == RelocInfo::GLOBAL_PROPERTY_CELL);
  Object* cell = rinfo->target_cell();
  Object* old_cell = cell;
  VisitPointer(&cell);
  if (cell != old_cell) {
-    rinfo->set_target_cell(reinterpret_cast<JSGlobalPropertyCell*>(cell));
+    rinfo->set_target_cell(reinterpret_cast<JSGlobalPropertyCell*>(cell), NULL);
  }
}

static inline void VisitDebugTarget(RelocInfo* rinfo) {
  ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
          rinfo->IsPatchedReturnSequence()) ||
         (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
          rinfo->IsPatchedDebugBreakSlotSequence()));
  HeapObject* code = Code::GetCodeFromTargetAddress(rinfo->call_address());
  MarkCompactCollector::MarkObject(code);
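The extra NULL argument to set_target_cell reflects a widened setter signature on this branch; the new parameter is simply unused here. More generally, incremental marking needs a write barrier on pointer stores so that a black (fully scanned) object can never silently acquire a pointer to a white (unreached) one. A generic sketch of such a barrier, reusing the tri-color toy model (hypothetical code, not V8's RecordWrite):

#include <vector>

enum Color { WHITE, GREY, BLACK };
struct ObjSketch { Color color = WHITE; };

// Restores the tri-color invariant after a store of `value` into `host`.
void RecordWriteSketch(ObjSketch* host, ObjSketch* value,
                       std::vector<ObjSketch*>* marking_stack) {
  if (host->color == BLACK && value->color == WHITE) {
    value->color = GREY;              // re-grey the target...
    marking_stack->push_back(value);  // ...so the marker will scan it
  }
}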
(...skipping 226 matching lines...)
    Object* old_code = code;
    VisitPointer(&code);
    if (code != old_code) {
      Memory::Address_at(entry_address) =
          reinterpret_cast<Code*>(code)->entry();
    }
  }


static void VisitJSFunctionAndFlushCode(Map* map, HeapObject* object) {
+  UNREACHABLE();
Erik Corry 2011/02/22 12:27:19 TODO?
Vyacheslav Egorov (Chromium) 2011/02/23 14:31:46 Done.
+#if 0
Erik Corry 2011/02/22 12:27:19 Commented code.
Vyacheslav Egorov (Chromium) 2011/02/23 14:31:46 Done.
  JSFunction* jsfunction = reinterpret_cast<JSFunction*>(object);
  // The function must have a valid context and not be a builtin.
  bool flush_code_candidate = false;
  if (IsValidNotBuiltinContext(jsfunction->unchecked_context())) {
    flush_code_candidate = FlushCodeForFunction(jsfunction);
  }

  if (!flush_code_candidate) {
    MarkCompactCollector::MarkObject(
        jsfunction->unchecked_shared()->unchecked_code());
(...skipping 14 matching lines...)
      JSFunction* inlined = reinterpret_cast<JSFunction*>(literals->get(i));
      MarkCompactCollector::MarkObject(
          inlined->unchecked_shared()->unchecked_code());
    }
  }

  VisitJSFunctionFields(map,
                        reinterpret_cast<JSFunction*>(object),
                        flush_code_candidate);
+#endif
}


static void VisitJSFunction(Map* map, HeapObject* object) {
  VisitJSFunctionFields(map,
                        reinterpret_cast<JSFunction*>(object),
                        false);
}


(...skipping 226 matching lines...)


void MarkCompactCollector::ProcessNewlyMarkedObject(HeapObject* object) {
  ASSERT(Marking::IsMarked(object));
  ASSERT(Heap::Contains(object));
  if (object->IsMap()) {
    Map* map = Map::cast(object);
    if (FLAG_cleanup_caches_in_maps_at_gc) {
      map->ClearCodeCache();
    }
-    if (FLAG_collect_maps &&
+    /*if (FLAG_collect_maps &&
Erik Corry 2011/02/22 12:27:19 Commented code. TODO?
Vyacheslav Egorov (Chromium) 2011/02/23 14:31:46 Done.
        map->instance_type() >= FIRST_JS_OBJECT_TYPE &&
        map->instance_type() <= JS_FUNCTION_TYPE) {
+      UNREACHABLE();
      MarkMapContents(map);
-    } else {
+    } else {*/
      marking_stack.Push(map);
-    }
+    /*}*/
  } else {
    marking_stack.Push(object);
  }
}


void MarkCompactCollector::MarkMapContents(Map* map) {
  MarkDescriptorArray(reinterpret_cast<DescriptorArray*>(
      *HeapObject::RawField(map, Map::kInstanceDescriptorsOffset)));

(...skipping 289 matching lines...)
  GlobalHandles::IterateWeakRoots(&root_visitor);
  while (marking_stack.overflowed()) {
    RefillMarkingStack();
    EmptyMarkingStack();
  }

  // Repeat the object groups to mark unmarked groups reachable from the
  // weak roots.
  ProcessObjectGroups();

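The while loop above is the standard recovery protocol for a bounded marking stack: a push onto a full stack is dropped and an overflow flag is set; RefillMarkingStack then rescans the heap for marked-but-unscanned objects, and EmptyMarkingStack drains them, repeating until no overflow remains. A sketch of a stack with that contract (the layout is assumed, not V8's):

#include <cstddef>

template <typename T, size_t kCapacity>
class BoundedMarkingStack {
 public:
  void Push(T value) {
    if (top_ == kCapacity) {
      overflowed_ = true;  // entry dropped; a later heap rescan finds it
      return;
    }
    items_[top_++] = value;
  }
  bool Pop(T* out) {
    if (top_ == 0) return false;
    *out = items_[--top_];
    return true;
  }
  bool overflowed() const { return overflowed_; }
  void clear_overflowed() { overflowed_ = false; }

 private:
  T items_[kCapacity];
  size_t top_ = 0;
  bool overflowed_ = false;
};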
+  AfterMarking();
+}
+
+
+void MarkCompactCollector::AfterMarking() {
  // Prune the symbol table removing all symbols only pointed to by the
  // symbol table. Cannot use symbol_table() here because the symbol
  // table is marked.
  SymbolTable* symbol_table = Heap::raw_unchecked_symbol_table();
  SymbolTableCleaner v;
  symbol_table->IterateElements(&v);
  symbol_table->ElementsRemoved(v.PointersRemoved());
  ExternalStringTable::Iterate(&v);
  ExternalStringTable::CleanUp();

  // Process the weak references.
  MarkCompactWeakObjectRetainer mark_compact_object_retainer;
  Heap::ProcessWeakReferences(&mark_compact_object_retainer);

  // Remove object groups after marking phase.
  GlobalHandles::RemoveObjectGroups();

  // Flush code from collected candidates.
-  FlushCode::ProcessCandidates();
+  // FlushCode::ProcessCandidates();
Erik Corry 2011/02/22 12:27:19 Commented code and missing TODO.
Vyacheslav Egorov (Chromium) 2011/02/23 14:31:46 Done.
}


#ifdef DEBUG
void MarkCompactCollector::UpdateLiveObjectCount(HeapObject* obj) {
  live_bytes_ += obj->Size();
  if (Heap::new_space()->Contains(obj)) {
    live_young_objects_size_ += obj->Size();
  } else if (Heap::map_space()->Contains(obj)) {
    ASSERT(obj->IsMap());
(...skipping 139 matching lines...)
void VisitPointers(Object** start, Object** end) {
  for (Object** p = start; p < end; p++) {
    StaticPointersToNewGenUpdatingVisitor::VisitPointer(p);
  }
}

void VisitCodeTarget(RelocInfo* rinfo) {
  ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
  Object* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  VisitPointer(&target);
-  rinfo->set_target_address(Code::cast(target)->instruction_start());
+  rinfo->set_target_address(Code::cast(target)->instruction_start(), NULL);
}

void VisitDebugTarget(RelocInfo* rinfo) {
  ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
          rinfo->IsPatchedReturnSequence()) ||
         (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
          rinfo->IsPatchedDebugBreakSlotSequence()));
  Object* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
  VisitPointer(&target);
  rinfo->set_call_address(Code::cast(target)->instruction_start());
(...skipping 309 matching lines...)
      Marking::ClearMark(object);
      MarkCompactCollector::tracer()->decrement_marked_count();

      if (!is_previous_alive) {  // Transition from free to live.
        space->DeallocateBlock(free_start,
                               static_cast<int>(current - free_start),
                               true);
        is_previous_alive = true;
      }
    } else {
+      ASSERT((current + kPointerSize) >= p->AllocationTop() ||
+             object->Size() == 4 ||
Erik Corry 2011/02/22 12:27:19 kPointerSize?
Vyacheslav Egorov (Chromium) 2011/02/23 14:31:46 Done.
+             IncrementalMarking::IsWhite(object));
      MarkCompactCollector::ReportDeleteIfNeeded(object);
      if (is_previous_alive) {  // Transition from live to free.
        free_start = current;
        is_previous_alive = false;
      }
    }
  }

  if (!is_previous_alive) *last_free_start = free_start;
}
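This chunk belongs to the sweeping loop that coalesces runs of dead objects into free blocks via free_start and is_previous_alive; the new ASSERT additionally insists that any dead object seen here looks white to the incremental marker, apart from the end-of-page and minimal-size cases its first two clauses allow. The two-state walk itself can be sketched over a toy page (helpers invented for illustration):

#include <cstddef>
#include <utility>
#include <vector>

struct Slot { bool live; };  // stand-in for one object on a page

// Coalesce consecutive dead slots into [start, end) free blocks, mirroring
// the free_start / is_previous_alive bookkeeping above.
void SweepPageSketch(const std::vector<Slot>& page,
                     std::vector<std::pair<size_t, size_t> >* free_blocks) {
  bool previous_alive = true;
  size_t free_start = 0;
  for (size_t i = 0; i < page.size(); i++) {
    if (page[i].live) {
      if (!previous_alive) {  // free -> live transition
        free_blocks->push_back(std::make_pair(free_start, i));
        previous_alive = true;
      }
    } else if (previous_alive) {  // live -> free transition
      free_start = i;
      previous_alive = false;
    }
  }
  if (!previous_alive)
    free_blocks->push_back(std::make_pair(free_start, page.size()));
}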
(...skipping 122 matching lines...)
    space->SetTop(new_allocation_top);
  }
}


void MarkCompactCollector::SweepSpaces() {
  GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP);
#ifdef DEBUG
  state_ = SWEEP_SPACES;
#endif
+
+#ifndef DEBUG
+  SweeperType fast_sweeper = CONSERVATIVE;
+#else
+  SweeperType fast_sweeper = PRECISE;
+#endif
+
  ASSERT(!IsCompacting());
  // Noncompacting collections simply sweep the spaces to clear the mark
  // bits and free the nonlive blocks (for old and map spaces). We sweep
  // the map space last because freeing non-live maps overwrites them and
  // the other spaces rely on possibly non-live maps to get the sizes for
  // non-live objects.
-  SweepSpace(Heap::old_pointer_space(), CONSERVATIVE);
-  SweepSpace(Heap::old_data_space(), CONSERVATIVE);
+  SweepSpace(Heap::old_pointer_space(), fast_sweeper);
+  SweepSpace(Heap::old_data_space(), fast_sweeper);
  SweepSpace(Heap::code_space(), PRECISE);
  // TODO(gc): implement specialized sweeper for cell space.
-  SweepSpace(Heap::cell_space(), CONSERVATIVE);
+  SweepSpace(Heap::cell_space(), fast_sweeper);
  { GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP_NEWSPACE);
    SweepNewSpace(Heap::new_space());
  }
  // TODO(gc): ClearNonLiveTransitions depends on precise sweeping of
  // map space to detect whether unmarked map became dead in this
  // collection or in one of the previous ones.
  // TODO(gc): Implement specialized sweeper for map space.
  SweepSpace(Heap::map_space(), PRECISE);

  ASSERT(live_map_objects_size_ <= Heap::map_space()->Size());
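The fast_sweeper selection above means release builds sweep the old, data, and cell spaces conservatively while debug builds sweep everything precisely: a precise sweep frees every dead object individually, whereas a conservative sweep only reclaims sufficiently large dead runs and may leave small holes in place, trading completeness for speed. A toy comparison of the two policies (threshold and structures invented for illustration):

#include <cstddef>
#include <vector>

struct Chunk { bool live; size_t size; };

// Precise: reclaim every dead chunk, however small.
size_t PreciseSweepSketch(const std::vector<Chunk>& page) {
  size_t reclaimed = 0;
  for (size_t i = 0; i < page.size(); i++)
    if (!page[i].live) reclaimed += page[i].size;
  return reclaimed;
}

// Conservative: reclaim only dead runs of at least min_free_size bytes,
// skipping small holes to keep sweeping cheap.
size_t ConservativeSweepSketch(const std::vector<Chunk>& page,
                               size_t min_free_size) {
  size_t reclaimed = 0, run = 0;
  for (size_t i = 0; i < page.size(); i++) {
    if (!page[i].live) {
      run += page[i].size;
    } else {
      if (run >= min_free_size) reclaimed += run;
      run = 0;
    }
  }
  if (run >= min_free_size) reclaimed += run;
  return reclaimed;
}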
(...skipping 72 matching lines...)
}


void MarkCompactCollector::Initialize() {
  StaticPointersToNewGenUpdatingVisitor::Initialize();
  StaticMarkingVisitor::Initialize();
}


} }  // namespace v8::internal