| OLD | NEW |
| 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 26 matching lines...) Expand all Loading... |
| 37 #include "mark-compact.h" | 37 #include "mark-compact.h" |
| 38 #include "objects-visiting.h" | 38 #include "objects-visiting.h" |
| 39 #include "stub-cache.h" | 39 #include "stub-cache.h" |
| 40 | 40 |
| 41 namespace v8 { | 41 namespace v8 { |
| 42 namespace internal { | 42 namespace internal { |
| 43 | 43 |
| 44 // ------------------------------------------------------------------------- | 44 // ------------------------------------------------------------------------- |
| 45 // MarkCompactCollector | 45 // MarkCompactCollector |
| 46 | 46 |
| 47 bool MarkCompactCollector::force_compaction_ = false; | 47 MarkCompactCollector::MarkCompactCollector() : // NOLINT |
| 48 bool MarkCompactCollector::compacting_collection_ = false; | |
| 49 bool MarkCompactCollector::compact_on_next_gc_ = false; | |
| 50 | |
| 51 int MarkCompactCollector::previous_marked_count_ = 0; | |
| 52 GCTracer* MarkCompactCollector::tracer_ = NULL; | |
| 53 | |
| 54 | |
| 55 #ifdef DEBUG | 48 #ifdef DEBUG |
| 56 MarkCompactCollector::CollectorState MarkCompactCollector::state_ = IDLE; | 49 state_(IDLE), |
| 57 | |
| 58 // Counters used for debugging the marking phase of mark-compact or mark-sweep | |
| 59 // collection. | |
| 60 int MarkCompactCollector::live_bytes_ = 0; | |
| 61 int MarkCompactCollector::live_young_objects_size_ = 0; | |
| 62 int MarkCompactCollector::live_old_data_objects_size_ = 0; | |
| 63 int MarkCompactCollector::live_old_pointer_objects_size_ = 0; | |
| 64 int MarkCompactCollector::live_code_objects_size_ = 0; | |
| 65 int MarkCompactCollector::live_map_objects_size_ = 0; | |
| 66 int MarkCompactCollector::live_cell_objects_size_ = 0; | |
| 67 int MarkCompactCollector::live_lo_objects_size_ = 0; | |
| 68 #endif | 50 #endif |
| 51 force_compaction_(false), |
| 52 compacting_collection_(false), |
| 53 compact_on_next_gc_(false), |
| 54 previous_marked_count_(0), |
| 55 tracer_(NULL), |
| 56 #ifdef DEBUG |
| 57 live_young_objects_size_(0), |
| 58 live_old_pointer_objects_size_(0), |
| 59 live_old_data_objects_size_(0), |
| 60 live_code_objects_size_(0), |
| 61 live_map_objects_size_(0), |
| 62 live_cell_objects_size_(0), |
| 63 live_lo_objects_size_(0), |
| 64 live_bytes_(0), |
| 65 #endif |
| 66 heap_(NULL), |
| 67 code_flusher_(NULL) { } |
| 69 | 68 |
| 70 | 69 |
| 71 void MarkCompactCollector::CollectGarbage() { | 70 void MarkCompactCollector::CollectGarbage() { |
| 72 // Make sure that Prepare() has been called. The individual steps below will | 71 // Make sure that Prepare() has been called. The individual steps below will |
| 73 // update the state as they proceed. | 72 // update the state as they proceed. |
| 74 ASSERT(state_ == PREPARE_GC); | 73 ASSERT(state_ == PREPARE_GC); |
| 75 | 74 |
| 76 // Prepare has selected whether to compact the old generation or not. | 75 // Prepare has selected whether to compact the old generation or not. |
| 77 // Tell the tracer. | 76 // Tell the tracer. |
| 78 if (IsCompacting()) tracer_->set_is_compacting(); | 77 if (IsCompacting()) tracer_->set_is_compacting(); |
| 79 | 78 |
| 80 MarkLiveObjects(); | 79 MarkLiveObjects(); |
| 81 | 80 |
| 82 if (FLAG_collect_maps) ClearNonLiveTransitions(); | 81 if (FLAG_collect_maps) ClearNonLiveTransitions(); |
| 83 | 82 |
| 84 SweepLargeObjectSpace(); | 83 SweepLargeObjectSpace(); |
| 85 | 84 |
| 86 if (IsCompacting()) { | 85 if (IsCompacting()) { |
| 87 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_COMPACT); | 86 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_COMPACT); |
| 88 EncodeForwardingAddresses(); | 87 EncodeForwardingAddresses(); |
| 89 | 88 |
| 90 Heap::MarkMapPointersAsEncoded(true); | 89 heap_->MarkMapPointersAsEncoded(true); |
| 91 UpdatePointers(); | 90 UpdatePointers(); |
| 92 Heap::MarkMapPointersAsEncoded(false); | 91 heap_->MarkMapPointersAsEncoded(false); |
| 93 PcToCodeCache::FlushPcToCodeCache(); | 92 heap_->isolate()->pc_to_code_cache()->Flush(); |
| 94 | 93 |
| 95 RelocateObjects(); | 94 RelocateObjects(); |
| 96 } else { | 95 } else { |
| 97 SweepSpaces(); | 96 SweepSpaces(); |
| 98 PcToCodeCache::FlushPcToCodeCache(); | 97 heap_->isolate()->pc_to_code_cache()->Flush(); |
| 99 } | 98 } |
| 100 | 99 |
| 101 Finish(); | 100 Finish(); |
| 102 | 101 |
| 103 // Save the count of marked objects remaining after the collection and | 102 // Save the count of marked objects remaining after the collection and |
| 104 // null out the GC tracer. | 103 // null out the GC tracer. |
| 105 previous_marked_count_ = tracer_->marked_count(); | 104 previous_marked_count_ = tracer_->marked_count(); |
| 106 ASSERT(previous_marked_count_ == 0); | 105 ASSERT(previous_marked_count_ == 0); |
| 107 tracer_ = NULL; | 106 tracer_ = NULL; |
| 108 } | 107 } |
| 109 | 108 |
| 110 | 109 |
| 111 void MarkCompactCollector::Prepare(GCTracer* tracer) { | 110 void MarkCompactCollector::Prepare(GCTracer* tracer) { |
| 112 // Rather than passing the tracer around we stash it in a static member | 111 // Rather than passing the tracer around we stash it in a static member |
| 113 // variable. | 112 // variable. |
| 114 tracer_ = tracer; | 113 tracer_ = tracer; |
| 115 | 114 |
| 116 #ifdef DEBUG | 115 #ifdef DEBUG |
| 117 ASSERT(state_ == IDLE); | 116 ASSERT(state_ == IDLE); |
| 118 state_ = PREPARE_GC; | 117 state_ = PREPARE_GC; |
| 119 #endif | 118 #endif |
| 120 ASSERT(!FLAG_always_compact || !FLAG_never_compact); | 119 ASSERT(!FLAG_always_compact || !FLAG_never_compact); |
| 121 | 120 |
| 122 compacting_collection_ = | 121 compacting_collection_ = |
| 123 FLAG_always_compact || force_compaction_ || compact_on_next_gc_; | 122 FLAG_always_compact || force_compaction_ || compact_on_next_gc_; |
| 124 compact_on_next_gc_ = false; | 123 compact_on_next_gc_ = false; |
| 125 | 124 |
| 126 if (FLAG_never_compact) compacting_collection_ = false; | 125 if (FLAG_never_compact) compacting_collection_ = false; |
| 127 if (!Heap::map_space()->MapPointersEncodable()) | 126 if (!HEAP->map_space()->MapPointersEncodable()) |
| 128 compacting_collection_ = false; | 127 compacting_collection_ = false; |
| 129 if (FLAG_collect_maps) CreateBackPointers(); | 128 if (FLAG_collect_maps) CreateBackPointers(); |
| 130 #ifdef ENABLE_GDB_JIT_INTERFACE | 129 #ifdef ENABLE_GDB_JIT_INTERFACE |
| 131 if (FLAG_gdbjit) { | 130 if (FLAG_gdbjit) { |
| 132 // If GDBJIT interface is active disable compaction. | 131 // If GDBJIT interface is active disable compaction. |
| 133 compacting_collection_ = false; | 132 compacting_collection_ = false; |
| 134 } | 133 } |
| 135 #endif | 134 #endif |
| 136 | 135 |
| 137 PagedSpaces spaces; | 136 PagedSpaces spaces; |
| (...skipping 17 matching lines...) Expand all Loading... |
| 155 | 154 |
| 156 void MarkCompactCollector::Finish() { | 155 void MarkCompactCollector::Finish() { |
| 157 #ifdef DEBUG | 156 #ifdef DEBUG |
| 158 ASSERT(state_ == SWEEP_SPACES || state_ == RELOCATE_OBJECTS); | 157 ASSERT(state_ == SWEEP_SPACES || state_ == RELOCATE_OBJECTS); |
| 159 state_ = IDLE; | 158 state_ = IDLE; |
| 160 #endif | 159 #endif |
| 161 // The stub cache is not traversed during GC; clear the cache to | 160 // The stub cache is not traversed during GC; clear the cache to |
| 162 // force lazy re-initialization of it. This must be done after the | 161 // force lazy re-initialization of it. This must be done after the |
| 163 // GC, because it relies on the new address of certain old space | 162 // GC, because it relies on the new address of certain old space |
| 164 // objects (empty string, illegal builtin). | 163 // objects (empty string, illegal builtin). |
| 165 StubCache::Clear(); | 164 Isolate::Current()->stub_cache()->Clear(); |
| 166 | 165 |
| 167 ExternalStringTable::CleanUp(); | 166 heap_->external_string_table_.CleanUp(); |
| 168 | 167 |
| 169 // If we've just compacted old space there's no reason to check the | 168 // If we've just compacted old space there's no reason to check the |
| 170 // fragmentation limit. Just return. | 169 // fragmentation limit. Just return. |
| 171 if (HasCompacted()) return; | 170 if (HasCompacted()) return; |
| 172 | 171 |
| 173 // We compact the old generation on the next GC if it has gotten too | 172 // We compact the old generation on the next GC if it has gotten too |
| 174 // fragmented (ie, we could recover an expected amount of space by | 173 // fragmented (ie, we could recover an expected amount of space by |
| 175 // reclaiming the waste and free list blocks). | 174 // reclaiming the waste and free list blocks). |
| 176 static const int kFragmentationLimit = 15; // Percent. | 175 static const int kFragmentationLimit = 15; // Percent. |
| 177 static const int kFragmentationAllowed = 1 * MB; // Absolute. | 176 static const int kFragmentationAllowed = 1 * MB; // Absolute. |
| (...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 214 // overflow flag. When the overflow flag is set, we continue marking objects | 213 // overflow flag. When the overflow flag is set, we continue marking objects |
| 215 // reachable from the objects on the marking stack, but no longer push them on | 214 // reachable from the objects on the marking stack, but no longer push them on |
| 216 // the marking stack. Instead, we mark them as both marked and overflowed. | 215 // the marking stack. Instead, we mark them as both marked and overflowed. |
| 217 // When the stack is in the overflowed state, objects marked as overflowed | 216 // When the stack is in the overflowed state, objects marked as overflowed |
| 218 // have been reached and marked but their children have not been visited yet. | 217 // have been reached and marked but their children have not been visited yet. |
| 219 // After emptying the marking stack, we clear the overflow flag and traverse | 218 // After emptying the marking stack, we clear the overflow flag and traverse |
| 220 // the heap looking for objects marked as overflowed, push them on the stack, | 219 // the heap looking for objects marked as overflowed, push them on the stack, |
| 221 // and continue with marking. This process repeats until all reachable | 220 // and continue with marking. This process repeats until all reachable |
| 222 // objects have been marked. | 221 // objects have been marked. |
| 223 | 222 |
| 224 static MarkingStack marking_stack; | 223 class CodeFlusher { |
| 224 public: |
| 225 explicit CodeFlusher(Isolate* isolate) |
| 226 : isolate_(isolate), |
| 227 jsfunction_candidates_head_(NULL), |
| 228 shared_function_info_candidates_head_(NULL) {} |
| 225 | 229 |
| 226 class FlushCode : public AllStatic { | 230 void AddCandidate(SharedFunctionInfo* shared_info) { |
| 227 public: | |
| 228 static void AddCandidate(SharedFunctionInfo* shared_info) { | |
| 229 SetNextCandidate(shared_info, shared_function_info_candidates_head_); | 231 SetNextCandidate(shared_info, shared_function_info_candidates_head_); |
| 230 shared_function_info_candidates_head_ = shared_info; | 232 shared_function_info_candidates_head_ = shared_info; |
| 231 } | 233 } |
| 232 | 234 |
| 233 | 235 void AddCandidate(JSFunction* function) { |
| 234 static void AddCandidate(JSFunction* function) { | |
| 235 ASSERT(function->unchecked_code() == | 236 ASSERT(function->unchecked_code() == |
| 236 function->unchecked_shared()->unchecked_code()); | 237 function->unchecked_shared()->unchecked_code()); |
| 237 | 238 |
| 238 SetNextCandidate(function, jsfunction_candidates_head_); | 239 SetNextCandidate(function, jsfunction_candidates_head_); |
| 239 jsfunction_candidates_head_ = function; | 240 jsfunction_candidates_head_ = function; |
| 240 } | 241 } |
| 241 | 242 |
| 242 | 243 void ProcessCandidates() { |
| 243 static void ProcessCandidates() { | |
| 244 ProcessSharedFunctionInfoCandidates(); | 244 ProcessSharedFunctionInfoCandidates(); |
| 245 ProcessJSFunctionCandidates(); | 245 ProcessJSFunctionCandidates(); |
| 246 } | 246 } |
| 247 | 247 |
| 248 private: | 248 private: |
| 249 static void ProcessJSFunctionCandidates() { | 249 void ProcessJSFunctionCandidates() { |
| 250 Code* lazy_compile = Builtins::builtin(Builtins::LazyCompile); | 250 Code* lazy_compile = isolate_->builtins()->builtin(Builtins::LazyCompile); |
| 251 | 251 |
| 252 JSFunction* candidate = jsfunction_candidates_head_; | 252 JSFunction* candidate = jsfunction_candidates_head_; |
| 253 JSFunction* next_candidate; | 253 JSFunction* next_candidate; |
| 254 while (candidate != NULL) { | 254 while (candidate != NULL) { |
| 255 next_candidate = GetNextCandidate(candidate); | 255 next_candidate = GetNextCandidate(candidate); |
| 256 | 256 |
| 257 SharedFunctionInfo* shared = candidate->unchecked_shared(); | 257 SharedFunctionInfo* shared = candidate->unchecked_shared(); |
| 258 | 258 |
| 259 Code* code = shared->unchecked_code(); | 259 Code* code = shared->unchecked_code(); |
| 260 if (!code->IsMarked()) { | 260 if (!code->IsMarked()) { |
| 261 shared->set_code(lazy_compile); | 261 shared->set_code(lazy_compile); |
| 262 candidate->set_code(lazy_compile); | 262 candidate->set_code(lazy_compile); |
| 263 } else { | 263 } else { |
| 264 candidate->set_code(shared->unchecked_code()); | 264 candidate->set_code(shared->unchecked_code()); |
| 265 } | 265 } |
| 266 | 266 |
| 267 candidate = next_candidate; | 267 candidate = next_candidate; |
| 268 } | 268 } |
| 269 | 269 |
| 270 jsfunction_candidates_head_ = NULL; | 270 jsfunction_candidates_head_ = NULL; |
| 271 } | 271 } |
| 272 | 272 |
| 273 | 273 |
| 274 static void ProcessSharedFunctionInfoCandidates() { | 274 void ProcessSharedFunctionInfoCandidates() { |
| 275 Code* lazy_compile = Builtins::builtin(Builtins::LazyCompile); | 275 Code* lazy_compile = isolate_->builtins()->builtin(Builtins::LazyCompile); |
| 276 | 276 |
| 277 SharedFunctionInfo* candidate = shared_function_info_candidates_head_; | 277 SharedFunctionInfo* candidate = shared_function_info_candidates_head_; |
| 278 SharedFunctionInfo* next_candidate; | 278 SharedFunctionInfo* next_candidate; |
| 279 while (candidate != NULL) { | 279 while (candidate != NULL) { |
| 280 next_candidate = GetNextCandidate(candidate); | 280 next_candidate = GetNextCandidate(candidate); |
| 281 SetNextCandidate(candidate, NULL); | 281 SetNextCandidate(candidate, NULL); |
| 282 | 282 |
| 283 Code* code = candidate->unchecked_code(); | 283 Code* code = candidate->unchecked_code(); |
| 284 if (!code->IsMarked()) { | 284 if (!code->IsMarked()) { |
| 285 candidate->set_code(lazy_compile); | 285 candidate->set_code(lazy_compile); |
| 286 } | 286 } |
| 287 | 287 |
| 288 candidate = next_candidate; | 288 candidate = next_candidate; |
| 289 } | 289 } |
| 290 | 290 |
| 291 shared_function_info_candidates_head_ = NULL; | 291 shared_function_info_candidates_head_ = NULL; |
| 292 } | 292 } |
| 293 | 293 |
| 294 | |
| 295 static JSFunction** GetNextCandidateField(JSFunction* candidate) { | 294 static JSFunction** GetNextCandidateField(JSFunction* candidate) { |
| 296 return reinterpret_cast<JSFunction**>( | 295 return reinterpret_cast<JSFunction**>( |
| 297 candidate->address() + JSFunction::kCodeEntryOffset); | 296 candidate->address() + JSFunction::kCodeEntryOffset); |
| 298 } | 297 } |
| 299 | 298 |
| 300 | |
| 301 static JSFunction* GetNextCandidate(JSFunction* candidate) { | 299 static JSFunction* GetNextCandidate(JSFunction* candidate) { |
| 302 return *GetNextCandidateField(candidate); | 300 return *GetNextCandidateField(candidate); |
| 303 } | 301 } |
| 304 | 302 |
| 305 | |
| 306 static void SetNextCandidate(JSFunction* candidate, | 303 static void SetNextCandidate(JSFunction* candidate, |
| 307 JSFunction* next_candidate) { | 304 JSFunction* next_candidate) { |
| 308 *GetNextCandidateField(candidate) = next_candidate; | 305 *GetNextCandidateField(candidate) = next_candidate; |
| 309 } | 306 } |
| 310 | 307 |
| 311 | |
| 312 STATIC_ASSERT(kPointerSize <= Code::kHeaderSize - Code::kHeaderPaddingStart); | 308 STATIC_ASSERT(kPointerSize <= Code::kHeaderSize - Code::kHeaderPaddingStart); |
| 313 | 309 |
| 314 | |
| 315 static SharedFunctionInfo** GetNextCandidateField( | 310 static SharedFunctionInfo** GetNextCandidateField( |
| 316 SharedFunctionInfo* candidate) { | 311 SharedFunctionInfo* candidate) { |
| 317 Code* code = candidate->unchecked_code(); | 312 Code* code = candidate->unchecked_code(); |
| 318 return reinterpret_cast<SharedFunctionInfo**>( | 313 return reinterpret_cast<SharedFunctionInfo**>( |
| 319 code->address() + Code::kHeaderPaddingStart); | 314 code->address() + Code::kHeaderPaddingStart); |
| 320 } | 315 } |
| 321 | 316 |
| 322 | |
| 323 static SharedFunctionInfo* GetNextCandidate(SharedFunctionInfo* candidate) { | 317 static SharedFunctionInfo* GetNextCandidate(SharedFunctionInfo* candidate) { |
| 324 return *GetNextCandidateField(candidate); | 318 return *GetNextCandidateField(candidate); |
| 325 } | 319 } |
| 326 | 320 |
| 327 | |
| 328 static void SetNextCandidate(SharedFunctionInfo* candidate, | 321 static void SetNextCandidate(SharedFunctionInfo* candidate, |
| 329 SharedFunctionInfo* next_candidate) { | 322 SharedFunctionInfo* next_candidate) { |
| 330 *GetNextCandidateField(candidate) = next_candidate; | 323 *GetNextCandidateField(candidate) = next_candidate; |
| 331 } | 324 } |
| 332 | 325 |
| 333 static JSFunction* jsfunction_candidates_head_; | 326 Isolate* isolate_; |
| 327 JSFunction* jsfunction_candidates_head_; |
| 328 SharedFunctionInfo* shared_function_info_candidates_head_; |
| 334 | 329 |
| 335 static SharedFunctionInfo* shared_function_info_candidates_head_; | 330 DISALLOW_COPY_AND_ASSIGN(CodeFlusher); |
| 336 }; | 331 }; |
| 337 | 332 |
| 338 JSFunction* FlushCode::jsfunction_candidates_head_ = NULL; | |
| 339 | 333 |
| 340 SharedFunctionInfo* FlushCode::shared_function_info_candidates_head_ = NULL; | 334 MarkCompactCollector::~MarkCompactCollector() { |
| 335 if (code_flusher_ != NULL) { |
| 336 delete code_flusher_; |
| 337 code_flusher_ = NULL; |
| 338 } |
| 339 } |
| 340 |
| 341 | 341 |
| 342 static inline HeapObject* ShortCircuitConsString(Object** p) { | 342 static inline HeapObject* ShortCircuitConsString(Object** p) { |
| 343 // Optimization: If the heap object pointed to by p is a non-symbol | 343 // Optimization: If the heap object pointed to by p is a non-symbol |
| 344 // cons string whose right substring is Heap::empty_string, update | 344 // cons string whose right substring is HEAP->empty_string, update |
| 345 // it in place to its left substring. Return the updated value. | 345 // it in place to its left substring. Return the updated value. |
| 346 // | 346 // |
| 347 // Here we assume that if we change *p, we replace it with a heap object | 347 // Here we assume that if we change *p, we replace it with a heap object |
| 348 // (ie, the left substring of a cons string is always a heap object). | 348 // (ie, the left substring of a cons string is always a heap object). |
| 349 // | 349 // |
| 350 // The check performed is: | 350 // The check performed is: |
| 351 // object->IsConsString() && !object->IsSymbol() && | 351 // object->IsConsString() && !object->IsSymbol() && |
| 352 // (ConsString::cast(object)->second() == Heap::empty_string()) | 352 // (ConsString::cast(object)->second() == HEAP->empty_string()) |
| 353 // except the maps for the object and its possible substrings might be | 353 // except the maps for the object and its possible substrings might be |
| 354 // marked. | 354 // marked. |
| 355 HeapObject* object = HeapObject::cast(*p); | 355 HeapObject* object = HeapObject::cast(*p); |
| 356 MapWord map_word = object->map_word(); | 356 MapWord map_word = object->map_word(); |
| 357 map_word.ClearMark(); | 357 map_word.ClearMark(); |
| 358 InstanceType type = map_word.ToMap()->instance_type(); | 358 InstanceType type = map_word.ToMap()->instance_type(); |
| 359 if ((type & kShortcutTypeMask) != kShortcutTypeTag) return object; | 359 if ((type & kShortcutTypeMask) != kShortcutTypeTag) return object; |
| 360 | 360 |
| 361 Object* second = reinterpret_cast<ConsString*>(object)->unchecked_second(); | 361 Object* second = reinterpret_cast<ConsString*>(object)->unchecked_second(); |
| 362 if (second != Heap::raw_unchecked_empty_string()) { | 362 Heap* heap = map_word.ToMap()->heap(); |
| 363 if (second != heap->raw_unchecked_empty_string()) { |
| 363 return object; | 364 return object; |
| 364 } | 365 } |
| 365 | 366 |
| 366 // Since we don't have the object's start, it is impossible to update the | 367 // Since we don't have the object's start, it is impossible to update the |
| 367 // page dirty marks. Therefore, we only replace the string with its left | 368 // page dirty marks. Therefore, we only replace the string with its left |
| 368 // substring when page dirty marks do not change. | 369 // substring when page dirty marks do not change. |
| 369 Object* first = reinterpret_cast<ConsString*>(object)->unchecked_first(); | 370 Object* first = reinterpret_cast<ConsString*>(object)->unchecked_first(); |
| 370 if (!Heap::InNewSpace(object) && Heap::InNewSpace(first)) return object; | 371 if (!heap->InNewSpace(object) && heap->InNewSpace(first)) return object; |
| 371 | 372 |
| 372 *p = first; | 373 *p = first; |
| 373 return HeapObject::cast(first); | 374 return HeapObject::cast(first); |
| 374 } | 375 } |
| 375 | 376 |
| 376 | 377 |
| 377 class StaticMarkingVisitor : public StaticVisitorBase { | 378 class StaticMarkingVisitor : public StaticVisitorBase { |
| 378 public: | 379 public: |
| 379 static inline void IterateBody(Map* map, HeapObject* obj) { | 380 static inline void IterateBody(Map* map, HeapObject* obj) { |
| 380 table_.GetVisitor(map)(map, obj); | 381 table_.GetVisitor(map)(map, obj); |
| 381 } | 382 } |
| 382 | 383 |
| 383 static void EnableCodeFlushing(bool enabled) { | |
| 384 if (enabled) { | |
| 385 table_.Register(kVisitJSFunction, &VisitJSFunctionAndFlushCode); | |
| 386 table_.Register(kVisitSharedFunctionInfo, | |
| 387 &VisitSharedFunctionInfoAndFlushCode); | |
| 388 | |
| 389 } else { | |
| 390 table_.Register(kVisitJSFunction, &VisitJSFunction); | |
| 391 table_.Register(kVisitSharedFunctionInfo, | |
| 392 &VisitSharedFunctionInfoGeneric); | |
| 393 } | |
| 394 } | |
| 395 | |
| 396 static void Initialize() { | 384 static void Initialize() { |
| 397 table_.Register(kVisitShortcutCandidate, | 385 table_.Register(kVisitShortcutCandidate, |
| 398 &FixedBodyVisitor<StaticMarkingVisitor, | 386 &FixedBodyVisitor<StaticMarkingVisitor, |
| 399 ConsString::BodyDescriptor, | 387 ConsString::BodyDescriptor, |
| 400 void>::Visit); | 388 void>::Visit); |
| 401 | 389 |
| 402 table_.Register(kVisitConsString, | 390 table_.Register(kVisitConsString, |
| 403 &FixedBodyVisitor<StaticMarkingVisitor, | 391 &FixedBodyVisitor<StaticMarkingVisitor, |
| 404 ConsString::BodyDescriptor, | 392 ConsString::BodyDescriptor, |
| 405 void>::Visit); | 393 void>::Visit); |
| (...skipping 41 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 447 | 435 |
| 448 table_.RegisterSpecializations<JSObjectVisitor, | 436 table_.RegisterSpecializations<JSObjectVisitor, |
| 449 kVisitJSObject, | 437 kVisitJSObject, |
| 450 kVisitJSObjectGeneric>(); | 438 kVisitJSObjectGeneric>(); |
| 451 | 439 |
| 452 table_.RegisterSpecializations<StructObjectVisitor, | 440 table_.RegisterSpecializations<StructObjectVisitor, |
| 453 kVisitStruct, | 441 kVisitStruct, |
| 454 kVisitStructGeneric>(); | 442 kVisitStructGeneric>(); |
| 455 } | 443 } |
| 456 | 444 |
| 457 INLINE(static void VisitPointer(Object** p)) { | 445 INLINE(static void VisitPointer(Heap* heap, Object** p)) { |
| 458 MarkObjectByPointer(p); | 446 MarkObjectByPointer(heap, p); |
| 459 } | 447 } |
| 460 | 448 |
| 461 INLINE(static void VisitPointers(Object** start, Object** end)) { | 449 INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) { |
| 462 // Mark all objects pointed to in [start, end). | 450 // Mark all objects pointed to in [start, end). |
| 463 const int kMinRangeForMarkingRecursion = 64; | 451 const int kMinRangeForMarkingRecursion = 64; |
| 464 if (end - start >= kMinRangeForMarkingRecursion) { | 452 if (end - start >= kMinRangeForMarkingRecursion) { |
| 465 if (VisitUnmarkedObjects(start, end)) return; | 453 if (VisitUnmarkedObjects(heap, start, end)) return; |
| 466 // We are close to a stack overflow, so just mark the objects. | 454 // We are close to a stack overflow, so just mark the objects. |
| 467 } | 455 } |
| 468 for (Object** p = start; p < end; p++) MarkObjectByPointer(p); | 456 for (Object** p = start; p < end; p++) MarkObjectByPointer(heap, p); |
| 469 } | 457 } |
| 470 | 458 |
| 471 static inline void VisitCodeTarget(RelocInfo* rinfo) { | 459 static inline void VisitCodeTarget(RelocInfo* rinfo) { |
| 472 ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode())); | 460 ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode())); |
| 473 Code* code = Code::GetCodeFromTargetAddress(rinfo->target_address()); | 461 Code* code = Code::GetCodeFromTargetAddress(rinfo->target_address()); |
| 474 if (FLAG_cleanup_ics_at_gc && code->is_inline_cache_stub()) { | 462 if (FLAG_cleanup_ics_at_gc && code->is_inline_cache_stub()) { |
| 475 IC::Clear(rinfo->pc()); | 463 IC::Clear(rinfo->pc()); |
| 476 // Please note targets for cleared inline cached do not have to be | 464 // Please note targets for cleared inline cached do not have to be |
| 477 // marked since they are contained in Heap::non_monomorphic_cache(). | 465 // marked since they are contained in HEAP->non_monomorphic_cache(). |
| 478 } else { | 466 } else { |
| 479 MarkCompactCollector::MarkObject(code); | 467 HEAP->mark_compact_collector()->MarkObject(code); |
| 480 } | 468 } |
| 481 } | 469 } |
| 482 | 470 |
| 483 static void VisitGlobalPropertyCell(RelocInfo* rinfo) { | 471 static void VisitGlobalPropertyCell(RelocInfo* rinfo) { |
| 484 ASSERT(rinfo->rmode() == RelocInfo::GLOBAL_PROPERTY_CELL); | 472 ASSERT(rinfo->rmode() == RelocInfo::GLOBAL_PROPERTY_CELL); |
| 485 Object* cell = rinfo->target_cell(); | 473 Object* cell = rinfo->target_cell(); |
| 486 Object* old_cell = cell; | 474 Object* old_cell = cell; |
| 487 VisitPointer(&cell); | 475 VisitPointer(HEAP, &cell); |
| 488 if (cell != old_cell) { | 476 if (cell != old_cell) { |
| 489 rinfo->set_target_cell(reinterpret_cast<JSGlobalPropertyCell*>(cell)); | 477 rinfo->set_target_cell(reinterpret_cast<JSGlobalPropertyCell*>(cell)); |
| 490 } | 478 } |
| 491 } | 479 } |
| 492 | 480 |
| 493 static inline void VisitDebugTarget(RelocInfo* rinfo) { | 481 static inline void VisitDebugTarget(RelocInfo* rinfo) { |
| 494 ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) && | 482 ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) && |
| 495 rinfo->IsPatchedReturnSequence()) || | 483 rinfo->IsPatchedReturnSequence()) || |
| 496 (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) && | 484 (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) && |
| 497 rinfo->IsPatchedDebugBreakSlotSequence())); | 485 rinfo->IsPatchedDebugBreakSlotSequence())); |
| 498 HeapObject* code = Code::GetCodeFromTargetAddress(rinfo->call_address()); | 486 HeapObject* code = Code::GetCodeFromTargetAddress(rinfo->call_address()); |
| 499 MarkCompactCollector::MarkObject(code); | 487 HEAP->mark_compact_collector()->MarkObject(code); |
| 500 } | 488 } |
| 501 | 489 |
| 502 // Mark object pointed to by p. | 490 // Mark object pointed to by p. |
| 503 INLINE(static void MarkObjectByPointer(Object** p)) { | 491 INLINE(static void MarkObjectByPointer(Heap* heap, Object** p)) { |
| 504 if (!(*p)->IsHeapObject()) return; | 492 if (!(*p)->IsHeapObject()) return; |
| 505 HeapObject* object = ShortCircuitConsString(p); | 493 HeapObject* object = ShortCircuitConsString(p); |
| 506 MarkCompactCollector::MarkObject(object); | 494 heap->mark_compact_collector()->MarkObject(object); |
| 507 } | 495 } |
| 508 | 496 |
| 497 |
| 509 // Visit an unmarked object. | 498 // Visit an unmarked object. |
| 510 static inline void VisitUnmarkedObject(HeapObject* obj) { | 499 static inline void VisitUnmarkedObject(HeapObject* obj) { |
| 511 #ifdef DEBUG | 500 #ifdef DEBUG |
| 512 ASSERT(Heap::Contains(obj)); | 501 ASSERT(HEAP->Contains(obj)); |
| 513 ASSERT(!obj->IsMarked()); | 502 ASSERT(!obj->IsMarked()); |
| 514 #endif | 503 #endif |
| 515 Map* map = obj->map(); | 504 Map* map = obj->map(); |
| 516 MarkCompactCollector::SetMark(obj); | 505 MarkCompactCollector* collector = map->heap()->mark_compact_collector(); |
| 506 collector->SetMark(obj); |
| 517 // Mark the map pointer and the body. | 507 // Mark the map pointer and the body. |
| 518 MarkCompactCollector::MarkObject(map); | 508 collector->MarkObject(map); |
| 519 IterateBody(map, obj); | 509 IterateBody(map, obj); |
| 520 } | 510 } |
| 521 | 511 |
| 522 // Visit all unmarked objects pointed to by [start, end). | 512 // Visit all unmarked objects pointed to by [start, end). |
| 523 // Returns false if the operation fails (lack of stack space). | 513 // Returns false if the operation fails (lack of stack space). |
| 524 static inline bool VisitUnmarkedObjects(Object** start, Object** end) { | 514 static inline bool VisitUnmarkedObjects(Heap* heap, |
| 515 Object** start, |
| 516 Object** end) { |
| 525 // Return false is we are close to the stack limit. | 517 // Return false is we are close to the stack limit. |
| 526 StackLimitCheck check; | 518 StackLimitCheck check(heap->isolate()); |
| 527 if (check.HasOverflowed()) return false; | 519 if (check.HasOverflowed()) return false; |
| 528 | 520 |
| 529 // Visit the unmarked objects. | 521 // Visit the unmarked objects. |
| 530 for (Object** p = start; p < end; p++) { | 522 for (Object** p = start; p < end; p++) { |
| 531 if (!(*p)->IsHeapObject()) continue; | 523 if (!(*p)->IsHeapObject()) continue; |
| 532 HeapObject* obj = HeapObject::cast(*p); | 524 HeapObject* obj = HeapObject::cast(*p); |
| 533 if (obj->IsMarked()) continue; | 525 if (obj->IsMarked()) continue; |
| 534 VisitUnmarkedObject(obj); | 526 VisitUnmarkedObject(obj); |
| 535 } | 527 } |
| 536 return true; | 528 return true; |
| (...skipping 15 matching lines...) Expand all Loading... |
| 552 | 544 |
| 553 typedef FlexibleBodyVisitor<StaticMarkingVisitor, | 545 typedef FlexibleBodyVisitor<StaticMarkingVisitor, |
| 554 JSObject::BodyDescriptor, | 546 JSObject::BodyDescriptor, |
| 555 void> JSObjectVisitor; | 547 void> JSObjectVisitor; |
| 556 | 548 |
| 557 typedef FlexibleBodyVisitor<StaticMarkingVisitor, | 549 typedef FlexibleBodyVisitor<StaticMarkingVisitor, |
| 558 StructBodyDescriptor, | 550 StructBodyDescriptor, |
| 559 void> StructObjectVisitor; | 551 void> StructObjectVisitor; |
| 560 | 552 |
| 561 static void VisitCode(Map* map, HeapObject* object) { | 553 static void VisitCode(Map* map, HeapObject* object) { |
| 562 reinterpret_cast<Code*>(object)->CodeIterateBody<StaticMarkingVisitor>(); | 554 reinterpret_cast<Code*>(object)->CodeIterateBody<StaticMarkingVisitor>( |
| 555 map->heap()); |
| 563 } | 556 } |
| 564 | 557 |
| 565 // Code flushing support. | 558 // Code flushing support. |
| 566 | 559 |
| 567 // How many collections newly compiled code object will survive before being | 560 // How many collections newly compiled code object will survive before being |
| 568 // flushed. | 561 // flushed. |
| 569 static const int kCodeAgeThreshold = 5; | 562 static const int kCodeAgeThreshold = 5; |
| 570 | 563 |
| 571 inline static bool HasSourceCode(SharedFunctionInfo* info) { | 564 inline static bool HasSourceCode(SharedFunctionInfo* info) { |
| 572 Object* undefined = Heap::raw_unchecked_undefined_value(); | 565 Object* undefined = HEAP->raw_unchecked_undefined_value(); |
| 573 return (info->script() != undefined) && | 566 return (info->script() != undefined) && |
| 574 (reinterpret_cast<Script*>(info->script())->source() != undefined); | 567 (reinterpret_cast<Script*>(info->script())->source() != undefined); |
| 575 } | 568 } |
| 576 | 569 |
| 577 | 570 |
| 578 inline static bool IsCompiled(JSFunction* function) { | 571 inline static bool IsCompiled(JSFunction* function) { |
| 579 return | 572 return function->unchecked_code() != |
| 580 function->unchecked_code() != Builtins::builtin(Builtins::LazyCompile); | 573 Isolate::Current()->builtins()->builtin(Builtins::LazyCompile); |
| 581 } | 574 } |
| 582 | 575 |
| 583 | |
| 584 inline static bool IsCompiled(SharedFunctionInfo* function) { | 576 inline static bool IsCompiled(SharedFunctionInfo* function) { |
| 585 return | 577 return function->unchecked_code() != |
| 586 function->unchecked_code() != Builtins::builtin(Builtins::LazyCompile); | 578 Isolate::Current()->builtins()->builtin(Builtins::LazyCompile); |
| 587 } | 579 } |
| 588 | 580 |
| 589 inline static bool IsFlushable(JSFunction* function) { | 581 inline static bool IsFlushable(JSFunction* function) { |
| 590 SharedFunctionInfo* shared_info = function->unchecked_shared(); | 582 SharedFunctionInfo* shared_info = function->unchecked_shared(); |
| 591 | 583 |
| 592 // Code is either on stack, in compilation cache or referenced | 584 // Code is either on stack, in compilation cache or referenced |
| 593 // by optimized version of function. | 585 // by optimized version of function. |
| 594 if (function->unchecked_code()->IsMarked()) { | 586 if (function->unchecked_code()->IsMarked()) { |
| 595 shared_info->set_code_age(0); | 587 shared_info->set_code_age(0); |
| 596 return false; | 588 return false; |
| (...skipping 41 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 638 // Age this shared function info. | 630 // Age this shared function info. |
| 639 if (shared_info->code_age() < kCodeAgeThreshold) { | 631 if (shared_info->code_age() < kCodeAgeThreshold) { |
| 640 shared_info->set_code_age(shared_info->code_age() + 1); | 632 shared_info->set_code_age(shared_info->code_age() + 1); |
| 641 return false; | 633 return false; |
| 642 } | 634 } |
| 643 | 635 |
| 644 return true; | 636 return true; |
| 645 } | 637 } |
| 646 | 638 |
| 647 | 639 |
| 648 static bool FlushCodeForFunction(JSFunction* function) { | 640 static bool FlushCodeForFunction(Heap* heap, JSFunction* function) { |
| 649 if (!IsFlushable(function)) return false; | 641 if (!IsFlushable(function)) return false; |
| 650 | 642 |
| 651 // This function's code looks flushable. But we have to postpone the | 643 // This function's code looks flushable. But we have to postpone the |
| 652 // decision until we see all functions that point to the same | 644 // decision until we see all functions that point to the same |
| 653 // SharedFunctionInfo because some of them might be optimized. | 645 // SharedFunctionInfo because some of them might be optimized. |
| 654 // That would make the nonoptimized version of the code nonflushable, | 646 // That would make the nonoptimized version of the code nonflushable, |
| 655 // because it is required for bailing out from optimized code. | 647 // because it is required for bailing out from optimized code. |
| 656 FlushCode::AddCandidate(function); | 648 heap->mark_compact_collector()->code_flusher()->AddCandidate(function); |
| 657 return true; | 649 return true; |
| 658 } | 650 } |
| 659 | 651 |
| 660 | 652 |
| 661 static inline Map* SafeMap(Object* obj) { | 653 static inline Map* SafeMap(Object* obj) { |
| 662 MapWord map_word = HeapObject::cast(obj)->map_word(); | 654 MapWord map_word = HeapObject::cast(obj)->map_word(); |
| 663 map_word.ClearMark(); | 655 map_word.ClearMark(); |
| 664 map_word.ClearOverflow(); | 656 map_word.ClearOverflow(); |
| 665 return map_word.ToMap(); | 657 return map_word.ToMap(); |
| 666 } | 658 } |
| 667 | 659 |
| 668 | 660 |
| 669 static inline bool IsJSBuiltinsObject(Object* obj) { | 661 static inline bool IsJSBuiltinsObject(Object* obj) { |
| 670 return obj->IsHeapObject() && | 662 return obj->IsHeapObject() && |
| 671 (SafeMap(obj)->instance_type() == JS_BUILTINS_OBJECT_TYPE); | 663 (SafeMap(obj)->instance_type() == JS_BUILTINS_OBJECT_TYPE); |
| 672 } | 664 } |
| 673 | 665 |
| 674 | 666 |
| 675 static inline bool IsValidNotBuiltinContext(Object* ctx) { | 667 static inline bool IsValidNotBuiltinContext(Object* ctx) { |
| 676 if (!ctx->IsHeapObject()) return false; | 668 if (!ctx->IsHeapObject()) return false; |
| 677 | 669 |
| 678 Map* map = SafeMap(ctx); | 670 Map* map = SafeMap(ctx); |
| 679 if (!(map == Heap::raw_unchecked_context_map() || | 671 if (!(map == HEAP->raw_unchecked_context_map() || |
| 680 map == Heap::raw_unchecked_catch_context_map() || | 672 map == HEAP->raw_unchecked_catch_context_map() || |
| 681 map == Heap::raw_unchecked_global_context_map())) { | 673 map == HEAP->raw_unchecked_global_context_map())) { |
| 682 return false; | 674 return false; |
| 683 } | 675 } |
| 684 | 676 |
| 685 Context* context = reinterpret_cast<Context*>(ctx); | 677 Context* context = reinterpret_cast<Context*>(ctx); |
| 686 | 678 |
| 687 if (IsJSBuiltinsObject(context->global())) { | 679 if (IsJSBuiltinsObject(context->global())) { |
| 688 return false; | 680 return false; |
| 689 } | 681 } |
| 690 | 682 |
| 691 return true; | 683 return true; |
| 692 } | 684 } |
| 693 | 685 |
| 694 | 686 |
| 695 static void VisitSharedFunctionInfoGeneric(Map* map, HeapObject* object) { | 687 static void VisitSharedFunctionInfoGeneric(Map* map, HeapObject* object) { |
| 696 SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object); | 688 SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object); |
| 697 | 689 |
| 698 if (shared->IsInobjectSlackTrackingInProgress()) shared->DetachInitialMap(); | 690 if (shared->IsInobjectSlackTrackingInProgress()) shared->DetachInitialMap(); |
| 699 | 691 |
| 700 FixedBodyVisitor<StaticMarkingVisitor, | 692 FixedBodyVisitor<StaticMarkingVisitor, |
| 701 SharedFunctionInfo::BodyDescriptor, | 693 SharedFunctionInfo::BodyDescriptor, |
| 702 void>::Visit(map, object); | 694 void>::Visit(map, object); |
| 703 } | 695 } |
| 704 | 696 |
| 705 | 697 |
| 706 static void VisitSharedFunctionInfoAndFlushCode(Map* map, | 698 static void VisitSharedFunctionInfoAndFlushCode(Map* map, |
| 707 HeapObject* object) { | 699 HeapObject* object) { |
| 700 MarkCompactCollector* collector = map->heap()->mark_compact_collector(); |
| 701 if (!collector->is_code_flushing_enabled()) { |
| 702 VisitSharedFunctionInfoGeneric(map, object); |
| 703 return; |
| 704 } |
| 708 VisitSharedFunctionInfoAndFlushCodeGeneric(map, object, false); | 705 VisitSharedFunctionInfoAndFlushCodeGeneric(map, object, false); |
| 709 } | 706 } |
| 710 | 707 |
| 711 | 708 |
  // Visits a SharedFunctionInfo when code flushing is enabled.  If the
  // caller has not already decided flushability
  // (known_flush_code_candidate == false), decides it here and registers
  // the object with the code flusher when it qualifies.
  static void VisitSharedFunctionInfoAndFlushCodeGeneric(
      Map* map, HeapObject* object, bool known_flush_code_candidate) {
    Heap* heap = map->heap();
    SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(object);

    // Stop in-object slack tracking before the fields are visited.
    if (shared->IsInobjectSlackTrackingInProgress()) shared->DetachInitialMap();

    if (!known_flush_code_candidate) {
      known_flush_code_candidate = IsFlushable(shared);
      if (known_flush_code_candidate) {
        heap->mark_compact_collector()->code_flusher()->AddCandidate(shared);
      }
    }

    // The candidate flag controls whether the code field is visited.
    VisitSharedFunctionInfoFields(heap, object, known_flush_code_candidate);
  }
| 725 | 725 |
| 726 | 726 |
| 727 static void VisitCodeEntry(Address entry_address) { | 727 static void VisitCodeEntry(Heap* heap, Address entry_address) { |
| 728 Object* code = Code::GetObjectFromEntryAddress(entry_address); | 728 Object* code = Code::GetObjectFromEntryAddress(entry_address); |
| 729 Object* old_code = code; | 729 Object* old_code = code; |
| 730 VisitPointer(&code); | 730 VisitPointer(heap, &code); |
| 731 if (code != old_code) { | 731 if (code != old_code) { |
| 732 Memory::Address_at(entry_address) = | 732 Memory::Address_at(entry_address) = |
| 733 reinterpret_cast<Code*>(code)->entry(); | 733 reinterpret_cast<Code*>(code)->entry(); |
| 734 } | 734 } |
| 735 } | 735 } |
| 736 | 736 |
| 737 | 737 |
  // Visits a JSFunction when code flushing may be active.  Decides
  // whether the function's code is a flush candidate and, if not, keeps
  // the non-optimized code (and that of all inlined functions) alive.
  static void VisitJSFunctionAndFlushCode(Map* map, HeapObject* object) {
    Heap* heap = map->heap();
    MarkCompactCollector* collector = heap->mark_compact_collector();
    if (!collector->is_code_flushing_enabled()) {
      VisitJSFunction(map, object);
      return;
    }

    JSFunction* jsfunction = reinterpret_cast<JSFunction*>(object);
    // The function must have a valid context and not be a builtin.
    bool flush_code_candidate = false;
    if (IsValidNotBuiltinContext(jsfunction->unchecked_context())) {
      flush_code_candidate = FlushCodeForFunction(heap, jsfunction);
    }

    if (!flush_code_candidate) {
      // Not flushable: the shared function info's code must survive.
      collector->MarkObject(jsfunction->unchecked_shared()->unchecked_code());

      if (jsfunction->unchecked_code()->kind() == Code::OPTIMIZED_FUNCTION) {
        // For optimized functions we should retain both non-optimized version
        // of its code and non-optimized version of all inlined functions.
        // This is required to support bailing out from inlined code.
        DeoptimizationInputData* data =
            reinterpret_cast<DeoptimizationInputData*>(
                jsfunction->unchecked_code()->unchecked_deoptimization_data());

        FixedArray* literals = data->UncheckedLiteralArray();

        // Mark the non-optimized code of every inlined function.
        for (int i = 0, count = data->InlinedFunctionCount()->value();
             i < count;
             i++) {
          JSFunction* inlined = reinterpret_cast<JSFunction*>(literals->get(i));
          collector->MarkObject(inlined->unchecked_shared()->unchecked_code());
        }
      }
    }

    VisitJSFunctionFields(map,
                          reinterpret_cast<JSFunction*>(object),
                          flush_code_candidate);
  }
| 774 | 779 |
| 775 | 780 |
| 776 static void VisitJSFunction(Map* map, HeapObject* object) { | 781 static void VisitJSFunction(Map* map, HeapObject* object) { |
| 777 VisitJSFunctionFields(map, | 782 VisitJSFunctionFields(map, |
| 778 reinterpret_cast<JSFunction*>(object), | 783 reinterpret_cast<JSFunction*>(object), |
| 779 false); | 784 false); |
| 780 } | 785 } |
| 781 | 786 |
| 782 | 787 |
| 783 #define SLOT_ADDR(obj, offset) \ | 788 #define SLOT_ADDR(obj, offset) \ |
| 784 reinterpret_cast<Object**>((obj)->address() + offset) | 789 reinterpret_cast<Object**>((obj)->address() + offset) |
| 785 | 790 |
| 786 | 791 |
  // Visits the strong fields of a JSFunction.  The code-entry slot is
  // skipped for flush candidates; in that case the SharedFunctionInfo is
  // visited here directly so its flushability is not re-examined later.
  static inline void VisitJSFunctionFields(Map* map,
                                           JSFunction* object,
                                           bool flush_code_candidate) {
    Heap* heap = map->heap();
    MarkCompactCollector* collector = heap->mark_compact_collector();

    // Fields before the code entry.
    VisitPointers(heap,
                  SLOT_ADDR(object, JSFunction::kPropertiesOffset),
                  SLOT_ADDR(object, JSFunction::kCodeEntryOffset));

    if (!flush_code_candidate) {
      VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
    } else {
      // Don't visit code object.

      // Visit shared function info to avoid double checking of its
      // flushability.
      SharedFunctionInfo* shared_info = object->unchecked_shared();
      if (!shared_info->IsMarked()) {
        Map* shared_info_map = shared_info->map();
        // Mark before visiting; the generic visitor assumes the object
        // itself is already marked.
        collector->SetMark(shared_info);
        collector->MarkObject(shared_info_map);
        VisitSharedFunctionInfoAndFlushCodeGeneric(shared_info_map,
                                                   shared_info,
                                                   true);
      }
    }

    // Fields after the code entry, up to the weak tail.
    VisitPointers(heap,
                  SLOT_ADDR(object,
                            JSFunction::kCodeEntryOffset + kPointerSize),
                  SLOT_ADDR(object, JSFunction::kNonWeakFieldsEndOffset));

    // Don't visit the next function list field as it is a weak reference.
  }
| 817 | 827 |
| 818 | 828 |
  // Visits the pointer fields of a SharedFunctionInfo.  The code field
  // is deliberately skipped for flush candidates so the code object can
  // be collected.
  static void VisitSharedFunctionInfoFields(Heap* heap,
                                            HeapObject* object,
                                            bool flush_code_candidate) {
    VisitPointer(heap, SLOT_ADDR(object, SharedFunctionInfo::kNameOffset));

    if (!flush_code_candidate) {
      VisitPointer(heap, SLOT_ADDR(object, SharedFunctionInfo::kCodeOffset));
    }

    // Remaining pointer fields after the code slot.
    VisitPointers(heap,
                  SLOT_ADDR(object, SharedFunctionInfo::kScopeInfoOffset),
                  SLOT_ADDR(object, SharedFunctionInfo::kSize));
  }
| 830 | 842 |
| 831 #undef SLOT_ADDR | 843 #undef SLOT_ADDR |
| 832 | 844 |
| 833 typedef void (*Callback)(Map* map, HeapObject* object); | 845 typedef void (*Callback)(Map* map, HeapObject* object); |
| 834 | 846 |
| 835 static VisitorDispatchTable<Callback> table_; | 847 static VisitorDispatchTable<Callback> table_; |
| 836 }; | 848 }; |
| 837 | 849 |
| 838 | 850 |
| 839 VisitorDispatchTable<StaticMarkingVisitor::Callback> | 851 VisitorDispatchTable<StaticMarkingVisitor::Callback> |
| 840 StaticMarkingVisitor::table_; | 852 StaticMarkingVisitor::table_; |
| 841 | 853 |
| 842 | 854 |
// ObjectVisitor adapter that forwards every pointer and relocation-info
// visit to the corresponding StaticMarkingVisitor routine, supplying the
// heap this visitor was constructed for.
class MarkingVisitor : public ObjectVisitor {
 public:
  explicit MarkingVisitor(Heap* heap) : heap_(heap) { }

  void VisitPointer(Object** p) {
    StaticMarkingVisitor::VisitPointer(heap_, p);
  }

  void VisitPointers(Object** start, Object** end) {
    StaticMarkingVisitor::VisitPointers(heap_, start, end);
  }

  void VisitCodeTarget(RelocInfo* rinfo) {
    StaticMarkingVisitor::VisitCodeTarget(rinfo);
  }

  void VisitGlobalPropertyCell(RelocInfo* rinfo) {
    StaticMarkingVisitor::VisitGlobalPropertyCell(rinfo);
  }

  void VisitDebugTarget(RelocInfo* rinfo) {
    StaticMarkingVisitor::VisitDebugTarget(rinfo);
  }

 private:
  Heap* heap_;  // The heap being marked; not owned.
};
| 865 | 882 |
| 866 | 883 |
| 867 class CodeMarkingVisitor : public ThreadVisitor { | 884 class CodeMarkingVisitor : public ThreadVisitor { |
| 868 public: | 885 public: |
| 886 explicit CodeMarkingVisitor(MarkCompactCollector* collector) |
| 887 : collector_(collector) {} |
| 888 |
| 869 void VisitThread(ThreadLocalTop* top) { | 889 void VisitThread(ThreadLocalTop* top) { |
| 870 for (StackFrameIterator it(top); !it.done(); it.Advance()) { | 890 for (StackFrameIterator it(top); !it.done(); it.Advance()) { |
| 871 MarkCompactCollector::MarkObject(it.frame()->unchecked_code()); | 891 collector_->MarkObject(it.frame()->unchecked_code()); |
| 872 } | 892 } |
| 873 } | 893 } |
| 894 |
| 895 private: |
| 896 MarkCompactCollector* collector_; |
| 874 }; | 897 }; |
| 875 | 898 |
| 876 | 899 |
| 877 class SharedFunctionInfoMarkingVisitor : public ObjectVisitor { | 900 class SharedFunctionInfoMarkingVisitor : public ObjectVisitor { |
| 878 public: | 901 public: |
| 902 explicit SharedFunctionInfoMarkingVisitor(MarkCompactCollector* collector) |
| 903 : collector_(collector) {} |
| 904 |
| 879 void VisitPointers(Object** start, Object** end) { | 905 void VisitPointers(Object** start, Object** end) { |
| 880 for (Object** p = start; p < end; p++) VisitPointer(p); | 906 for (Object** p = start; p < end; p++) VisitPointer(p); |
| 881 } | 907 } |
| 882 | 908 |
| 883 void VisitPointer(Object** slot) { | 909 void VisitPointer(Object** slot) { |
| 884 Object* obj = *slot; | 910 Object* obj = *slot; |
| 885 if (obj->IsSharedFunctionInfo()) { | 911 if (obj->IsSharedFunctionInfo()) { |
| 886 SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(obj); | 912 SharedFunctionInfo* shared = reinterpret_cast<SharedFunctionInfo*>(obj); |
| 887 MarkCompactCollector::MarkObject(shared->unchecked_code()); | 913 collector_->MarkObject(shared->unchecked_code()); |
| 888 MarkCompactCollector::MarkObject(shared); | 914 collector_->MarkObject(shared); |
| 889 } | 915 } |
| 890 } | 916 } |
| 917 |
| 918 private: |
| 919 MarkCompactCollector* collector_; |
| 891 }; | 920 }; |
| 892 | 921 |
| 893 | 922 |
// Decides whether code flushing is possible for this collection and, if
// so, pre-marks all code that must not be flushed: code referenced from
// stacks (current and archived threads), the compilation cache, and
// handle scopes.
void MarkCompactCollector::PrepareForCodeFlushing() {
  ASSERT(heap_ == Isolate::Current()->heap());

  // Flushing is opt-in via --flush-code.
  if (!FLAG_flush_code) {
    EnableCodeFlushing(false);
    return;
  }

#ifdef ENABLE_DEBUGGER_SUPPORT
  // The debugger may hold references into code; never flush while it is
  // loaded or break points are set.
  if (heap_->isolate()->debug()->IsLoaded() ||
      heap_->isolate()->debug()->has_break_points()) {
    EnableCodeFlushing(false);
    return;
  }
#endif
  EnableCodeFlushing(true);

  // Ensure that empty descriptor array is marked. Method MarkDescriptorArray
  // relies on it being marked before any other descriptor array.
  MarkObject(heap_->raw_unchecked_empty_descriptor_array());

  // Make sure we are not referencing the code from the stack.
  ASSERT(this == heap_->mark_compact_collector());
  for (StackFrameIterator it; !it.done(); it.Advance()) {
    MarkObject(it.frame()->unchecked_code());
  }

  // Iterate the archived stacks in all threads to check if
  // the code is referenced.
  CodeMarkingVisitor code_marking_visitor(this);
  heap_->isolate()->thread_manager()->IterateArchivedThreads(
      &code_marking_visitor);

  // Keep functions reachable from the compilation cache and from live
  // handle scopes.
  SharedFunctionInfoMarkingVisitor visitor(this);
  heap_->isolate()->compilation_cache()->IterateFunctions(&visitor);
  heap_->isolate()->handle_scope_implementer()->Iterate(&visitor);

  // Drain everything pushed above before regular marking begins.
  ProcessMarkingStack();
}
| 928 | 962 |
| 929 | 963 |
| 930 // Visitor class for marking heap roots. | 964 // Visitor class for marking heap roots. |
| 931 class RootMarkingVisitor : public ObjectVisitor { | 965 class RootMarkingVisitor : public ObjectVisitor { |
| 932 public: | 966 public: |
| 967 explicit RootMarkingVisitor(Heap* heap) |
| 968 : collector_(heap->mark_compact_collector()) { } |
| 969 |
| 933 void VisitPointer(Object** p) { | 970 void VisitPointer(Object** p) { |
| 934 MarkObjectByPointer(p); | 971 MarkObjectByPointer(p); |
| 935 } | 972 } |
| 936 | 973 |
| 937 void VisitPointers(Object** start, Object** end) { | 974 void VisitPointers(Object** start, Object** end) { |
| 938 for (Object** p = start; p < end; p++) MarkObjectByPointer(p); | 975 for (Object** p = start; p < end; p++) MarkObjectByPointer(p); |
| 939 } | 976 } |
| 940 | 977 |
| 941 private: | 978 private: |
| 942 void MarkObjectByPointer(Object** p) { | 979 void MarkObjectByPointer(Object** p) { |
| 943 if (!(*p)->IsHeapObject()) return; | 980 if (!(*p)->IsHeapObject()) return; |
| 944 | 981 |
| 945 // Replace flat cons strings in place. | 982 // Replace flat cons strings in place. |
| 946 HeapObject* object = ShortCircuitConsString(p); | 983 HeapObject* object = ShortCircuitConsString(p); |
| 947 if (object->IsMarked()) return; | 984 if (object->IsMarked()) return; |
| 948 | 985 |
| 949 Map* map = object->map(); | 986 Map* map = object->map(); |
| 950 // Mark the object. | 987 // Mark the object. |
| 951 MarkCompactCollector::SetMark(object); | 988 collector_->SetMark(object); |
| 952 | 989 |
| 953 // Mark the map pointer and body, and push them on the marking stack. | 990 // Mark the map pointer and body, and push them on the marking stack. |
| 954 MarkCompactCollector::MarkObject(map); | 991 collector_->MarkObject(map); |
| 955 StaticMarkingVisitor::IterateBody(map, object); | 992 StaticMarkingVisitor::IterateBody(map, object); |
| 956 | 993 |
| 957 // Mark all the objects reachable from the map and body. May leave | 994 // Mark all the objects reachable from the map and body. May leave |
| 958 // overflowed objects in the heap. | 995 // overflowed objects in the heap. |
| 959 MarkCompactCollector::EmptyMarkingStack(); | 996 collector_->EmptyMarkingStack(); |
| 960 } | 997 } |
| 998 |
| 999 MarkCompactCollector* collector_; |
| 961 }; | 1000 }; |
| 962 | 1001 |
| 963 | 1002 |
| 964 // Helper class for pruning the symbol table. | 1003 // Helper class for pruning the symbol table. |
| 965 class SymbolTableCleaner : public ObjectVisitor { | 1004 class SymbolTableCleaner : public ObjectVisitor { |
| 966 public: | 1005 public: |
| 967 SymbolTableCleaner() : pointers_removed_(0) { } | 1006 SymbolTableCleaner() : pointers_removed_(0) { } |
| 968 | 1007 |
| 969 virtual void VisitPointers(Object** start, Object** end) { | 1008 virtual void VisitPointers(Object** start, Object** end) { |
| 970 // Visit all HeapObject pointers in [start, end). | 1009 // Visit all HeapObject pointers in [start, end). |
| 971 for (Object** p = start; p < end; p++) { | 1010 for (Object** p = start; p < end; p++) { |
| 972 if ((*p)->IsHeapObject() && !HeapObject::cast(*p)->IsMarked()) { | 1011 if ((*p)->IsHeapObject() && !HeapObject::cast(*p)->IsMarked()) { |
| 973 // Check if the symbol being pruned is an external symbol. We need to | 1012 // Check if the symbol being pruned is an external symbol. We need to |
| 974 // delete the associated external data as this symbol is going away. | 1013 // delete the associated external data as this symbol is going away. |
| 975 | 1014 |
| 976 // Since no objects have yet been moved we can safely access the map of | 1015 // Since no objects have yet been moved we can safely access the map of |
| 977 // the object. | 1016 // the object. |
| 978 if ((*p)->IsExternalString()) { | 1017 if ((*p)->IsExternalString()) { |
| 979 Heap::FinalizeExternalString(String::cast(*p)); | 1018 HEAP->FinalizeExternalString(String::cast(*p)); |
| 980 } | 1019 } |
| 981 // Set the entry to null_value (as deleted). | 1020 // Set the entry to null_value (as deleted). |
| 982 *p = Heap::raw_unchecked_null_value(); | 1021 *p = HEAP->raw_unchecked_null_value(); |
| 983 pointers_removed_++; | 1022 pointers_removed_++; |
| 984 } | 1023 } |
| 985 } | 1024 } |
| 986 } | 1025 } |
| 987 | 1026 |
| 988 int PointersRemoved() { | 1027 int PointersRemoved() { |
| 989 return pointers_removed_; | 1028 return pointers_removed_; |
| 990 } | 1029 } |
| 991 private: | 1030 private: |
| 992 int pointers_removed_; | 1031 int pointers_removed_; |
| (...skipping 10 matching lines...) Expand all Loading... |
| 1003 return object; | 1042 return object; |
| 1004 } else { | 1043 } else { |
| 1005 return NULL; | 1044 return NULL; |
| 1006 } | 1045 } |
| 1007 } | 1046 } |
| 1008 }; | 1047 }; |
| 1009 | 1048 |
| 1010 | 1049 |
// Marks a previously unmarked object and schedules its body for
// scanning.  Maps get special treatment: their code caches may be
// cleared, and JS-object maps can have their contents marked eagerly so
// dead transitions can later be collected.
void MarkCompactCollector::MarkUnmarkedObject(HeapObject* object) {
  ASSERT(!object->IsMarked());
  ASSERT(HEAP->Contains(object));
  if (object->IsMap()) {
    Map* map = Map::cast(object);
    if (FLAG_cleanup_caches_in_maps_at_gc) {
      map->ClearCodeCache(heap_);
    }
    SetMark(map);
    // Only maps of JS objects (up to and including functions) take the
    // eager MarkMapContents path when map collection is enabled.
    if (FLAG_collect_maps &&
        map->instance_type() >= FIRST_JS_OBJECT_TYPE &&
        map->instance_type() <= JS_FUNCTION_TYPE) {
      MarkMapContents(map);
    } else {
      marking_stack_.Push(map);
    }
  } else {
    SetMark(object);
    marking_stack_.Push(object);
  }
}
| 1032 | 1071 |
| 1033 | 1072 |
// Marks the contents of a Map: first its descriptor array (via the
// specialized routine), then the map's remaining pointer fields.
void MarkCompactCollector::MarkMapContents(Map* map) {
  MarkDescriptorArray(reinterpret_cast<DescriptorArray*>(
      *HeapObject::RawField(map, Map::kInstanceDescriptorsOffset)));

  // Mark the Object* fields of the Map.
  // Since the descriptor array has been marked already, it is fine
  // that one of these fields contains a pointer to it.
  Object** start_slot = HeapObject::RawField(map,
                                             Map::kPointerFieldsBeginOffset);

  Object** end_slot = HeapObject::RawField(map, Map::kPointerFieldsEndOffset);

  StaticMarkingVisitor::VisitPointers(map->heap(), start_slot, end_slot);
}
| 1048 | 1087 |
| 1049 | 1088 |
// Marks a DescriptorArray and its contents array, pushing only the
// non-transition descriptor values onto the marking stack so dead map
// transitions can be cleared later.
void MarkCompactCollector::MarkDescriptorArray(
    DescriptorArray* descriptors) {
  if (descriptors->IsMarked()) return;
  // Empty descriptor array is marked as a root before any maps are marked.
  ASSERT(descriptors != HEAP->raw_unchecked_empty_descriptor_array());
  SetMark(descriptors);

  FixedArray* contents = reinterpret_cast<FixedArray*>(
      descriptors->get(DescriptorArray::kContentArrayIndex));
  ASSERT(contents->IsHeapObject());
  ASSERT(!contents->IsMarked());
  ASSERT(contents->IsFixedArray());
  ASSERT(contents->length() >= 2);
  SetMark(contents);
  // Contents contains (value, details) pairs.  If the details say that
  // the type of descriptor is MAP_TRANSITION, CONSTANT_TRANSITION, or
  // NULL_DESCRIPTOR, we don't mark the value as live.  Only for
  // MAP_TRANSITION and CONSTANT_TRANSITION is the value an Object* (a
  // Map*).
  for (int i = 0; i < contents->length(); i += 2) {
    // If the pair (value, details) at index i, i+1 is not
    // a transition or null descriptor, mark the value.
    PropertyDetails details(Smi::cast(contents->get(i + 1)));
    if (details.type() < FIRST_PHANTOM_PROPERTY_TYPE) {
      HeapObject* object = reinterpret_cast<HeapObject*>(contents->get(i));
      // Smis fail the IsHeapObject() test and are skipped here.
      if (object->IsHeapObject() && !object->IsMarked()) {
        SetMark(object);
        marking_stack_.Push(object);
      }
    }
  }
  // The DescriptorArray descriptors contains a pointer to its contents array,
  // but the contents array is already marked.
  marking_stack_.Push(descriptors);
}
| 1085 | 1124 |
| 1086 | 1125 |
| 1087 void MarkCompactCollector::CreateBackPointers() { | 1126 void MarkCompactCollector::CreateBackPointers() { |
| 1088 HeapObjectIterator iterator(Heap::map_space()); | 1127 HeapObjectIterator iterator(HEAP->map_space()); |
| 1089 for (HeapObject* next_object = iterator.next(); | 1128 for (HeapObject* next_object = iterator.next(); |
| 1090 next_object != NULL; next_object = iterator.next()) { | 1129 next_object != NULL; next_object = iterator.next()) { |
| 1091 if (next_object->IsMap()) { // Could also be ByteArray on free list. | 1130 if (next_object->IsMap()) { // Could also be ByteArray on free list. |
| 1092 Map* map = Map::cast(next_object); | 1131 Map* map = Map::cast(next_object); |
| 1093 if (map->instance_type() >= FIRST_JS_OBJECT_TYPE && | 1132 if (map->instance_type() >= FIRST_JS_OBJECT_TYPE && |
| 1094 map->instance_type() <= JS_FUNCTION_TYPE) { | 1133 map->instance_type() <= JS_FUNCTION_TYPE) { |
| 1095 map->CreateBackPointers(); | 1134 map->CreateBackPointers(); |
| 1096 } else { | 1135 } else { |
| 1097 ASSERT(map->instance_descriptors() == Heap::empty_descriptor_array()); | 1136 ASSERT(map->instance_descriptors() == HEAP->empty_descriptor_array()); |
| 1098 } | 1137 } |
| 1099 } | 1138 } |
| 1100 } | 1139 } |
| 1101 } | 1140 } |
| 1102 | 1141 |
| 1103 | 1142 |
| 1104 static int OverflowObjectSize(HeapObject* obj) { | 1143 static int OverflowObjectSize(HeapObject* obj) { |
| 1105 // Recover the normal map pointer, it might be marked as live and | 1144 // Recover the normal map pointer, it might be marked as live and |
| 1106 // overflowed. | 1145 // overflowed. |
| 1107 MapWord map_word = obj->map_word(); | 1146 MapWord map_word = obj->map_word(); |
| 1108 map_word.ClearMark(); | 1147 map_word.ClearMark(); |
| 1109 map_word.ClearOverflow(); | 1148 map_word.ClearOverflow(); |
| 1110 return obj->SizeFromMap(map_word.ToMap()); | 1149 return obj->SizeFromMap(map_word.ToMap()); |
| 1111 } | 1150 } |
| 1112 | 1151 |
| 1113 | 1152 |
| 1114 // Fill the marking stack with overflowed objects returned by the given | 1153 class OverflowedObjectsScanner : public AllStatic { |
| 1115 // iterator. Stop when the marking stack is filled or the end of the space | 1154 public: |
| 1116 // is reached, whichever comes first. | 1155 // Fill the marking stack with overflowed objects returned by the given |
| 1117 template<class T> | 1156 // iterator. Stop when the marking stack is filled or the end of the space |
| 1118 static void ScanOverflowedObjects(T* it) { | 1157 // is reached, whichever comes first. |
| 1119 // The caller should ensure that the marking stack is initially not full, | 1158 template<class T> |
| 1120 // so that we don't waste effort pointlessly scanning for objects. | 1159 static inline void ScanOverflowedObjects(MarkCompactCollector* collector, |
| 1121 ASSERT(!marking_stack.is_full()); | 1160 T* it) { |
| 1161 // The caller should ensure that the marking stack is initially not full, |
| 1162 // so that we don't waste effort pointlessly scanning for objects. |
| 1163 ASSERT(!collector->marking_stack_.is_full()); |
| 1122 | 1164 |
| 1123 for (HeapObject* object = it->next(); object != NULL; object = it->next()) { | 1165 for (HeapObject* object = it->next(); object != NULL; object = it->next()) { |
| 1124 if (object->IsOverflowed()) { | 1166 if (object->IsOverflowed()) { |
| 1125 object->ClearOverflow(); | 1167 object->ClearOverflow(); |
| 1126 ASSERT(object->IsMarked()); | 1168 ASSERT(object->IsMarked()); |
| 1127 ASSERT(Heap::Contains(object)); | 1169 ASSERT(HEAP->Contains(object)); |
| 1128 marking_stack.Push(object); | 1170 collector->marking_stack_.Push(object); |
| 1129 if (marking_stack.is_full()) return; | 1171 if (collector->marking_stack_.is_full()) return; |
| 1172 } |
| 1130 } | 1173 } |
| 1131 } | 1174 } |
| 1132 } | 1175 }; |
| 1133 | 1176 |
| 1134 | 1177 |
| 1135 bool MarkCompactCollector::IsUnmarkedHeapObject(Object** p) { | 1178 bool MarkCompactCollector::IsUnmarkedHeapObject(Object** p) { |
| 1136 return (*p)->IsHeapObject() && !HeapObject::cast(*p)->IsMarked(); | 1179 return (*p)->IsHeapObject() && !HeapObject::cast(*p)->IsMarked(); |
| 1137 } | 1180 } |
| 1138 | 1181 |
| 1139 | 1182 |
// Marks the symbol table and its prefix; the symbol elements themselves are
// not marked here, so unreferenced symbols can be pruned after marking.
void MarkCompactCollector::MarkSymbolTable() {
  SymbolTable* symbol_table = heap_->raw_unchecked_symbol_table();
  // Mark the symbol table itself.
  SetMark(symbol_table);
  // Explicitly mark the prefix.
  MarkingVisitor marker(heap_);
  symbol_table->IteratePrefix(&marker);
  ProcessMarkingStack();
}
| 1149 | 1192 |
| 1150 | 1193 |
| 1151 void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) { | 1194 void MarkCompactCollector::MarkRoots(RootMarkingVisitor* visitor) { |
| 1152 // Mark the heap roots including global variables, stack variables, | 1195 // Mark the heap roots including global variables, stack variables, |
| 1153 // etc., and all objects reachable from them. | 1196 // etc., and all objects reachable from them. |
| 1154 Heap::IterateStrongRoots(visitor, VISIT_ONLY_STRONG); | 1197 HEAP->IterateStrongRoots(visitor, VISIT_ONLY_STRONG); |
| 1155 | 1198 |
| 1156 // Handle the symbol table specially. | 1199 // Handle the symbol table specially. |
| 1157 MarkSymbolTable(); | 1200 MarkSymbolTable(); |
| 1158 | 1201 |
| 1159 // There may be overflowed objects in the heap. Visit them now. | 1202 // There may be overflowed objects in the heap. Visit them now. |
| 1160 while (marking_stack.overflowed()) { | 1203 while (marking_stack_.overflowed()) { |
| 1161 RefillMarkingStack(); | 1204 RefillMarkingStack(); |
| 1162 EmptyMarkingStack(); | 1205 EmptyMarkingStack(); |
| 1163 } | 1206 } |
| 1164 } | 1207 } |
| 1165 | 1208 |
| 1166 | 1209 |
| 1167 void MarkCompactCollector::MarkObjectGroups() { | 1210 void MarkCompactCollector::MarkObjectGroups() { |
| 1168 List<ObjectGroup*>* object_groups = GlobalHandles::ObjectGroups(); | 1211 List<ObjectGroup*>* object_groups = |
| 1212 heap_->isolate()->global_handles()->object_groups(); |
| 1169 | 1213 |
| 1170 for (int i = 0; i < object_groups->length(); i++) { | 1214 for (int i = 0; i < object_groups->length(); i++) { |
| 1171 ObjectGroup* entry = object_groups->at(i); | 1215 ObjectGroup* entry = object_groups->at(i); |
| 1172 if (entry == NULL) continue; | 1216 if (entry == NULL) continue; |
| 1173 | 1217 |
| 1174 List<Object**>& objects = entry->objects_; | 1218 List<Object**>& objects = entry->objects_; |
| 1175 bool group_marked = false; | 1219 bool group_marked = false; |
| 1176 for (int j = 0; j < objects.length(); j++) { | 1220 for (int j = 0; j < objects.length(); j++) { |
| 1177 Object* object = *objects[j]; | 1221 Object* object = *objects[j]; |
| 1178 if (object->IsHeapObject() && HeapObject::cast(object)->IsMarked()) { | 1222 if (object->IsHeapObject() && HeapObject::cast(object)->IsMarked()) { |
| (...skipping 14 matching lines...) Expand all Loading... |
| 1193 | 1237 |
| 1194 // Once the entire group has been colored gray, set the object group | 1238 // Once the entire group has been colored gray, set the object group |
| 1195 // to NULL so it won't be processed again. | 1239 // to NULL so it won't be processed again. |
| 1196 delete entry; | 1240 delete entry; |
| 1197 object_groups->at(i) = NULL; | 1241 object_groups->at(i) = NULL; |
| 1198 } | 1242 } |
| 1199 } | 1243 } |
| 1200 | 1244 |
| 1201 | 1245 |
| 1202 void MarkCompactCollector::MarkImplicitRefGroups() { | 1246 void MarkCompactCollector::MarkImplicitRefGroups() { |
| 1203 List<ImplicitRefGroup*>* ref_groups = GlobalHandles::ImplicitRefGroups(); | 1247 List<ImplicitRefGroup*>* ref_groups = |
| 1248 heap_->isolate()->global_handles()->implicit_ref_groups(); |
| 1204 | 1249 |
| 1205 for (int i = 0; i < ref_groups->length(); i++) { | 1250 for (int i = 0; i < ref_groups->length(); i++) { |
| 1206 ImplicitRefGroup* entry = ref_groups->at(i); | 1251 ImplicitRefGroup* entry = ref_groups->at(i); |
| 1207 if (entry == NULL) continue; | 1252 if (entry == NULL) continue; |
| 1208 | 1253 |
| 1209 if (!entry->parent_->IsMarked()) continue; | 1254 if (!entry->parent_->IsMarked()) continue; |
| 1210 | 1255 |
| 1211 List<Object**>& children = entry->children_; | 1256 List<Object**>& children = entry->children_; |
| 1212 // A parent object is marked, so mark as gray all child white heap | 1257 // A parent object is marked, so mark as gray all child white heap |
| 1213 // objects. | 1258 // objects. |
| 1214 for (int j = 0; j < children.length(); ++j) { | 1259 for (int j = 0; j < children.length(); ++j) { |
| 1215 if ((*children[j])->IsHeapObject()) { | 1260 if ((*children[j])->IsHeapObject()) { |
| 1216 MarkObject(HeapObject::cast(*children[j])); | 1261 MarkObject(HeapObject::cast(*children[j])); |
| 1217 } | 1262 } |
| 1218 } | 1263 } |
| 1219 | 1264 |
| 1220 // Once the entire group has been colored gray, set the group | 1265 // Once the entire group has been colored gray, set the group |
| 1221 // to NULL so it won't be processed again. | 1266 // to NULL so it won't be processed again. |
| 1222 delete entry; | 1267 delete entry; |
| 1223 ref_groups->at(i) = NULL; | 1268 ref_groups->at(i) = NULL; |
| 1224 } | 1269 } |
| 1225 } | 1270 } |
| 1226 | 1271 |
| 1227 | 1272 |
// Mark all objects reachable from the objects on the marking stack.
// Before: the marking stack contains zero or more heap object pointers.
// After: the marking stack is empty, and all objects reachable from the
// marking stack have been marked, or are overflowed in the heap.
void MarkCompactCollector::EmptyMarkingStack() {
  while (!marking_stack_.is_empty()) {
    HeapObject* object = marking_stack_.Pop();
    ASSERT(object->IsHeapObject());
    ASSERT(heap_->Contains(object));
    ASSERT(object->IsMarked());
    ASSERT(!object->IsOverflowed());

    // Because the object is marked, we have to recover the original map
    // pointer and use it to mark the object's body.
    MapWord map_word = object->map_word();
    map_word.ClearMark();
    Map* map = map_word.ToMap();
    // The map itself must be kept alive as well.
    MarkObject(map);

    // Visiting the body may push more gray objects onto the stack.
    StaticMarkingVisitor::IterateBody(map, object);
  }
}
| 1250 | 1295 |
| 1251 | 1296 |
| 1252 // Sweep the heap for overflowed objects, clear their overflow bits, and | 1297 // Sweep the heap for overflowed objects, clear their overflow bits, and |
| 1253 // push them on the marking stack. Stop early if the marking stack fills | 1298 // push them on the marking stack. Stop early if the marking stack fills |
| 1254 // before sweeping completes. If sweeping completes, there are no remaining | 1299 // before sweeping completes. If sweeping completes, there are no remaining |
| 1255 // overflowed objects in the heap so the overflow flag on the markings stack | 1300 // overflowed objects in the heap so the overflow flag on the markings stack |
| 1256 // is cleared. | 1301 // is cleared. |
| 1257 void MarkCompactCollector::RefillMarkingStack() { | 1302 void MarkCompactCollector::RefillMarkingStack() { |
| 1258 ASSERT(marking_stack.overflowed()); | 1303 ASSERT(marking_stack_.overflowed()); |
| 1259 | 1304 |
| 1260 SemiSpaceIterator new_it(Heap::new_space(), &OverflowObjectSize); | 1305 SemiSpaceIterator new_it(HEAP->new_space(), &OverflowObjectSize); |
| 1261 ScanOverflowedObjects(&new_it); | 1306 OverflowedObjectsScanner::ScanOverflowedObjects(this, &new_it); |
| 1262 if (marking_stack.is_full()) return; | 1307 if (marking_stack_.is_full()) return; |
| 1263 | 1308 |
| 1264 HeapObjectIterator old_pointer_it(Heap::old_pointer_space(), | 1309 HeapObjectIterator old_pointer_it(HEAP->old_pointer_space(), |
| 1265 &OverflowObjectSize); | 1310 &OverflowObjectSize); |
| 1266 ScanOverflowedObjects(&old_pointer_it); | 1311 OverflowedObjectsScanner::ScanOverflowedObjects(this, &old_pointer_it); |
| 1267 if (marking_stack.is_full()) return; | 1312 if (marking_stack_.is_full()) return; |
| 1268 | 1313 |
| 1269 HeapObjectIterator old_data_it(Heap::old_data_space(), &OverflowObjectSize); | 1314 HeapObjectIterator old_data_it(HEAP->old_data_space(), &OverflowObjectSize); |
| 1270 ScanOverflowedObjects(&old_data_it); | 1315 OverflowedObjectsScanner::ScanOverflowedObjects(this, &old_data_it); |
| 1271 if (marking_stack.is_full()) return; | 1316 if (marking_stack_.is_full()) return; |
| 1272 | 1317 |
| 1273 HeapObjectIterator code_it(Heap::code_space(), &OverflowObjectSize); | 1318 HeapObjectIterator code_it(HEAP->code_space(), &OverflowObjectSize); |
| 1274 ScanOverflowedObjects(&code_it); | 1319 OverflowedObjectsScanner::ScanOverflowedObjects(this, &code_it); |
| 1275 if (marking_stack.is_full()) return; | 1320 if (marking_stack_.is_full()) return; |
| 1276 | 1321 |
| 1277 HeapObjectIterator map_it(Heap::map_space(), &OverflowObjectSize); | 1322 HeapObjectIterator map_it(HEAP->map_space(), &OverflowObjectSize); |
| 1278 ScanOverflowedObjects(&map_it); | 1323 OverflowedObjectsScanner::ScanOverflowedObjects(this, &map_it); |
| 1279 if (marking_stack.is_full()) return; | 1324 if (marking_stack_.is_full()) return; |
| 1280 | 1325 |
| 1281 HeapObjectIterator cell_it(Heap::cell_space(), &OverflowObjectSize); | 1326 HeapObjectIterator cell_it(HEAP->cell_space(), &OverflowObjectSize); |
| 1282 ScanOverflowedObjects(&cell_it); | 1327 OverflowedObjectsScanner::ScanOverflowedObjects(this, &cell_it); |
| 1283 if (marking_stack.is_full()) return; | 1328 if (marking_stack_.is_full()) return; |
| 1284 | 1329 |
| 1285 LargeObjectIterator lo_it(Heap::lo_space(), &OverflowObjectSize); | 1330 LargeObjectIterator lo_it(HEAP->lo_space(), &OverflowObjectSize); |
| 1286 ScanOverflowedObjects(&lo_it); | 1331 OverflowedObjectsScanner::ScanOverflowedObjects(this, &lo_it); |
| 1287 if (marking_stack.is_full()) return; | 1332 if (marking_stack_.is_full()) return; |
| 1288 | 1333 |
| 1289 marking_stack.clear_overflowed(); | 1334 marking_stack_.clear_overflowed(); |
| 1290 } | 1335 } |
| 1291 | 1336 |
| 1292 | 1337 |
| 1293 // Mark all objects reachable (transitively) from objects on the marking | 1338 // Mark all objects reachable (transitively) from objects on the marking |
| 1294 // stack. Before: the marking stack contains zero or more heap object | 1339 // stack. Before: the marking stack contains zero or more heap object |
| 1295 // pointers. After: the marking stack is empty and there are no overflowed | 1340 // pointers. After: the marking stack is empty and there are no overflowed |
| 1296 // objects in the heap. | 1341 // objects in the heap. |
| 1297 void MarkCompactCollector::ProcessMarkingStack() { | 1342 void MarkCompactCollector::ProcessMarkingStack() { |
| 1298 EmptyMarkingStack(); | 1343 EmptyMarkingStack(); |
| 1299 while (marking_stack.overflowed()) { | 1344 while (marking_stack_.overflowed()) { |
| 1300 RefillMarkingStack(); | 1345 RefillMarkingStack(); |
| 1301 EmptyMarkingStack(); | 1346 EmptyMarkingStack(); |
| 1302 } | 1347 } |
| 1303 } | 1348 } |
| 1304 | 1349 |
| 1305 | 1350 |
| 1306 void MarkCompactCollector::ProcessExternalMarking() { | 1351 void MarkCompactCollector::ProcessExternalMarking() { |
| 1307 bool work_to_do = true; | 1352 bool work_to_do = true; |
| 1308 ASSERT(marking_stack.is_empty()); | 1353 ASSERT(marking_stack_.is_empty()); |
| 1309 while (work_to_do) { | 1354 while (work_to_do) { |
| 1310 MarkObjectGroups(); | 1355 MarkObjectGroups(); |
| 1311 MarkImplicitRefGroups(); | 1356 MarkImplicitRefGroups(); |
| 1312 work_to_do = !marking_stack.is_empty(); | 1357 work_to_do = !marking_stack_.is_empty(); |
| 1313 ProcessMarkingStack(); | 1358 ProcessMarkingStack(); |
| 1314 } | 1359 } |
| 1315 } | 1360 } |
| 1316 | 1361 |
| 1317 | 1362 |
// Driver for the marking phase: marks everything transitively reachable
// from strong roots, object groups, and weak handles, then prunes the
// symbol table, weak references, and code-flush candidates.  The steps
// below are order-dependent.
void MarkCompactCollector::MarkLiveObjects() {
  GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_MARK);
  // The recursive GC marker detects when it is nearing stack overflow,
  // and switches to a different marking system.  JS interrupts interfere
  // with the C stack limit check.
  PostponeInterruptsScope postpone(heap_->isolate());

#ifdef DEBUG
  ASSERT(state_ == PREPARE_GC);
  state_ = MARK_LIVE_OBJECTS;
#endif
  // The to space contains live objects, the from space is used as a marking
  // stack.
  marking_stack_.Initialize(heap_->new_space()->FromSpaceLow(),
                            heap_->new_space()->FromSpaceHigh());

  ASSERT(!marking_stack_.overflowed());

  PrepareForCodeFlushing();

  RootMarkingVisitor root_visitor(heap_);
  MarkRoots(&root_visitor);

  // The objects reachable from the roots are marked, yet unreachable
  // objects are unmarked.  Mark objects reachable due to host
  // application specific logic.
  ProcessExternalMarking();

  // The objects reachable from the roots or object groups are marked,
  // yet unreachable objects are unmarked.  Mark objects reachable
  // only from weak global handles.
  //
  // First we identify nonlive weak handles and mark them as pending
  // destruction.
  heap_->isolate()->global_handles()->IdentifyWeakHandles(
      &IsUnmarkedHeapObject);
  // Then we mark the objects and process the transitive closure.
  heap_->isolate()->global_handles()->IterateWeakRoots(&root_visitor);
  while (marking_stack_.overflowed()) {
    RefillMarkingStack();
    EmptyMarkingStack();
  }

  // Repeat host application specific marking to mark unmarked objects
  // reachable from the weak roots.
  ProcessExternalMarking();

  // Prune the symbol table removing all symbols only pointed to by the
  // symbol table.  Cannot use symbol_table() here because the symbol
  // table is marked.
  SymbolTable* symbol_table = heap_->raw_unchecked_symbol_table();
  SymbolTableCleaner v;
  symbol_table->IterateElements(&v);
  symbol_table->ElementsRemoved(v.PointersRemoved());
  // External strings whose symbols died must release their resources too.
  heap_->external_string_table_.Iterate(&v);
  heap_->external_string_table_.CleanUp();

  // Process the weak references.
  MarkCompactWeakObjectRetainer mark_compact_object_retainer;
  heap_->ProcessWeakReferences(&mark_compact_object_retainer);

  // Remove object groups after marking phase.
  heap_->isolate()->global_handles()->RemoveObjectGroups();
  heap_->isolate()->global_handles()->RemoveImplicitRefGroups();

  // Flush code from collected candidates.
  if (is_code_flushing_enabled()) {
    code_flusher_->ProcessCandidates();
  }

  // Clean up dead objects from the runtime profiler.
  heap_->isolate()->runtime_profiler()->RemoveDeadSamples();
}
| 1388 | 1436 |
| 1389 | 1437 |
#ifdef DEBUG
// Debug-only bookkeeping: attributes |obj|'s size to the per-space
// live-object counters, dispatching on which space contains it.
void MarkCompactCollector::UpdateLiveObjectCount(HeapObject* obj) {
  live_bytes_ += obj->Size();
  // Use the collector's heap_ member (consistent with the rest of the
  // class) instead of the HEAP macro, which re-resolves the current
  // isolate through TLS on every one of these Contains() probes.
  if (heap_->new_space()->Contains(obj)) {
    live_young_objects_size_ += obj->Size();
  } else if (heap_->map_space()->Contains(obj)) {
    ASSERT(obj->IsMap());
    live_map_objects_size_ += obj->Size();
  } else if (heap_->cell_space()->Contains(obj)) {
    ASSERT(obj->IsJSGlobalPropertyCell());
    live_cell_objects_size_ += obj->Size();
  } else if (heap_->old_pointer_space()->Contains(obj)) {
    live_old_pointer_objects_size_ += obj->Size();
  } else if (heap_->old_data_space()->Contains(obj)) {
    live_old_data_objects_size_ += obj->Size();
  } else if (heap_->code_space()->Contains(obj)) {
    live_code_objects_size_ += obj->Size();
  } else if (heap_->lo_space()->Contains(obj)) {
    live_lo_objects_size_ += obj->Size();
  } else {
    // Every live object must belong to exactly one space.
    UNREACHABLE();
  }
}
#endif  // DEBUG
| 1414 | 1462 |
| 1415 | 1463 |
| 1416 void MarkCompactCollector::SweepLargeObjectSpace() { | 1464 void MarkCompactCollector::SweepLargeObjectSpace() { |
| 1417 #ifdef DEBUG | 1465 #ifdef DEBUG |
| 1418 ASSERT(state_ == MARK_LIVE_OBJECTS); | 1466 ASSERT(state_ == MARK_LIVE_OBJECTS); |
| 1419 state_ = | 1467 state_ = |
| 1420 compacting_collection_ ? ENCODE_FORWARDING_ADDRESSES : SWEEP_SPACES; | 1468 compacting_collection_ ? ENCODE_FORWARDING_ADDRESSES : SWEEP_SPACES; |
| 1421 #endif | 1469 #endif |
| 1422 // Deallocate unmarked objects and clear marked bits for marked objects. | 1470 // Deallocate unmarked objects and clear marked bits for marked objects. |
| 1423 Heap::lo_space()->FreeUnmarkedObjects(); | 1471 HEAP->lo_space()->FreeUnmarkedObjects(); |
| 1424 } | 1472 } |
| 1425 | 1473 |
| 1426 | 1474 |
| 1427 // Safe to use during marking phase only. | 1475 // Safe to use during marking phase only. |
| 1428 bool MarkCompactCollector::SafeIsMap(HeapObject* object) { | 1476 bool MarkCompactCollector::SafeIsMap(HeapObject* object) { |
| 1429 MapWord metamap = object->map_word(); | 1477 MapWord metamap = object->map_word(); |
| 1430 metamap.ClearMark(); | 1478 metamap.ClearMark(); |
| 1431 return metamap.ToMap()->instance_type() == MAP_TYPE; | 1479 return metamap.ToMap()->instance_type() == MAP_TYPE; |
| 1432 } | 1480 } |
| 1433 | 1481 |
| 1434 | 1482 |
| 1435 void MarkCompactCollector::ClearNonLiveTransitions() { | 1483 void MarkCompactCollector::ClearNonLiveTransitions() { |
| 1436 HeapObjectIterator map_iterator(Heap::map_space(), &SizeOfMarkedObject); | 1484 HeapObjectIterator map_iterator(HEAP->map_space(), &SizeOfMarkedObject); |
| 1437 // Iterate over the map space, setting map transitions that go from | 1485 // Iterate over the map space, setting map transitions that go from |
| 1438 // a marked map to an unmarked map to null transitions. At the same time, | 1486 // a marked map to an unmarked map to null transitions. At the same time, |
| 1439 // set all the prototype fields of maps back to their original value, | 1487 // set all the prototype fields of maps back to their original value, |
| 1440 // dropping the back pointers temporarily stored in the prototype field. | 1488 // dropping the back pointers temporarily stored in the prototype field. |
| 1441 // Setting the prototype field requires following the linked list of | 1489 // Setting the prototype field requires following the linked list of |
| 1442 // back pointers, reversing them all at once. This allows us to find | 1490 // back pointers, reversing them all at once. This allows us to find |
| 1443 // those maps with map transitions that need to be nulled, and only | 1491 // those maps with map transitions that need to be nulled, and only |
| 1444 // scan the descriptor arrays of those maps, not all maps. | 1492 // scan the descriptor arrays of those maps, not all maps. |
| 1445 // All of these actions are carried out only on maps of JSObjects | 1493 // All of these actions are carried out only on maps of JSObjects |
| 1446 // and related subtypes. | 1494 // and related subtypes. |
| (...skipping 29 matching lines...) Expand all Loading... |
| 1476 Object* next; | 1524 Object* next; |
| 1477 while (SafeIsMap(current)) { | 1525 while (SafeIsMap(current)) { |
| 1478 next = current->prototype(); | 1526 next = current->prototype(); |
| 1479 // There should never be a dead map above a live map. | 1527 // There should never be a dead map above a live map. |
| 1480 ASSERT(on_dead_path || current->IsMarked()); | 1528 ASSERT(on_dead_path || current->IsMarked()); |
| 1481 | 1529 |
| 1482 // A live map above a dead map indicates a dead transition. | 1530 // A live map above a dead map indicates a dead transition. |
| 1483 // This test will always be false on the first iteration. | 1531 // This test will always be false on the first iteration. |
| 1484 if (on_dead_path && current->IsMarked()) { | 1532 if (on_dead_path && current->IsMarked()) { |
| 1485 on_dead_path = false; | 1533 on_dead_path = false; |
| 1486 current->ClearNonLiveTransitions(real_prototype); | 1534 current->ClearNonLiveTransitions(heap_, real_prototype); |
| 1487 } | 1535 } |
| 1488 *HeapObject::RawField(current, Map::kPrototypeOffset) = | 1536 *HeapObject::RawField(current, Map::kPrototypeOffset) = |
| 1489 real_prototype; | 1537 real_prototype; |
| 1490 current = reinterpret_cast<Map*>(next); | 1538 current = reinterpret_cast<Map*>(next); |
| 1491 } | 1539 } |
| 1492 } | 1540 } |
| 1493 } | 1541 } |
| 1494 | 1542 |
| 1495 // ------------------------------------------------------------------------- | 1543 // ------------------------------------------------------------------------- |
| 1496 // Phase 2: Encode forwarding addresses. | 1544 // Phase 2: Encode forwarding addresses. |
| (...skipping 41 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1538 Memory::Address_at(free_start + offset) = kZapValue; | 1586 Memory::Address_at(free_start + offset) = kZapValue; |
| 1539 } | 1587 } |
| 1540 } | 1588 } |
| 1541 #endif | 1589 #endif |
| 1542 } | 1590 } |
| 1543 | 1591 |
| 1544 | 1592 |
| 1545 // Try to promote all objects in new space. Heap numbers and sequential | 1593 // Try to promote all objects in new space. Heap numbers and sequential |
| 1546 // strings are promoted to the code space, large objects to large object space, | 1594 // strings are promoted to the code space, large objects to large object space, |
| 1547 // and all others to the old space. | 1595 // and all others to the old space. |
| 1548 inline MaybeObject* MCAllocateFromNewSpace(HeapObject* object, | 1596 inline MaybeObject* MCAllocateFromNewSpace(Heap* heap, |
| 1597 HeapObject* object, |
| 1549 int object_size) { | 1598 int object_size) { |
| 1550 MaybeObject* forwarded; | 1599 MaybeObject* forwarded; |
| 1551 if (object_size > Heap::MaxObjectSizeInPagedSpace()) { | 1600 if (object_size > heap->MaxObjectSizeInPagedSpace()) { |
| 1552 forwarded = Failure::Exception(); | 1601 forwarded = Failure::Exception(); |
| 1553 } else { | 1602 } else { |
| 1554 OldSpace* target_space = Heap::TargetSpace(object); | 1603 OldSpace* target_space = heap->TargetSpace(object); |
| 1555 ASSERT(target_space == Heap::old_pointer_space() || | 1604 ASSERT(target_space == heap->old_pointer_space() || |
| 1556 target_space == Heap::old_data_space()); | 1605 target_space == heap->old_data_space()); |
| 1557 forwarded = target_space->MCAllocateRaw(object_size); | 1606 forwarded = target_space->MCAllocateRaw(object_size); |
| 1558 } | 1607 } |
| 1559 Object* result; | 1608 Object* result; |
| 1560 if (!forwarded->ToObject(&result)) { | 1609 if (!forwarded->ToObject(&result)) { |
| 1561 result = Heap::new_space()->MCAllocateRaw(object_size)->ToObjectUnchecked(); | 1610 result = heap->new_space()->MCAllocateRaw(object_size)->ToObjectUnchecked(); |
| 1562 } | 1611 } |
| 1563 return result; | 1612 return result; |
| 1564 } | 1613 } |
| 1565 | 1614 |
| 1566 | 1615 |
| 1567 // Allocation functions for the paged spaces call the space's MCAllocateRaw. | 1616 // Allocation functions for the paged spaces call the space's MCAllocateRaw. |
| 1568 MUST_USE_RESULT inline MaybeObject* MCAllocateFromOldPointerSpace( | 1617 MUST_USE_RESULT inline MaybeObject* MCAllocateFromOldPointerSpace( |
| 1618 Heap *heap, |
| 1569 HeapObject* ignore, | 1619 HeapObject* ignore, |
| 1570 int object_size) { | 1620 int object_size) { |
| 1571 return Heap::old_pointer_space()->MCAllocateRaw(object_size); | 1621 return heap->old_pointer_space()->MCAllocateRaw(object_size); |
| 1572 } | 1622 } |
| 1573 | 1623 |
| 1574 | 1624 |
| 1575 MUST_USE_RESULT inline MaybeObject* MCAllocateFromOldDataSpace( | 1625 MUST_USE_RESULT inline MaybeObject* MCAllocateFromOldDataSpace( |
| 1626 Heap* heap, |
| 1576 HeapObject* ignore, | 1627 HeapObject* ignore, |
| 1577 int object_size) { | 1628 int object_size) { |
| 1578 return Heap::old_data_space()->MCAllocateRaw(object_size); | 1629 return heap->old_data_space()->MCAllocateRaw(object_size); |
| 1579 } | 1630 } |
| 1580 | 1631 |
| 1581 | 1632 |
| 1582 MUST_USE_RESULT inline MaybeObject* MCAllocateFromCodeSpace( | 1633 MUST_USE_RESULT inline MaybeObject* MCAllocateFromCodeSpace( |
| 1634 Heap* heap, |
| 1583 HeapObject* ignore, | 1635 HeapObject* ignore, |
| 1584 int object_size) { | 1636 int object_size) { |
| 1585 return Heap::code_space()->MCAllocateRaw(object_size); | 1637 return heap->code_space()->MCAllocateRaw(object_size); |
| 1586 } | 1638 } |
| 1587 | 1639 |
| 1588 | 1640 |
| 1589 MUST_USE_RESULT inline MaybeObject* MCAllocateFromMapSpace( | 1641 MUST_USE_RESULT inline MaybeObject* MCAllocateFromMapSpace( |
| 1642 Heap* heap, |
| 1590 HeapObject* ignore, | 1643 HeapObject* ignore, |
| 1591 int object_size) { | 1644 int object_size) { |
| 1592 return Heap::map_space()->MCAllocateRaw(object_size); | 1645 return heap->map_space()->MCAllocateRaw(object_size); |
| 1593 } | 1646 } |
| 1594 | 1647 |
| 1595 | 1648 |
| 1596 MUST_USE_RESULT inline MaybeObject* MCAllocateFromCellSpace(HeapObject* ignore, | 1649 MUST_USE_RESULT inline MaybeObject* MCAllocateFromCellSpace( |
| 1597 int object_size) { | 1650 Heap* heap, HeapObject* ignore, int object_size) { |
| 1598 return Heap::cell_space()->MCAllocateRaw(object_size); | 1651 return heap->cell_space()->MCAllocateRaw(object_size); |
| 1599 } | 1652 } |
| 1600 | 1653 |
| 1601 | 1654 |
| 1602 // The forwarding address is encoded at the same offset as the current | 1655 // The forwarding address is encoded at the same offset as the current |
| 1603 // to-space object, but in from space. | 1656 // to-space object, but in from space. |
| 1604 inline void EncodeForwardingAddressInNewSpace(HeapObject* old_object, | 1657 inline void EncodeForwardingAddressInNewSpace(Heap* heap, |
| 1658 HeapObject* old_object, |
| 1605 int object_size, | 1659 int object_size, |
| 1606 Object* new_object, | 1660 Object* new_object, |
| 1607 int* ignored) { | 1661 int* ignored) { |
| 1608 int offset = | 1662 int offset = |
| 1609 Heap::new_space()->ToSpaceOffsetForAddress(old_object->address()); | 1663 heap->new_space()->ToSpaceOffsetForAddress(old_object->address()); |
| 1610 Memory::Address_at(Heap::new_space()->FromSpaceLow() + offset) = | 1664 Memory::Address_at(heap->new_space()->FromSpaceLow() + offset) = |
| 1611 HeapObject::cast(new_object)->address(); | 1665 HeapObject::cast(new_object)->address(); |
| 1612 } | 1666 } |
| 1613 | 1667 |
| 1614 | 1668 |
| 1615 // The forwarding address is encoded in the map pointer of the object as an | 1669 // The forwarding address is encoded in the map pointer of the object as an |
| 1616 // offset (in terms of live bytes) from the address of the first live object | 1670 // offset (in terms of live bytes) from the address of the first live object |
| 1617 // in the page. | 1671 // in the page. |
| 1618 inline void EncodeForwardingAddressInPagedSpace(HeapObject* old_object, | 1672 inline void EncodeForwardingAddressInPagedSpace(Heap* heap, |
| 1673 HeapObject* old_object, |
| 1619 int object_size, | 1674 int object_size, |
| 1620 Object* new_object, | 1675 Object* new_object, |
| 1621 int* offset) { | 1676 int* offset) { |
| 1622 // Record the forwarding address of the first live object if necessary. | 1677 // Record the forwarding address of the first live object if necessary. |
| 1623 if (*offset == 0) { | 1678 if (*offset == 0) { |
| 1624 Page::FromAddress(old_object->address())->mc_first_forwarded = | 1679 Page::FromAddress(old_object->address())->mc_first_forwarded = |
| 1625 HeapObject::cast(new_object)->address(); | 1680 HeapObject::cast(new_object)->address(); |
| 1626 } | 1681 } |
| 1627 | 1682 |
| 1628 MapWord encoding = | 1683 MapWord encoding = |
| (...skipping 12 matching lines...) Expand all Loading... |
| 1641 // paged space page), iterates through the objects in the range to clear | 1696 // paged space page), iterates through the objects in the range to clear |
| 1642 // mark bits and compute and encode forwarding addresses. As a side effect, | 1697 // mark bits and compute and encode forwarding addresses. As a side effect, |
| 1643 // maximal free chunks are marked so that they can be skipped on subsequent | 1698 // maximal free chunks are marked so that they can be skipped on subsequent |
| 1644 // sweeps. | 1699 // sweeps. |
| 1645 // | 1700 // |
| 1646 // The template parameters are an allocation function, a forwarding address | 1701 // The template parameters are an allocation function, a forwarding address |
| 1647 // encoding function, and a function to process non-live objects. | 1702 // encoding function, and a function to process non-live objects. |
| 1648 template<MarkCompactCollector::AllocationFunction Alloc, | 1703 template<MarkCompactCollector::AllocationFunction Alloc, |
| 1649 MarkCompactCollector::EncodingFunction Encode, | 1704 MarkCompactCollector::EncodingFunction Encode, |
| 1650 MarkCompactCollector::ProcessNonLiveFunction ProcessNonLive> | 1705 MarkCompactCollector::ProcessNonLiveFunction ProcessNonLive> |
| 1651 inline void EncodeForwardingAddressesInRange(Address start, | 1706 inline void EncodeForwardingAddressesInRange(MarkCompactCollector* collector, |
| 1707 Address start, |
| 1652 Address end, | 1708 Address end, |
| 1653 int* offset) { | 1709 int* offset) { |
| 1654 // The start address of the current free region while sweeping the space. | 1710 // The start address of the current free region while sweeping the space. |
| 1655 // This address is set when a transition from live to non-live objects is | 1711 // This address is set when a transition from live to non-live objects is |
| 1656 // encountered. A value (an encoding of the 'next free region' pointer) | 1712 // encountered. A value (an encoding of the 'next free region' pointer) |
| 1657 // is written to memory at this address when a transition from non-live to | 1713 // is written to memory at this address when a transition from non-live to |
| 1658 // live objects is encountered. | 1714 // live objects is encountered. |
| 1659 Address free_start = NULL; | 1715 Address free_start = NULL; |
| 1660 | 1716 |
| 1661 // A flag giving the state of the previously swept object. Initially true | 1717 // A flag giving the state of the previously swept object. Initially true |
| 1662 // to ensure that free_start is initialized to a proper address before | 1718 // to ensure that free_start is initialized to a proper address before |
| 1663 // trying to write to it. | 1719 // trying to write to it. |
| 1664 bool is_prev_alive = true; | 1720 bool is_prev_alive = true; |
| 1665 | 1721 |
| 1666 int object_size; // Will be set on each iteration of the loop. | 1722 int object_size; // Will be set on each iteration of the loop. |
| 1667 for (Address current = start; current < end; current += object_size) { | 1723 for (Address current = start; current < end; current += object_size) { |
| 1668 HeapObject* object = HeapObject::FromAddress(current); | 1724 HeapObject* object = HeapObject::FromAddress(current); |
| 1669 if (object->IsMarked()) { | 1725 if (object->IsMarked()) { |
| 1670 object->ClearMark(); | 1726 object->ClearMark(); |
| 1671 MarkCompactCollector::tracer()->decrement_marked_count(); | 1727 collector->tracer()->decrement_marked_count(); |
| 1672 object_size = object->Size(); | 1728 object_size = object->Size(); |
| 1673 | 1729 |
| 1674 // Allocation cannot fail, because we are compacting the space. | 1730 Object* forwarded = |
| 1675 Object* forwarded = Alloc(object, object_size)->ToObjectUnchecked(); | 1731 Alloc(collector->heap(), object, object_size)->ToObjectUnchecked(); |
| 1676 Encode(object, object_size, forwarded, offset); | 1732 Encode(collector->heap(), object, object_size, forwarded, offset); |
| 1677 | 1733 |
| 1678 #ifdef DEBUG | 1734 #ifdef DEBUG |
| 1679 if (FLAG_gc_verbose) { | 1735 if (FLAG_gc_verbose) { |
| 1680 PrintF("forward %p -> %p.\n", object->address(), | 1736 PrintF("forward %p -> %p.\n", object->address(), |
| 1681 HeapObject::cast(forwarded)->address()); | 1737 HeapObject::cast(forwarded)->address()); |
| 1682 } | 1738 } |
| 1683 #endif | 1739 #endif |
| 1684 if (!is_prev_alive) { // Transition from non-live to live. | 1740 if (!is_prev_alive) { // Transition from non-live to live. |
| 1685 EncodeFreeRegion(free_start, static_cast<int>(current - free_start)); | 1741 EncodeFreeRegion(free_start, static_cast<int>(current - free_start)); |
| 1686 is_prev_alive = true; | 1742 is_prev_alive = true; |
| (...skipping 15 matching lines...) Expand all Loading... |
| 1702 } | 1758 } |
| 1703 } | 1759 } |
| 1704 | 1760 |
| 1705 | 1761 |
| 1706 // Functions to encode the forwarding pointers in each compactable space. | 1762 // Functions to encode the forwarding pointers in each compactable space. |
| 1707 void MarkCompactCollector::EncodeForwardingAddressesInNewSpace() { | 1763 void MarkCompactCollector::EncodeForwardingAddressesInNewSpace() { |
| 1708 int ignored; | 1764 int ignored; |
| 1709 EncodeForwardingAddressesInRange<MCAllocateFromNewSpace, | 1765 EncodeForwardingAddressesInRange<MCAllocateFromNewSpace, |
| 1710 EncodeForwardingAddressInNewSpace, | 1766 EncodeForwardingAddressInNewSpace, |
| 1711 IgnoreNonLiveObject>( | 1767 IgnoreNonLiveObject>( |
| 1712 Heap::new_space()->bottom(), | 1768 this, |
| 1713 Heap::new_space()->top(), | 1769 heap_->new_space()->bottom(), |
| 1770 heap_->new_space()->top(), |
| 1714 &ignored); | 1771 &ignored); |
| 1715 } | 1772 } |
| 1716 | 1773 |
| 1717 | 1774 |
| 1718 template<MarkCompactCollector::AllocationFunction Alloc, | 1775 template<MarkCompactCollector::AllocationFunction Alloc, |
| 1719 MarkCompactCollector::ProcessNonLiveFunction ProcessNonLive> | 1776 MarkCompactCollector::ProcessNonLiveFunction ProcessNonLive> |
| 1720 void MarkCompactCollector::EncodeForwardingAddressesInPagedSpace( | 1777 void MarkCompactCollector::EncodeForwardingAddressesInPagedSpace( |
| 1721 PagedSpace* space) { | 1778 PagedSpace* space) { |
| 1722 PageIterator it(space, PageIterator::PAGES_IN_USE); | 1779 PageIterator it(space, PageIterator::PAGES_IN_USE); |
| 1723 while (it.has_next()) { | 1780 while (it.has_next()) { |
| 1724 Page* p = it.next(); | 1781 Page* p = it.next(); |
| 1725 | 1782 |
| 1726 // The offset of each live object in the page from the first live object | 1783 // The offset of each live object in the page from the first live object |
| 1727 // in the page. | 1784 // in the page. |
| 1728 int offset = 0; | 1785 int offset = 0; |
| 1729 EncodeForwardingAddressesInRange<Alloc, | 1786 EncodeForwardingAddressesInRange<Alloc, |
| 1730 EncodeForwardingAddressInPagedSpace, | 1787 EncodeForwardingAddressInPagedSpace, |
| 1731 ProcessNonLive>( | 1788 ProcessNonLive>( |
| 1789 this, |
| 1732 p->ObjectAreaStart(), | 1790 p->ObjectAreaStart(), |
| 1733 p->AllocationTop(), | 1791 p->AllocationTop(), |
| 1734 &offset); | 1792 &offset); |
| 1735 } | 1793 } |
| 1736 } | 1794 } |
| 1737 | 1795 |
| 1738 | 1796 |
| 1739 // We scavenge new space simultaneously with sweeping. This is done in two | 1797 // We scavenge new space simultaneously with sweeping. This is done in two |
| 1740 // passes. | 1798 // passes. |
| 1741 // The first pass migrates all alive objects from one semispace to another or | 1799 // The first pass migrates all alive objects from one semispace to another or |
| 1742 // promotes them to old space. Forwarding address is written directly into | 1800 // promotes them to old space. Forwarding address is written directly into |
| 1743 // first word of object without any encoding. If object is dead we are writing | 1801 // first word of object without any encoding. If object is dead we are writing |
| 1744 // NULL as a forwarding address. | 1802 // NULL as a forwarding address. |
| 1745 // The second pass updates pointers to new space in all spaces. It is possible | 1803 // The second pass updates pointers to new space in all spaces. It is possible |
| 1746 // to encounter pointers to dead objects during traversal of dirty regions we | 1804 // to encounter pointers to dead objects during traversal of dirty regions we |
| 1747 // should clear them to avoid encountering them during next dirty regions | 1805 // should clear them to avoid encountering them during next dirty regions |
| 1748 // iteration. | 1806 // iteration. |
| 1749 static void MigrateObject(Address dst, | 1807 static void MigrateObject(Heap* heap, |
| 1808 Address dst, |
| 1750 Address src, | 1809 Address src, |
| 1751 int size, | 1810 int size, |
| 1752 bool to_old_space) { | 1811 bool to_old_space) { |
| 1753 if (to_old_space) { | 1812 if (to_old_space) { |
| 1754 Heap::CopyBlockToOldSpaceAndUpdateRegionMarks(dst, src, size); | 1813 heap->CopyBlockToOldSpaceAndUpdateRegionMarks(dst, src, size); |
| 1755 } else { | 1814 } else { |
| 1756 Heap::CopyBlock(dst, src, size); | 1815 heap->CopyBlock(dst, src, size); |
| 1757 } | 1816 } |
| 1758 | 1817 |
| 1759 Memory::Address_at(src) = dst; | 1818 Memory::Address_at(src) = dst; |
| 1760 } | 1819 } |
| 1761 | 1820 |
| 1762 | 1821 |
| 1763 class StaticPointersToNewGenUpdatingVisitor : public | 1822 class StaticPointersToNewGenUpdatingVisitor : public |
| 1764 StaticNewSpaceVisitor<StaticPointersToNewGenUpdatingVisitor> { | 1823 StaticNewSpaceVisitor<StaticPointersToNewGenUpdatingVisitor> { |
| 1765 public: | 1824 public: |
| 1766 static inline void VisitPointer(Object** p) { | 1825 static inline void VisitPointer(Heap* heap, Object** p) { |
| 1767 if (!(*p)->IsHeapObject()) return; | 1826 if (!(*p)->IsHeapObject()) return; |
| 1768 | 1827 |
| 1769 HeapObject* obj = HeapObject::cast(*p); | 1828 HeapObject* obj = HeapObject::cast(*p); |
| 1770 Address old_addr = obj->address(); | 1829 Address old_addr = obj->address(); |
| 1771 | 1830 |
| 1772 if (Heap::new_space()->Contains(obj)) { | 1831 if (heap->new_space()->Contains(obj)) { |
| 1773 ASSERT(Heap::InFromSpace(*p)); | 1832 ASSERT(heap->InFromSpace(*p)); |
| 1774 *p = HeapObject::FromAddress(Memory::Address_at(old_addr)); | 1833 *p = HeapObject::FromAddress(Memory::Address_at(old_addr)); |
| 1775 } | 1834 } |
| 1776 } | 1835 } |
| 1777 }; | 1836 }; |
| 1778 | 1837 |
| 1779 | 1838 |
| 1780 // Visitor for updating pointers from live objects in old spaces to new space. | 1839 // Visitor for updating pointers from live objects in old spaces to new space. |
| 1781 // It does not expect to encounter pointers to dead objects. | 1840 // It does not expect to encounter pointers to dead objects. |
| 1782 class PointersToNewGenUpdatingVisitor: public ObjectVisitor { | 1841 class PointersToNewGenUpdatingVisitor: public ObjectVisitor { |
| 1783 public: | 1842 public: |
| 1843 explicit PointersToNewGenUpdatingVisitor(Heap* heap) : heap_(heap) { } |
| 1844 |
| 1784 void VisitPointer(Object** p) { | 1845 void VisitPointer(Object** p) { |
| 1785 StaticPointersToNewGenUpdatingVisitor::VisitPointer(p); | 1846 StaticPointersToNewGenUpdatingVisitor::VisitPointer(heap_, p); |
| 1786 } | 1847 } |
| 1787 | 1848 |
| 1788 void VisitPointers(Object** start, Object** end) { | 1849 void VisitPointers(Object** start, Object** end) { |
| 1789 for (Object** p = start; p < end; p++) { | 1850 for (Object** p = start; p < end; p++) { |
| 1790 StaticPointersToNewGenUpdatingVisitor::VisitPointer(p); | 1851 StaticPointersToNewGenUpdatingVisitor::VisitPointer(heap_, p); |
| 1791 } | 1852 } |
| 1792 } | 1853 } |
| 1793 | 1854 |
| 1794 void VisitCodeTarget(RelocInfo* rinfo) { | 1855 void VisitCodeTarget(RelocInfo* rinfo) { |
| 1795 ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode())); | 1856 ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode())); |
| 1796 Object* target = Code::GetCodeFromTargetAddress(rinfo->target_address()); | 1857 Object* target = Code::GetCodeFromTargetAddress(rinfo->target_address()); |
| 1797 VisitPointer(&target); | 1858 VisitPointer(&target); |
| 1798 rinfo->set_target_address(Code::cast(target)->instruction_start()); | 1859 rinfo->set_target_address(Code::cast(target)->instruction_start()); |
| 1799 } | 1860 } |
| 1800 | 1861 |
| 1801 void VisitDebugTarget(RelocInfo* rinfo) { | 1862 void VisitDebugTarget(RelocInfo* rinfo) { |
| 1802 ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) && | 1863 ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) && |
| 1803 rinfo->IsPatchedReturnSequence()) || | 1864 rinfo->IsPatchedReturnSequence()) || |
| 1804 (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) && | 1865 (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) && |
| 1805 rinfo->IsPatchedDebugBreakSlotSequence())); | 1866 rinfo->IsPatchedDebugBreakSlotSequence())); |
| 1806 Object* target = Code::GetCodeFromTargetAddress(rinfo->call_address()); | 1867 Object* target = Code::GetCodeFromTargetAddress(rinfo->call_address()); |
| 1807 VisitPointer(&target); | 1868 VisitPointer(&target); |
| 1808 rinfo->set_call_address(Code::cast(target)->instruction_start()); | 1869 rinfo->set_call_address(Code::cast(target)->instruction_start()); |
| 1809 } | 1870 } |
| 1871 private: |
| 1872 Heap* heap_; |
| 1810 }; | 1873 }; |
| 1811 | 1874 |
| 1812 | 1875 |
| 1813 // Visitor for updating pointers from live objects in old spaces to new space. | 1876 // Visitor for updating pointers from live objects in old spaces to new space. |
| 1814 // It can encounter pointers to dead objects in new space when traversing map | 1877 // It can encounter pointers to dead objects in new space when traversing map |
| 1815 // space (see comment for MigrateObject). | 1878 // space (see comment for MigrateObject). |
| 1816 static void UpdatePointerToNewGen(HeapObject** p) { | 1879 static void UpdatePointerToNewGen(HeapObject** p) { |
| 1817 if (!(*p)->IsHeapObject()) return; | 1880 if (!(*p)->IsHeapObject()) return; |
| 1818 | 1881 |
| 1819 Address old_addr = (*p)->address(); | 1882 Address old_addr = (*p)->address(); |
| 1820 ASSERT(Heap::InFromSpace(*p)); | 1883 ASSERT(HEAP->InFromSpace(*p)); |
| 1821 | 1884 |
| 1822 Address new_addr = Memory::Address_at(old_addr); | 1885 Address new_addr = Memory::Address_at(old_addr); |
| 1823 | 1886 |
| 1824 if (new_addr == NULL) { | 1887 if (new_addr == NULL) { |
| 1825 // We encountered pointer to a dead object. Clear it so we will | 1888 // We encountered pointer to a dead object. Clear it so we will |
| 1826 // not visit it again during next iteration of dirty regions. | 1889 // not visit it again during next iteration of dirty regions. |
| 1827 *p = NULL; | 1890 *p = NULL; |
| 1828 } else { | 1891 } else { |
| 1829 *p = HeapObject::FromAddress(new_addr); | 1892 *p = HeapObject::FromAddress(new_addr); |
| 1830 } | 1893 } |
| 1831 } | 1894 } |
| 1832 | 1895 |
| 1833 | 1896 |
| 1834 static String* UpdateNewSpaceReferenceInExternalStringTableEntry(Object **p) { | 1897 static String* UpdateNewSpaceReferenceInExternalStringTableEntry(Heap* heap, |
| 1898 Object** p) { |
| 1835 Address old_addr = HeapObject::cast(*p)->address(); | 1899 Address old_addr = HeapObject::cast(*p)->address(); |
| 1836 Address new_addr = Memory::Address_at(old_addr); | 1900 Address new_addr = Memory::Address_at(old_addr); |
| 1837 return String::cast(HeapObject::FromAddress(new_addr)); | 1901 return String::cast(HeapObject::FromAddress(new_addr)); |
| 1838 } | 1902 } |
| 1839 | 1903 |
| 1840 | 1904 |
| 1841 static bool TryPromoteObject(HeapObject* object, int object_size) { | 1905 static bool TryPromoteObject(Heap* heap, HeapObject* object, int object_size) { |
| 1842 Object* result; | 1906 Object* result; |
| 1843 | 1907 |
| 1844 if (object_size > Heap::MaxObjectSizeInPagedSpace()) { | 1908 if (object_size > heap->MaxObjectSizeInPagedSpace()) { |
| 1845 MaybeObject* maybe_result = | 1909 MaybeObject* maybe_result = |
| 1846 Heap::lo_space()->AllocateRawFixedArray(object_size); | 1910 heap->lo_space()->AllocateRawFixedArray(object_size); |
| 1847 if (maybe_result->ToObject(&result)) { | 1911 if (maybe_result->ToObject(&result)) { |
| 1848 HeapObject* target = HeapObject::cast(result); | 1912 HeapObject* target = HeapObject::cast(result); |
| 1849 MigrateObject(target->address(), object->address(), object_size, true); | 1913 MigrateObject(heap, target->address(), object->address(), object_size, |
| 1850 MarkCompactCollector::tracer()-> | 1914 true); |
| 1915 heap->mark_compact_collector()->tracer()-> |
| 1851 increment_promoted_objects_size(object_size); | 1916 increment_promoted_objects_size(object_size); |
| 1852 return true; | 1917 return true; |
| 1853 } | 1918 } |
| 1854 } else { | 1919 } else { |
| 1855 OldSpace* target_space = Heap::TargetSpace(object); | 1920 OldSpace* target_space = heap->TargetSpace(object); |
| 1856 | 1921 |
| 1857 ASSERT(target_space == Heap::old_pointer_space() || | 1922 ASSERT(target_space == heap->old_pointer_space() || |
| 1858 target_space == Heap::old_data_space()); | 1923 target_space == heap->old_data_space()); |
| 1859 MaybeObject* maybe_result = target_space->AllocateRaw(object_size); | 1924 MaybeObject* maybe_result = target_space->AllocateRaw(object_size); |
| 1860 if (maybe_result->ToObject(&result)) { | 1925 if (maybe_result->ToObject(&result)) { |
| 1861 HeapObject* target = HeapObject::cast(result); | 1926 HeapObject* target = HeapObject::cast(result); |
| 1862 MigrateObject(target->address(), | 1927 MigrateObject(heap, |
| 1928 target->address(), |
| 1863 object->address(), | 1929 object->address(), |
| 1864 object_size, | 1930 object_size, |
| 1865 target_space == Heap::old_pointer_space()); | 1931 target_space == heap->old_pointer_space()); |
| 1866 MarkCompactCollector::tracer()-> | 1932 heap->mark_compact_collector()->tracer()-> |
| 1867 increment_promoted_objects_size(object_size); | 1933 increment_promoted_objects_size(object_size); |
| 1868 return true; | 1934 return true; |
| 1869 } | 1935 } |
| 1870 } | 1936 } |
| 1871 | 1937 |
| 1872 return false; | 1938 return false; |
| 1873 } | 1939 } |
| 1874 | 1940 |
| 1875 | 1941 |
| 1876 static void SweepNewSpace(NewSpace* space) { | 1942 static void SweepNewSpace(Heap* heap, NewSpace* space) { |
| 1877 Heap::CheckNewSpaceExpansionCriteria(); | 1943 heap->CheckNewSpaceExpansionCriteria(); |
| 1878 | 1944 |
| 1879 Address from_bottom = space->bottom(); | 1945 Address from_bottom = space->bottom(); |
| 1880 Address from_top = space->top(); | 1946 Address from_top = space->top(); |
| 1881 | 1947 |
| 1882 // Flip the semispaces. After flipping, to space is empty, from space has | 1948 // Flip the semispaces. After flipping, to space is empty, from space has |
| 1883 // live objects. | 1949 // live objects. |
| 1884 space->Flip(); | 1950 space->Flip(); |
| 1885 space->ResetAllocationInfo(); | 1951 space->ResetAllocationInfo(); |
| 1886 | 1952 |
| 1887 int size = 0; | 1953 int size = 0; |
| 1888 int survivors_size = 0; | 1954 int survivors_size = 0; |
| 1889 | 1955 |
| 1890 // First pass: traverse all objects in inactive semispace, remove marks, | 1956 // First pass: traverse all objects in inactive semispace, remove marks, |
| 1891 // migrate live objects and write forwarding addresses. | 1957 // migrate live objects and write forwarding addresses. |
| 1892 for (Address current = from_bottom; current < from_top; current += size) { | 1958 for (Address current = from_bottom; current < from_top; current += size) { |
| 1893 HeapObject* object = HeapObject::FromAddress(current); | 1959 HeapObject* object = HeapObject::FromAddress(current); |
| 1894 | 1960 |
| 1895 if (object->IsMarked()) { | 1961 if (object->IsMarked()) { |
| 1896 object->ClearMark(); | 1962 object->ClearMark(); |
| 1897 MarkCompactCollector::tracer()->decrement_marked_count(); | 1963 heap->mark_compact_collector()->tracer()->decrement_marked_count(); |
| 1898 | 1964 |
| 1899 size = object->Size(); | 1965 size = object->Size(); |
| 1900 survivors_size += size; | 1966 survivors_size += size; |
| 1901 | 1967 |
| 1902 // Aggressively promote young survivors to the old space. | 1968 // Aggressively promote young survivors to the old space. |
| 1903 if (TryPromoteObject(object, size)) { | 1969 if (TryPromoteObject(heap, object, size)) { |
| 1904 continue; | 1970 continue; |
| 1905 } | 1971 } |
| 1906 | 1972 |
| 1907 // Promotion failed. Just migrate object to another semispace. | 1973 // Promotion failed. Just migrate object to another semispace. |
| 1908 // Allocation cannot fail at this point: semispaces are of equal size. | 1974 // Allocation cannot fail at this point: semispaces are of equal size. |
| 1909 Object* target = space->AllocateRaw(size)->ToObjectUnchecked(); | 1975 Object* target = space->AllocateRaw(size)->ToObjectUnchecked(); |
| 1910 | 1976 |
| 1911 MigrateObject(HeapObject::cast(target)->address(), | 1977 MigrateObject(heap, |
| 1978 HeapObject::cast(target)->address(), |
| 1912 current, | 1979 current, |
| 1913 size, | 1980 size, |
| 1914 false); | 1981 false); |
| 1915 } else { | 1982 } else { |
| 1916 // Process the dead object before we write a NULL into its header. | 1983 // Process the dead object before we write a NULL into its header. |
| 1917 LiveObjectList::ProcessNonLive(object); | 1984 LiveObjectList::ProcessNonLive(object); |
| 1918 | 1985 |
| 1919 size = object->Size(); | 1986 size = object->Size(); |
| 1920 Memory::Address_at(current) = NULL; | 1987 Memory::Address_at(current) = NULL; |
| 1921 } | 1988 } |
| 1922 } | 1989 } |
| 1923 | 1990 |
| 1924 // Second pass: find pointers to new space and update them. | 1991 // Second pass: find pointers to new space and update them. |
| 1925 PointersToNewGenUpdatingVisitor updating_visitor; | 1992 PointersToNewGenUpdatingVisitor updating_visitor(heap); |
| 1926 | 1993 |
| 1927 // Update pointers in to space. | 1994 // Update pointers in to space. |
| 1928 Address current = space->bottom(); | 1995 Address current = space->bottom(); |
| 1929 while (current < space->top()) { | 1996 while (current < space->top()) { |
| 1930 HeapObject* object = HeapObject::FromAddress(current); | 1997 HeapObject* object = HeapObject::FromAddress(current); |
| 1931 current += | 1998 current += |
| 1932 StaticPointersToNewGenUpdatingVisitor::IterateBody(object->map(), | 1999 StaticPointersToNewGenUpdatingVisitor::IterateBody(object->map(), |
| 1933 object); | 2000 object); |
| 1934 } | 2001 } |
| 1935 | 2002 |
| 1936 // Update roots. | 2003 // Update roots. |
| 1937 Heap::IterateRoots(&updating_visitor, VISIT_ALL_IN_SCAVENGE); | 2004 heap->IterateRoots(&updating_visitor, VISIT_ALL_IN_SCAVENGE); |
| 1938 LiveObjectList::IterateElements(&updating_visitor); | 2005 LiveObjectList::IterateElements(&updating_visitor); |
| 1939 | 2006 |
| 1940 // Update pointers in old spaces. | 2007 // Update pointers in old spaces. |
| 1941 Heap::IterateDirtyRegions(Heap::old_pointer_space(), | 2008 heap->IterateDirtyRegions(heap->old_pointer_space(), |
| 1942 &Heap::IteratePointersInDirtyRegion, | 2009 &Heap::IteratePointersInDirtyRegion, |
| 1943 &UpdatePointerToNewGen, | 2010 &UpdatePointerToNewGen, |
| 1944 Heap::WATERMARK_SHOULD_BE_VALID); | 2011 heap->WATERMARK_SHOULD_BE_VALID); |
| 1945 | 2012 |
| 1946 Heap::lo_space()->IterateDirtyRegions(&UpdatePointerToNewGen); | 2013 heap->lo_space()->IterateDirtyRegions(&UpdatePointerToNewGen); |
| 1947 | 2014 |
| 1948 // Update pointers from cells. | 2015 // Update pointers from cells. |
| 1949 HeapObjectIterator cell_iterator(Heap::cell_space()); | 2016 HeapObjectIterator cell_iterator(heap->cell_space()); |
| 1950 for (HeapObject* cell = cell_iterator.next(); | 2017 for (HeapObject* cell = cell_iterator.next(); |
| 1951 cell != NULL; | 2018 cell != NULL; |
| 1952 cell = cell_iterator.next()) { | 2019 cell = cell_iterator.next()) { |
| 1953 if (cell->IsJSGlobalPropertyCell()) { | 2020 if (cell->IsJSGlobalPropertyCell()) { |
| 1954 Address value_address = | 2021 Address value_address = |
| 1955 reinterpret_cast<Address>(cell) + | 2022 reinterpret_cast<Address>(cell) + |
| 1956 (JSGlobalPropertyCell::kValueOffset - kHeapObjectTag); | 2023 (JSGlobalPropertyCell::kValueOffset - kHeapObjectTag); |
| 1957 updating_visitor.VisitPointer(reinterpret_cast<Object**>(value_address)); | 2024 updating_visitor.VisitPointer(reinterpret_cast<Object**>(value_address)); |
| 1958 } | 2025 } |
| 1959 } | 2026 } |
| 1960 | 2027 |
| 1961 // Update pointer from the global contexts list. | 2028 // Update pointer from the global contexts list. |
| 1962 updating_visitor.VisitPointer(Heap::global_contexts_list_address()); | 2029 updating_visitor.VisitPointer(heap->global_contexts_list_address()); |
| 1963 | 2030 |
| 1964 // Update pointers from external string table. | 2031 // Update pointers from external string table. |
| 1965 Heap::UpdateNewSpaceReferencesInExternalStringTable( | 2032 heap->UpdateNewSpaceReferencesInExternalStringTable( |
| 1966 &UpdateNewSpaceReferenceInExternalStringTableEntry); | 2033 &UpdateNewSpaceReferenceInExternalStringTableEntry); |
| 1967 | 2034 |
| 1968 // All pointers were updated. Update auxiliary allocation info. | 2035 // All pointers were updated. Update auxiliary allocation info. |
| 1969 Heap::IncrementYoungSurvivorsCounter(survivors_size); | 2036 heap->IncrementYoungSurvivorsCounter(survivors_size); |
| 1970 space->set_age_mark(space->top()); | 2037 space->set_age_mark(space->top()); |
| 1971 | 2038 |
| 1972 // Update JSFunction pointers from the runtime profiler. | 2039 // Update JSFunction pointers from the runtime profiler. |
| 1973 RuntimeProfiler::UpdateSamplesAfterScavenge(); | 2040 heap->isolate()->runtime_profiler()->UpdateSamplesAfterScavenge(); |
| 1974 } | 2041 } |
| 1975 | 2042 |
| 1976 | 2043 |
| 1977 static void SweepSpace(PagedSpace* space) { | 2044 static void SweepSpace(Heap* heap, PagedSpace* space) { |
| 1978 PageIterator it(space, PageIterator::PAGES_IN_USE); | 2045 PageIterator it(space, PageIterator::PAGES_IN_USE); |
| 1979 | 2046 |
| 1980 // During sweeping of paged space we are trying to find the longest sequences | 2047 // During sweeping of paged space we are trying to find the longest sequences |
| 1981 // of pages without live objects and free them (instead of putting them on | 2048 // of pages without live objects and free them (instead of putting them on |
| 1982 // the free list). | 2049 // the free list). |
| 1983 | 2050 |
| 1984 // Page preceding current. | 2051 // Page preceding current. |
| 1985 Page* prev = Page::FromAddress(NULL); | 2052 Page* prev = Page::FromAddress(NULL); |
| 1986 | 2053 |
| 1987 // First empty page in a sequence. | 2054 // First empty page in a sequence. |
| (...skipping 17 matching lines...) Expand all Loading... |
| 2005 bool is_previous_alive = true; | 2072 bool is_previous_alive = true; |
| 2006 Address free_start = NULL; | 2073 Address free_start = NULL; |
| 2007 HeapObject* object; | 2074 HeapObject* object; |
| 2008 | 2075 |
| 2009 for (Address current = p->ObjectAreaStart(); | 2076 for (Address current = p->ObjectAreaStart(); |
| 2010 current < p->AllocationTop(); | 2077 current < p->AllocationTop(); |
| 2011 current += object->Size()) { | 2078 current += object->Size()) { |
| 2012 object = HeapObject::FromAddress(current); | 2079 object = HeapObject::FromAddress(current); |
| 2013 if (object->IsMarked()) { | 2080 if (object->IsMarked()) { |
| 2014 object->ClearMark(); | 2081 object->ClearMark(); |
| 2015 MarkCompactCollector::tracer()->decrement_marked_count(); | 2082 heap->mark_compact_collector()->tracer()->decrement_marked_count(); |
| 2016 | 2083 |
| 2017 if (!is_previous_alive) { // Transition from free to live. | 2084 if (!is_previous_alive) { // Transition from free to live. |
| 2018 space->DeallocateBlock(free_start, | 2085 space->DeallocateBlock(free_start, |
| 2019 static_cast<int>(current - free_start), | 2086 static_cast<int>(current - free_start), |
| 2020 true); | 2087 true); |
| 2021 is_previous_alive = true; | 2088 is_previous_alive = true; |
| 2022 } | 2089 } |
| 2023 } else { | 2090 } else { |
| 2024 MarkCompactCollector::ReportDeleteIfNeeded(object); | 2091 heap->mark_compact_collector()->ReportDeleteIfNeeded(object); |
| 2025 if (is_previous_alive) { // Transition from live to free. | 2092 if (is_previous_alive) { // Transition from live to free. |
| 2026 free_start = current; | 2093 free_start = current; |
| 2027 is_previous_alive = false; | 2094 is_previous_alive = false; |
| 2028 } | 2095 } |
| 2029 LiveObjectList::ProcessNonLive(object); | 2096 LiveObjectList::ProcessNonLive(object); |
| 2030 } | 2097 } |
| 2031 // The object is now unmarked for the call to Size() at the top of the | 2098 // The object is now unmarked for the call to Size() at the top of the |
| 2032 // loop. | 2099 // loop. |
| 2033 } | 2100 } |
| 2034 | 2101 |
| (...skipping 79 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2114 space->SetTop(new_allocation_top); | 2181 space->SetTop(new_allocation_top); |
| 2115 } | 2182 } |
| 2116 } | 2183 } |
| 2117 | 2184 |
| 2118 | 2185 |
| 2119 void MarkCompactCollector::EncodeForwardingAddresses() { | 2186 void MarkCompactCollector::EncodeForwardingAddresses() { |
| 2120 ASSERT(state_ == ENCODE_FORWARDING_ADDRESSES); | 2187 ASSERT(state_ == ENCODE_FORWARDING_ADDRESSES); |
| 2121 // Objects in the active semispace of the young generation may be | 2188 // Objects in the active semispace of the young generation may be |
| 2122 // relocated to the inactive semispace (if not promoted). Set the | 2189 // relocated to the inactive semispace (if not promoted). Set the |
| 2123 // relocation info to the beginning of the inactive semispace. | 2190 // relocation info to the beginning of the inactive semispace. |
| 2124 Heap::new_space()->MCResetRelocationInfo(); | 2191 heap_->new_space()->MCResetRelocationInfo(); |
| 2125 | 2192 |
| 2126 // Compute the forwarding pointers in each space. | 2193 // Compute the forwarding pointers in each space. |
| 2127 EncodeForwardingAddressesInPagedSpace<MCAllocateFromOldPointerSpace, | 2194 EncodeForwardingAddressesInPagedSpace<MCAllocateFromOldPointerSpace, |
| 2128 ReportDeleteIfNeeded>( | 2195 ReportDeleteIfNeeded>( |
| 2129 Heap::old_pointer_space()); | 2196 heap_->old_pointer_space()); |
| 2130 | 2197 |
| 2131 EncodeForwardingAddressesInPagedSpace<MCAllocateFromOldDataSpace, | 2198 EncodeForwardingAddressesInPagedSpace<MCAllocateFromOldDataSpace, |
| 2132 IgnoreNonLiveObject>( | 2199 IgnoreNonLiveObject>( |
| 2133 Heap::old_data_space()); | 2200 heap_->old_data_space()); |
| 2134 | 2201 |
| 2135 EncodeForwardingAddressesInPagedSpace<MCAllocateFromCodeSpace, | 2202 EncodeForwardingAddressesInPagedSpace<MCAllocateFromCodeSpace, |
| 2136 ReportDeleteIfNeeded>( | 2203 ReportDeleteIfNeeded>( |
| 2137 Heap::code_space()); | 2204 heap_->code_space()); |
| 2138 | 2205 |
| 2139 EncodeForwardingAddressesInPagedSpace<MCAllocateFromCellSpace, | 2206 EncodeForwardingAddressesInPagedSpace<MCAllocateFromCellSpace, |
| 2140 IgnoreNonLiveObject>( | 2207 IgnoreNonLiveObject>( |
| 2141 Heap::cell_space()); | 2208 heap_->cell_space()); |
| 2142 | 2209 |
| 2143 | 2210 |
| 2144 // Compute new space next to last after the old and code spaces have been | 2211 // Compute new space next to last after the old and code spaces have been |
| 2145 // compacted. Objects in new space can be promoted to old or code space. | 2212 // compacted. Objects in new space can be promoted to old or code space. |
| 2146 EncodeForwardingAddressesInNewSpace(); | 2213 EncodeForwardingAddressesInNewSpace(); |
| 2147 | 2214 |
| 2148 // Compute map space last because computing forwarding addresses | 2215 // Compute map space last because computing forwarding addresses |
| 2149 // overwrites non-live objects. Objects in the other spaces rely on | 2216 // overwrites non-live objects. Objects in the other spaces rely on |
| 2150 // non-live map pointers to get the sizes of non-live objects. | 2217 // non-live map pointers to get the sizes of non-live objects. |
| 2151 EncodeForwardingAddressesInPagedSpace<MCAllocateFromMapSpace, | 2218 EncodeForwardingAddressesInPagedSpace<MCAllocateFromMapSpace, |
| 2152 IgnoreNonLiveObject>( | 2219 IgnoreNonLiveObject>( |
| 2153 Heap::map_space()); | 2220 heap_->map_space()); |
| 2154 | 2221 |
| 2155 // Write relocation info to the top page, so we can use it later. This is | 2222 // Write relocation info to the top page, so we can use it later. This is |
| 2156 // done after promoting objects from the new space so we get the correct | 2223 // done after promoting objects from the new space so we get the correct |
| 2157 // allocation top. | 2224 // allocation top. |
| 2158 Heap::old_pointer_space()->MCWriteRelocationInfoToPage(); | 2225 heap_->old_pointer_space()->MCWriteRelocationInfoToPage(); |
| 2159 Heap::old_data_space()->MCWriteRelocationInfoToPage(); | 2226 heap_->old_data_space()->MCWriteRelocationInfoToPage(); |
| 2160 Heap::code_space()->MCWriteRelocationInfoToPage(); | 2227 heap_->code_space()->MCWriteRelocationInfoToPage(); |
| 2161 Heap::map_space()->MCWriteRelocationInfoToPage(); | 2228 heap_->map_space()->MCWriteRelocationInfoToPage(); |
| 2162 Heap::cell_space()->MCWriteRelocationInfoToPage(); | 2229 heap_->cell_space()->MCWriteRelocationInfoToPage(); |
| 2163 } | 2230 } |
| 2164 | 2231 |
| 2165 | 2232 |
| 2166 class MapIterator : public HeapObjectIterator { | 2233 class MapIterator : public HeapObjectIterator { |
| 2167 public: | 2234 public: |
| 2168 MapIterator() : HeapObjectIterator(Heap::map_space(), &SizeCallback) { } | 2235 MapIterator() : HeapObjectIterator(HEAP->map_space(), &SizeCallback) { } |
| 2169 | 2236 |
| 2170 explicit MapIterator(Address start) | 2237 explicit MapIterator(Address start) |
| 2171 : HeapObjectIterator(Heap::map_space(), start, &SizeCallback) { } | 2238 : HeapObjectIterator(HEAP->map_space(), start, &SizeCallback) { } |
| 2172 | 2239 |
| 2173 private: | 2240 private: |
| 2174 static int SizeCallback(HeapObject* unused) { | 2241 static int SizeCallback(HeapObject* unused) { |
| 2175 USE(unused); | 2242 USE(unused); |
| 2176 return Map::kSize; | 2243 return Map::kSize; |
| 2177 } | 2244 } |
| 2178 }; | 2245 }; |
| 2179 | 2246 |
| 2180 | 2247 |
| 2181 class MapCompact { | 2248 class MapCompact { |
| 2182 public: | 2249 public: |
| 2183 explicit MapCompact(int live_maps) | 2250 explicit MapCompact(Heap* heap, int live_maps) |
| 2184 : live_maps_(live_maps), | 2251 : heap_(heap), |
| 2185 to_evacuate_start_(Heap::map_space()->TopAfterCompaction(live_maps)), | 2252 live_maps_(live_maps), |
| 2253 to_evacuate_start_(heap->map_space()->TopAfterCompaction(live_maps)), |
| 2186 map_to_evacuate_it_(to_evacuate_start_), | 2254 map_to_evacuate_it_(to_evacuate_start_), |
| 2187 first_map_to_evacuate_( | 2255 first_map_to_evacuate_( |
| 2188 reinterpret_cast<Map*>(HeapObject::FromAddress(to_evacuate_start_))) { | 2256 reinterpret_cast<Map*>(HeapObject::FromAddress(to_evacuate_start_))) { |
| 2189 } | 2257 } |
| 2190 | 2258 |
| 2191 void CompactMaps() { | 2259 void CompactMaps() { |
| 2192 // As we know the number of maps to evacuate beforehand, | 2260 // As we know the number of maps to evacuate beforehand, |
| 2193 // we stop when there are no more vacant maps. | 2261 // we stop when there are no more vacant maps. |
| 2194 for (Map* next_vacant_map = NextVacantMap(); | 2262 for (Map* next_vacant_map = NextVacantMap(); |
| 2195 next_vacant_map; | 2263 next_vacant_map; |
| 2196 next_vacant_map = NextVacantMap()) { | 2264 next_vacant_map = NextVacantMap()) { |
| 2197 EvacuateMap(next_vacant_map, NextMapToEvacuate()); | 2265 EvacuateMap(next_vacant_map, NextMapToEvacuate()); |
| 2198 } | 2266 } |
| 2199 | 2267 |
| 2200 #ifdef DEBUG | 2268 #ifdef DEBUG |
| 2201 CheckNoMapsToEvacuate(); | 2269 CheckNoMapsToEvacuate(); |
| 2202 #endif | 2270 #endif |
| 2203 } | 2271 } |
| 2204 | 2272 |
| 2205 void UpdateMapPointersInRoots() { | 2273 void UpdateMapPointersInRoots() { |
| 2206 Heap::IterateRoots(&map_updating_visitor_, VISIT_ONLY_STRONG); | 2274 MapUpdatingVisitor map_updating_visitor; |
| 2207 GlobalHandles::IterateWeakRoots(&map_updating_visitor_); | 2275 heap_->IterateRoots(&map_updating_visitor, VISIT_ONLY_STRONG); |
| 2208 LiveObjectList::IterateElements(&map_updating_visitor_); | 2276 heap_->isolate()->global_handles()->IterateWeakRoots(&map_updating_visitor); |
| 2277 LiveObjectList::IterateElements(&map_updating_visitor); |
| 2209 } | 2278 } |
| 2210 | 2279 |
| 2211 void UpdateMapPointersInPagedSpace(PagedSpace* space) { | 2280 void UpdateMapPointersInPagedSpace(PagedSpace* space) { |
| 2212 ASSERT(space != Heap::map_space()); | 2281 ASSERT(space != heap_->map_space()); |
| 2213 | 2282 |
| 2214 PageIterator it(space, PageIterator::PAGES_IN_USE); | 2283 PageIterator it(space, PageIterator::PAGES_IN_USE); |
| 2215 while (it.has_next()) { | 2284 while (it.has_next()) { |
| 2216 Page* p = it.next(); | 2285 Page* p = it.next(); |
| 2217 UpdateMapPointersInRange(p->ObjectAreaStart(), p->AllocationTop()); | 2286 UpdateMapPointersInRange(heap_, p->ObjectAreaStart(), p->AllocationTop()); |
| 2218 } | 2287 } |
| 2219 } | 2288 } |
| 2220 | 2289 |
| 2221 void UpdateMapPointersInNewSpace() { | 2290 void UpdateMapPointersInNewSpace() { |
| 2222 NewSpace* space = Heap::new_space(); | 2291 NewSpace* space = heap_->new_space(); |
| 2223 UpdateMapPointersInRange(space->bottom(), space->top()); | 2292 UpdateMapPointersInRange(heap_, space->bottom(), space->top()); |
| 2224 } | 2293 } |
| 2225 | 2294 |
| 2226 void UpdateMapPointersInLargeObjectSpace() { | 2295 void UpdateMapPointersInLargeObjectSpace() { |
| 2227 LargeObjectIterator it(Heap::lo_space()); | 2296 LargeObjectIterator it(heap_->lo_space()); |
| 2228 for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) | 2297 for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) |
| 2229 UpdateMapPointersInObject(obj); | 2298 UpdateMapPointersInObject(heap_, obj); |
| 2230 } | 2299 } |
| 2231 | 2300 |
| 2232 void Finish() { | 2301 void Finish() { |
| 2233 Heap::map_space()->FinishCompaction(to_evacuate_start_, live_maps_); | 2302 heap_->map_space()->FinishCompaction(to_evacuate_start_, live_maps_); |
| 2234 } | 2303 } |
| 2235 | 2304 |
| 2236 private: | 2305 private: |
| 2306 Heap* heap_; |
| 2237 int live_maps_; | 2307 int live_maps_; |
| 2238 Address to_evacuate_start_; | 2308 Address to_evacuate_start_; |
| 2239 MapIterator vacant_map_it_; | 2309 MapIterator vacant_map_it_; |
| 2240 MapIterator map_to_evacuate_it_; | 2310 MapIterator map_to_evacuate_it_; |
| 2241 Map* first_map_to_evacuate_; | 2311 Map* first_map_to_evacuate_; |
| 2242 | 2312 |
| 2243 // Helper class for updating map pointers in HeapObjects. | 2313 // Helper class for updating map pointers in HeapObjects. |
| 2244 class MapUpdatingVisitor: public ObjectVisitor { | 2314 class MapUpdatingVisitor: public ObjectVisitor { |
| 2245 public: | 2315 public: |
| 2316 MapUpdatingVisitor() {} |
| 2317 |
| 2246 void VisitPointer(Object** p) { | 2318 void VisitPointer(Object** p) { |
| 2247 UpdateMapPointer(p); | 2319 UpdateMapPointer(p); |
| 2248 } | 2320 } |
| 2249 | 2321 |
| 2250 void VisitPointers(Object** start, Object** end) { | 2322 void VisitPointers(Object** start, Object** end) { |
| 2251 for (Object** p = start; p < end; p++) UpdateMapPointer(p); | 2323 for (Object** p = start; p < end; p++) UpdateMapPointer(p); |
| 2252 } | 2324 } |
| 2253 | 2325 |
| 2254 private: | 2326 private: |
| 2255 void UpdateMapPointer(Object** p) { | 2327 void UpdateMapPointer(Object** p) { |
| 2256 if (!(*p)->IsHeapObject()) return; | 2328 if (!(*p)->IsHeapObject()) return; |
| 2257 HeapObject* old_map = reinterpret_cast<HeapObject*>(*p); | 2329 HeapObject* old_map = reinterpret_cast<HeapObject*>(*p); |
| 2258 | 2330 |
| 2259 // Moved maps are tagged with overflowed map word. They are the only | 2331 // Moved maps are tagged with overflowed map word. They are the only |
| 2260 // objects whose map word is overflowed as marking is already complete. | 2332 // objects whose map word is overflowed as marking is already complete. |
| 2261 MapWord map_word = old_map->map_word(); | 2333 MapWord map_word = old_map->map_word(); |
| 2262 if (!map_word.IsOverflowed()) return; | 2334 if (!map_word.IsOverflowed()) return; |
| 2263 | 2335 |
| 2264 *p = GetForwardedMap(map_word); | 2336 *p = GetForwardedMap(map_word); |
| 2265 } | 2337 } |
| 2266 }; | 2338 }; |
| 2267 | 2339 |
| 2268 static MapUpdatingVisitor map_updating_visitor_; | |
| 2269 | |
| 2270 static Map* NextMap(MapIterator* it, HeapObject* last, bool live) { | 2340 static Map* NextMap(MapIterator* it, HeapObject* last, bool live) { |
| 2271 while (true) { | 2341 while (true) { |
| 2272 HeapObject* next = it->next(); | 2342 HeapObject* next = it->next(); |
| 2273 ASSERT(next != NULL); | 2343 ASSERT(next != NULL); |
| 2274 if (next == last) | 2344 if (next == last) |
| 2275 return NULL; | 2345 return NULL; |
| 2276 ASSERT(!next->IsOverflowed()); | 2346 ASSERT(!next->IsOverflowed()); |
| 2277 ASSERT(!next->IsMarked()); | 2347 ASSERT(!next->IsMarked()); |
| 2278 ASSERT(next->IsMap() || FreeListNode::IsFreeListNode(next)); | 2348 ASSERT(next->IsMap() || FreeListNode::IsFreeListNode(next)); |
| 2279 if (next->IsMap() == live) | 2349 if (next->IsMap() == live) |
| (...skipping 13 matching lines...) Expand all Loading... |
| 2293 ASSERT(map->IsMap()); | 2363 ASSERT(map->IsMap()); |
| 2294 return map; | 2364 return map; |
| 2295 } | 2365 } |
| 2296 | 2366 |
| 2297 static void EvacuateMap(Map* vacant_map, Map* map_to_evacuate) { | 2367 static void EvacuateMap(Map* vacant_map, Map* map_to_evacuate) { |
| 2298 ASSERT(FreeListNode::IsFreeListNode(vacant_map)); | 2368 ASSERT(FreeListNode::IsFreeListNode(vacant_map)); |
| 2299 ASSERT(map_to_evacuate->IsMap()); | 2369 ASSERT(map_to_evacuate->IsMap()); |
| 2300 | 2370 |
| 2301 ASSERT(Map::kSize % 4 == 0); | 2371 ASSERT(Map::kSize % 4 == 0); |
| 2302 | 2372 |
| 2303 Heap::CopyBlockToOldSpaceAndUpdateRegionMarks(vacant_map->address(), | 2373 map_to_evacuate->heap()->CopyBlockToOldSpaceAndUpdateRegionMarks( |
| 2304 map_to_evacuate->address(), | 2374 vacant_map->address(), map_to_evacuate->address(), Map::kSize); |
| 2305 Map::kSize); | |
| 2306 | 2375 |
| 2307 ASSERT(vacant_map->IsMap()); // Due to memcpy above. | 2376 ASSERT(vacant_map->IsMap()); // Due to memcpy above. |
| 2308 | 2377 |
| 2309 MapWord forwarding_map_word = MapWord::FromMap(vacant_map); | 2378 MapWord forwarding_map_word = MapWord::FromMap(vacant_map); |
| 2310 forwarding_map_word.SetOverflow(); | 2379 forwarding_map_word.SetOverflow(); |
| 2311 map_to_evacuate->set_map_word(forwarding_map_word); | 2380 map_to_evacuate->set_map_word(forwarding_map_word); |
| 2312 | 2381 |
| 2313 ASSERT(map_to_evacuate->map_word().IsOverflowed()); | 2382 ASSERT(map_to_evacuate->map_word().IsOverflowed()); |
| 2314 ASSERT(GetForwardedMap(map_to_evacuate->map_word()) == vacant_map); | 2383 ASSERT(GetForwardedMap(map_to_evacuate->map_word()) == vacant_map); |
| 2315 } | 2384 } |
| 2316 | 2385 |
| 2317 static Map* GetForwardedMap(MapWord map_word) { | 2386 static Map* GetForwardedMap(MapWord map_word) { |
| 2318 ASSERT(map_word.IsOverflowed()); | 2387 ASSERT(map_word.IsOverflowed()); |
| 2319 map_word.ClearOverflow(); | 2388 map_word.ClearOverflow(); |
| 2320 Map* new_map = map_word.ToMap(); | 2389 Map* new_map = map_word.ToMap(); |
| 2321 ASSERT_MAP_ALIGNED(new_map->address()); | 2390 ASSERT_MAP_ALIGNED(new_map->address()); |
| 2322 return new_map; | 2391 return new_map; |
| 2323 } | 2392 } |
| 2324 | 2393 |
| 2325 static int UpdateMapPointersInObject(HeapObject* obj) { | 2394 static int UpdateMapPointersInObject(Heap* heap, HeapObject* obj) { |
| 2326 ASSERT(!obj->IsMarked()); | 2395 ASSERT(!obj->IsMarked()); |
| 2327 Map* map = obj->map(); | 2396 Map* map = obj->map(); |
| 2328 ASSERT(Heap::map_space()->Contains(map)); | 2397 ASSERT(heap->map_space()->Contains(map)); |
| 2329 MapWord map_word = map->map_word(); | 2398 MapWord map_word = map->map_word(); |
| 2330 ASSERT(!map_word.IsMarked()); | 2399 ASSERT(!map_word.IsMarked()); |
| 2331 if (map_word.IsOverflowed()) { | 2400 if (map_word.IsOverflowed()) { |
| 2332 Map* new_map = GetForwardedMap(map_word); | 2401 Map* new_map = GetForwardedMap(map_word); |
| 2333 ASSERT(Heap::map_space()->Contains(new_map)); | 2402 ASSERT(heap->map_space()->Contains(new_map)); |
| 2334 obj->set_map(new_map); | 2403 obj->set_map(new_map); |
| 2335 | 2404 |
| 2336 #ifdef DEBUG | 2405 #ifdef DEBUG |
| 2337 if (FLAG_gc_verbose) { | 2406 if (FLAG_gc_verbose) { |
| 2338 PrintF("update %p : %p -> %p\n", | 2407 PrintF("update %p : %p -> %p\n", |
| 2339 obj->address(), | 2408 obj->address(), |
| 2340 reinterpret_cast<void*>(map), | 2409 reinterpret_cast<void*>(map), |
| 2341 reinterpret_cast<void*>(new_map)); | 2410 reinterpret_cast<void*>(new_map)); |
| 2342 } | 2411 } |
| 2343 #endif | 2412 #endif |
| 2344 } | 2413 } |
| 2345 | 2414 |
| 2346 int size = obj->SizeFromMap(map); | 2415 int size = obj->SizeFromMap(map); |
| 2347 obj->IterateBody(map->instance_type(), size, &map_updating_visitor_); | 2416 MapUpdatingVisitor map_updating_visitor; |
| 2417 obj->IterateBody(map->instance_type(), size, &map_updating_visitor); |
| 2348 return size; | 2418 return size; |
| 2349 } | 2419 } |
| 2350 | 2420 |
| 2351 static void UpdateMapPointersInRange(Address start, Address end) { | 2421 static void UpdateMapPointersInRange(Heap* heap, Address start, Address end) { |
| 2352 HeapObject* object; | 2422 HeapObject* object; |
| 2353 int size; | 2423 int size; |
| 2354 for (Address current = start; current < end; current += size) { | 2424 for (Address current = start; current < end; current += size) { |
| 2355 object = HeapObject::FromAddress(current); | 2425 object = HeapObject::FromAddress(current); |
| 2356 size = UpdateMapPointersInObject(object); | 2426 size = UpdateMapPointersInObject(heap, object); |
| 2357 ASSERT(size > 0); | 2427 ASSERT(size > 0); |
| 2358 } | 2428 } |
| 2359 } | 2429 } |
| 2360 | 2430 |
| 2361 #ifdef DEBUG | 2431 #ifdef DEBUG |
| 2362 void CheckNoMapsToEvacuate() { | 2432 void CheckNoMapsToEvacuate() { |
| 2363 if (!FLAG_enable_slow_asserts) | 2433 if (!FLAG_enable_slow_asserts) |
| 2364 return; | 2434 return; |
| 2365 | 2435 |
| 2366 for (HeapObject* obj = map_to_evacuate_it_.next(); | 2436 for (HeapObject* obj = map_to_evacuate_it_.next(); |
| 2367 obj != NULL; obj = map_to_evacuate_it_.next()) | 2437 obj != NULL; obj = map_to_evacuate_it_.next()) |
| 2368 ASSERT(FreeListNode::IsFreeListNode(obj)); | 2438 ASSERT(FreeListNode::IsFreeListNode(obj)); |
| 2369 } | 2439 } |
| 2370 #endif | 2440 #endif |
| 2371 }; | 2441 }; |
| 2372 | 2442 |
| 2373 MapCompact::MapUpdatingVisitor MapCompact::map_updating_visitor_; | |
| 2374 | |
| 2375 | 2443 |
| 2376 void MarkCompactCollector::SweepSpaces() { | 2444 void MarkCompactCollector::SweepSpaces() { |
| 2377 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP); | 2445 GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP); |
| 2378 | 2446 |
| 2379 ASSERT(state_ == SWEEP_SPACES); | 2447 ASSERT(state_ == SWEEP_SPACES); |
| 2380 ASSERT(!IsCompacting()); | 2448 ASSERT(!IsCompacting()); |
| 2381 // Noncompacting collections simply sweep the spaces to clear the mark | 2449 // Noncompacting collections simply sweep the spaces to clear the mark |
| 2382 // bits and free the nonlive blocks (for old and map spaces). We sweep | 2450 // bits and free the nonlive blocks (for old and map spaces). We sweep |
| 2383 // the map space last because freeing non-live maps overwrites them and | 2451 // the map space last because freeing non-live maps overwrites them and |
| 2384 // the other spaces rely on possibly non-live maps to get the sizes for | 2452 // the other spaces rely on possibly non-live maps to get the sizes for |
| 2385 // non-live objects. | 2453 // non-live objects. |
| 2386 SweepSpace(Heap::old_pointer_space()); | 2454 SweepSpace(heap_, heap_->old_pointer_space()); |
| 2387 SweepSpace(Heap::old_data_space()); | 2455 SweepSpace(heap_, heap_->old_data_space()); |
| 2388 SweepSpace(Heap::code_space()); | 2456 SweepSpace(heap_, heap_->code_space()); |
| 2389 SweepSpace(Heap::cell_space()); | 2457 SweepSpace(heap_, heap_->cell_space()); |
| 2390 { GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP_NEWSPACE); | 2458 { GCTracer::Scope gc_scope(tracer_, GCTracer::Scope::MC_SWEEP_NEWSPACE); |
| 2391 SweepNewSpace(Heap::new_space()); | 2459 SweepNewSpace(heap_, heap_->new_space()); |
| 2392 } | 2460 } |
| 2393 SweepSpace(Heap::map_space()); | 2461 SweepSpace(heap_, heap_->map_space()); |
| 2394 | 2462 |
| 2395 Heap::IterateDirtyRegions(Heap::map_space(), | 2463 heap_->IterateDirtyRegions(heap_->map_space(), |
| 2396 &Heap::IteratePointersInDirtyMapsRegion, | 2464 &heap_->IteratePointersInDirtyMapsRegion, |
| 2397 &UpdatePointerToNewGen, | 2465 &UpdatePointerToNewGen, |
| 2398 Heap::WATERMARK_SHOULD_BE_VALID); | 2466 heap_->WATERMARK_SHOULD_BE_VALID); |
| 2399 | 2467 |
| 2400 intptr_t live_maps_size = Heap::map_space()->Size(); | 2468 intptr_t live_maps_size = heap_->map_space()->Size(); |
| 2401 int live_maps = static_cast<int>(live_maps_size / Map::kSize); | 2469 int live_maps = static_cast<int>(live_maps_size / Map::kSize); |
| 2402 ASSERT(live_map_objects_size_ == live_maps_size); | 2470 ASSERT(live_map_objects_size_ == live_maps_size); |
| 2403 | 2471 |
| 2404 if (Heap::map_space()->NeedsCompaction(live_maps)) { | 2472 if (heap_->map_space()->NeedsCompaction(live_maps)) { |
| 2405 MapCompact map_compact(live_maps); | 2473 MapCompact map_compact(heap_, live_maps); |
| 2406 | 2474 |
| 2407 map_compact.CompactMaps(); | 2475 map_compact.CompactMaps(); |
| 2408 map_compact.UpdateMapPointersInRoots(); | 2476 map_compact.UpdateMapPointersInRoots(); |
| 2409 | 2477 |
| 2410 PagedSpaces spaces; | 2478 PagedSpaces spaces; |
| 2411 for (PagedSpace* space = spaces.next(); | 2479 for (PagedSpace* space = spaces.next(); |
| 2412 space != NULL; space = spaces.next()) { | 2480 space != NULL; space = spaces.next()) { |
| 2413 if (space == Heap::map_space()) continue; | 2481 if (space == heap_->map_space()) continue; |
| 2414 map_compact.UpdateMapPointersInPagedSpace(space); | 2482 map_compact.UpdateMapPointersInPagedSpace(space); |
| 2415 } | 2483 } |
| 2416 map_compact.UpdateMapPointersInNewSpace(); | 2484 map_compact.UpdateMapPointersInNewSpace(); |
| 2417 map_compact.UpdateMapPointersInLargeObjectSpace(); | 2485 map_compact.UpdateMapPointersInLargeObjectSpace(); |
| 2418 | 2486 |
| 2419 map_compact.Finish(); | 2487 map_compact.Finish(); |
| 2420 } | 2488 } |
| 2421 } | 2489 } |
| 2422 | 2490 |
| 2423 | 2491 |
| 2424 // Iterate the live objects in a range of addresses (eg, a page or a | 2492 // Iterate the live objects in a range of addresses (eg, a page or a |
| 2425 // semispace). The live regions of the range have been linked into a list. | 2493 // semispace). The live regions of the range have been linked into a list. |
| 2426 // The first live region is [first_live_start, first_live_end), and the last | 2494 // The first live region is [first_live_start, first_live_end), and the last |
| 2427 // address in the range is top. The callback function is used to get the | 2495 // address in the range is top. The callback function is used to get the |
| 2428 // size of each live object. | 2496 // size of each live object. |
| 2429 int MarkCompactCollector::IterateLiveObjectsInRange( | 2497 int MarkCompactCollector::IterateLiveObjectsInRange( |
| 2430 Address start, | 2498 Address start, |
| 2431 Address end, | 2499 Address end, |
| 2432 HeapObjectCallback size_func) { | 2500 LiveObjectCallback size_func) { |
| 2433 int live_objects_size = 0; | 2501 int live_objects_size = 0; |
| 2434 Address current = start; | 2502 Address current = start; |
| 2435 while (current < end) { | 2503 while (current < end) { |
| 2436 uint32_t encoded_map = Memory::uint32_at(current); | 2504 uint32_t encoded_map = Memory::uint32_at(current); |
| 2437 if (encoded_map == kSingleFreeEncoding) { | 2505 if (encoded_map == kSingleFreeEncoding) { |
| 2438 current += kPointerSize; | 2506 current += kPointerSize; |
| 2439 } else if (encoded_map == kMultiFreeEncoding) { | 2507 } else if (encoded_map == kMultiFreeEncoding) { |
| 2440 current += Memory::int_at(current + kIntSize); | 2508 current += Memory::int_at(current + kIntSize); |
| 2441 } else { | 2509 } else { |
| 2442 int size = size_func(HeapObject::FromAddress(current)); | 2510 int size = (this->*size_func)(HeapObject::FromAddress(current)); |
| 2443 current += size; | 2511 current += size; |
| 2444 live_objects_size += size; | 2512 live_objects_size += size; |
| 2445 } | 2513 } |
| 2446 } | 2514 } |
| 2447 return live_objects_size; | 2515 return live_objects_size; |
| 2448 } | 2516 } |
| 2449 | 2517 |
| 2450 | 2518 |
| 2451 int MarkCompactCollector::IterateLiveObjects(NewSpace* space, | 2519 int MarkCompactCollector::IterateLiveObjects( |
| 2452 HeapObjectCallback size_f) { | 2520 NewSpace* space, LiveObjectCallback size_f) { |
| 2453 ASSERT(MARK_LIVE_OBJECTS < state_ && state_ <= RELOCATE_OBJECTS); | 2521 ASSERT(MARK_LIVE_OBJECTS < state_ && state_ <= RELOCATE_OBJECTS); |
| 2454 return IterateLiveObjectsInRange(space->bottom(), space->top(), size_f); | 2522 return IterateLiveObjectsInRange(space->bottom(), space->top(), size_f); |
| 2455 } | 2523 } |
| 2456 | 2524 |
| 2457 | 2525 |
| 2458 int MarkCompactCollector::IterateLiveObjects(PagedSpace* space, | 2526 int MarkCompactCollector::IterateLiveObjects( |
| 2459 HeapObjectCallback size_f) { | 2527 PagedSpace* space, LiveObjectCallback size_f) { |
| 2460 ASSERT(MARK_LIVE_OBJECTS < state_ && state_ <= RELOCATE_OBJECTS); | 2528 ASSERT(MARK_LIVE_OBJECTS < state_ && state_ <= RELOCATE_OBJECTS); |
| 2461 int total = 0; | 2529 int total = 0; |
| 2462 PageIterator it(space, PageIterator::PAGES_IN_USE); | 2530 PageIterator it(space, PageIterator::PAGES_IN_USE); |
| 2463 while (it.has_next()) { | 2531 while (it.has_next()) { |
| 2464 Page* p = it.next(); | 2532 Page* p = it.next(); |
| 2465 total += IterateLiveObjectsInRange(p->ObjectAreaStart(), | 2533 total += IterateLiveObjectsInRange(p->ObjectAreaStart(), |
| 2466 p->AllocationTop(), | 2534 p->AllocationTop(), |
| 2467 size_f); | 2535 size_f); |
| 2468 } | 2536 } |
| 2469 return total; | 2537 return total; |
| 2470 } | 2538 } |
| 2471 | 2539 |
| 2472 | 2540 |
| 2473 // ------------------------------------------------------------------------- | 2541 // ------------------------------------------------------------------------- |
| 2474 // Phase 3: Update pointers | 2542 // Phase 3: Update pointers |
| 2475 | 2543 |
| 2476 // Helper class for updating pointers in HeapObjects. | 2544 // Helper class for updating pointers in HeapObjects. |
| 2477 class UpdatingVisitor: public ObjectVisitor { | 2545 class UpdatingVisitor: public ObjectVisitor { |
| 2478 public: | 2546 public: |
| 2547 explicit UpdatingVisitor(Heap* heap) : heap_(heap) {} |
| 2548 |
| 2479 void VisitPointer(Object** p) { | 2549 void VisitPointer(Object** p) { |
| 2480 UpdatePointer(p); | 2550 UpdatePointer(p); |
| 2481 } | 2551 } |
| 2482 | 2552 |
| 2483 void VisitPointers(Object** start, Object** end) { | 2553 void VisitPointers(Object** start, Object** end) { |
| 2484 // Mark all HeapObject pointers in [start, end) | 2554 // Mark all HeapObject pointers in [start, end) |
| 2485 for (Object** p = start; p < end; p++) UpdatePointer(p); | 2555 for (Object** p = start; p < end; p++) UpdatePointer(p); |
| 2486 } | 2556 } |
| 2487 | 2557 |
| 2488 void VisitCodeTarget(RelocInfo* rinfo) { | 2558 void VisitCodeTarget(RelocInfo* rinfo) { |
| (...skipping 15 matching lines...) Expand all Loading... |
| 2504 reinterpret_cast<Code*>(target)->instruction_start()); | 2574 reinterpret_cast<Code*>(target)->instruction_start()); |
| 2505 } | 2575 } |
| 2506 | 2576 |
| 2507 private: | 2577 private: |
| 2508 void UpdatePointer(Object** p) { | 2578 void UpdatePointer(Object** p) { |
| 2509 if (!(*p)->IsHeapObject()) return; | 2579 if (!(*p)->IsHeapObject()) return; |
| 2510 | 2580 |
| 2511 HeapObject* obj = HeapObject::cast(*p); | 2581 HeapObject* obj = HeapObject::cast(*p); |
| 2512 Address old_addr = obj->address(); | 2582 Address old_addr = obj->address(); |
| 2513 Address new_addr; | 2583 Address new_addr; |
| 2514 ASSERT(!Heap::InFromSpace(obj)); | 2584 ASSERT(!heap_->InFromSpace(obj)); |
| 2515 | 2585 |
| 2516 if (Heap::new_space()->Contains(obj)) { | 2586 if (heap_->new_space()->Contains(obj)) { |
| 2517 Address forwarding_pointer_addr = | 2587 Address forwarding_pointer_addr = |
| 2518 Heap::new_space()->FromSpaceLow() + | 2588 heap_->new_space()->FromSpaceLow() + |
| 2519 Heap::new_space()->ToSpaceOffsetForAddress(old_addr); | 2589 heap_->new_space()->ToSpaceOffsetForAddress(old_addr); |
| 2520 new_addr = Memory::Address_at(forwarding_pointer_addr); | 2590 new_addr = Memory::Address_at(forwarding_pointer_addr); |
| 2521 | 2591 |
| 2522 #ifdef DEBUG | 2592 #ifdef DEBUG |
| 2523 ASSERT(Heap::old_pointer_space()->Contains(new_addr) || | 2593 ASSERT(heap_->old_pointer_space()->Contains(new_addr) || |
| 2524 Heap::old_data_space()->Contains(new_addr) || | 2594 heap_->old_data_space()->Contains(new_addr) || |
| 2525 Heap::new_space()->FromSpaceContains(new_addr) || | 2595 heap_->new_space()->FromSpaceContains(new_addr) || |
| 2526 Heap::lo_space()->Contains(HeapObject::FromAddress(new_addr))); | 2596 heap_->lo_space()->Contains(HeapObject::FromAddress(new_addr))); |
| 2527 | 2597 |
| 2528 if (Heap::new_space()->FromSpaceContains(new_addr)) { | 2598 if (heap_->new_space()->FromSpaceContains(new_addr)) { |
| 2529 ASSERT(Heap::new_space()->FromSpaceOffsetForAddress(new_addr) <= | 2599 ASSERT(heap_->new_space()->FromSpaceOffsetForAddress(new_addr) <= |
| 2530 Heap::new_space()->ToSpaceOffsetForAddress(old_addr)); | 2600 heap_->new_space()->ToSpaceOffsetForAddress(old_addr)); |
| 2531 } | 2601 } |
| 2532 #endif | 2602 #endif |
| 2533 | 2603 |
| 2534 } else if (Heap::lo_space()->Contains(obj)) { | 2604 } else if (heap_->lo_space()->Contains(obj)) { |
| 2535 // Don't move objects in the large object space. | 2605 // Don't move objects in the large object space. |
| 2536 return; | 2606 return; |
| 2537 | 2607 |
| 2538 } else { | 2608 } else { |
| 2539 #ifdef DEBUG | 2609 #ifdef DEBUG |
| 2540 PagedSpaces spaces; | 2610 PagedSpaces spaces; |
| 2541 PagedSpace* original_space = spaces.next(); | 2611 PagedSpace* original_space = spaces.next(); |
| 2542 while (original_space != NULL) { | 2612 while (original_space != NULL) { |
| 2543 if (original_space->Contains(obj)) break; | 2613 if (original_space->Contains(obj)) break; |
| 2544 original_space = spaces.next(); | 2614 original_space = spaces.next(); |
| 2545 } | 2615 } |
| 2546 ASSERT(original_space != NULL); | 2616 ASSERT(original_space != NULL); |
| 2547 #endif | 2617 #endif |
| 2548 new_addr = MarkCompactCollector::GetForwardingAddressInOldSpace(obj); | 2618 new_addr = MarkCompactCollector::GetForwardingAddressInOldSpace(obj); |
| 2549 ASSERT(original_space->Contains(new_addr)); | 2619 ASSERT(original_space->Contains(new_addr)); |
| 2550 ASSERT(original_space->MCSpaceOffsetForAddress(new_addr) <= | 2620 ASSERT(original_space->MCSpaceOffsetForAddress(new_addr) <= |
| 2551 original_space->MCSpaceOffsetForAddress(old_addr)); | 2621 original_space->MCSpaceOffsetForAddress(old_addr)); |
| 2552 } | 2622 } |
| 2553 | 2623 |
| 2554 *p = HeapObject::FromAddress(new_addr); | 2624 *p = HeapObject::FromAddress(new_addr); |
| 2555 | 2625 |
| 2556 #ifdef DEBUG | 2626 #ifdef DEBUG |
| 2557 if (FLAG_gc_verbose) { | 2627 if (FLAG_gc_verbose) { |
| 2558 PrintF("update %p : %p -> %p\n", | 2628 PrintF("update %p : %p -> %p\n", |
| 2559 reinterpret_cast<Address>(p), old_addr, new_addr); | 2629 reinterpret_cast<Address>(p), old_addr, new_addr); |
| 2560 } | 2630 } |
| 2561 #endif | 2631 #endif |
| 2562 } | 2632 } |
| 2633 |
| 2634 Heap* heap_; |
| 2563 }; | 2635 }; |
| 2564 | 2636 |
| 2565 | 2637 |
| 2566 void MarkCompactCollector::UpdatePointers() { | 2638 void MarkCompactCollector::UpdatePointers() { |
| 2567 #ifdef DEBUG | 2639 #ifdef DEBUG |
| 2568 ASSERT(state_ == ENCODE_FORWARDING_ADDRESSES); | 2640 ASSERT(state_ == ENCODE_FORWARDING_ADDRESSES); |
| 2569 state_ = UPDATE_POINTERS; | 2641 state_ = UPDATE_POINTERS; |
| 2570 #endif | 2642 #endif |
| 2571 UpdatingVisitor updating_visitor; | 2643 UpdatingVisitor updating_visitor(heap_); |
| 2572 RuntimeProfiler::UpdateSamplesAfterCompact(&updating_visitor); | 2644 heap_->isolate()->runtime_profiler()->UpdateSamplesAfterCompact( |
| 2573 Heap::IterateRoots(&updating_visitor, VISIT_ONLY_STRONG); | 2645 &updating_visitor); |
| 2574 GlobalHandles::IterateWeakRoots(&updating_visitor); | 2646 heap_->IterateRoots(&updating_visitor, VISIT_ONLY_STRONG); |
| 2647 heap_->isolate()->global_handles()->IterateWeakRoots(&updating_visitor); |
| 2575 | 2648 |
| 2576 // Update the pointer to the head of the weak list of global contexts. | 2649 // Update the pointer to the head of the weak list of global contexts. |
| 2577 updating_visitor.VisitPointer(&Heap::global_contexts_list_); | 2650 updating_visitor.VisitPointer(&heap_->global_contexts_list_); |
| 2578 | 2651 |
| 2579 LiveObjectList::IterateElements(&updating_visitor); | 2652 LiveObjectList::IterateElements(&updating_visitor); |
| 2580 | 2653 |
| 2581 int live_maps_size = IterateLiveObjects(Heap::map_space(), | 2654 int live_maps_size = IterateLiveObjects( |
| 2582 &UpdatePointersInOldObject); | 2655 heap_->map_space(), &MarkCompactCollector::UpdatePointersInOldObject); |
| 2583 int live_pointer_olds_size = IterateLiveObjects(Heap::old_pointer_space(), | 2656 int live_pointer_olds_size = IterateLiveObjects( |
| 2584 &UpdatePointersInOldObject); | 2657 heap_->old_pointer_space(), |
| 2585 int live_data_olds_size = IterateLiveObjects(Heap::old_data_space(), | 2658 &MarkCompactCollector::UpdatePointersInOldObject); |
| 2586 &UpdatePointersInOldObject); | 2659 int live_data_olds_size = IterateLiveObjects( |
| 2587 int live_codes_size = IterateLiveObjects(Heap::code_space(), | 2660 heap_->old_data_space(), |
| 2588 &UpdatePointersInOldObject); | 2661 &MarkCompactCollector::UpdatePointersInOldObject); |
| 2589 int live_cells_size = IterateLiveObjects(Heap::cell_space(), | 2662 int live_codes_size = IterateLiveObjects( |
| 2590 &UpdatePointersInOldObject); | 2663 heap_->code_space(), &MarkCompactCollector::UpdatePointersInOldObject); |
| 2591 int live_news_size = IterateLiveObjects(Heap::new_space(), | 2664 int live_cells_size = IterateLiveObjects( |
| 2592 &UpdatePointersInNewObject); | 2665 heap_->cell_space(), &MarkCompactCollector::UpdatePointersInOldObject); |
| 2666 int live_news_size = IterateLiveObjects( |
| 2667 heap_->new_space(), &MarkCompactCollector::UpdatePointersInNewObject); |
| 2593 | 2668 |
| 2594 // Large objects do not move, the map word can be updated directly. | 2669 // Large objects do not move, the map word can be updated directly. |
| 2595 LargeObjectIterator it(Heap::lo_space()); | 2670 LargeObjectIterator it(heap_->lo_space()); |
| 2596 for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) { | 2671 for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) { |
| 2597 UpdatePointersInNewObject(obj); | 2672 UpdatePointersInNewObject(obj); |
| 2598 } | 2673 } |
| 2599 | 2674 |
| 2600 USE(live_maps_size); | 2675 USE(live_maps_size); |
| 2601 USE(live_pointer_olds_size); | 2676 USE(live_pointer_olds_size); |
| 2602 USE(live_data_olds_size); | 2677 USE(live_data_olds_size); |
| 2603 USE(live_codes_size); | 2678 USE(live_codes_size); |
| 2604 USE(live_cells_size); | 2679 USE(live_cells_size); |
| 2605 USE(live_news_size); | 2680 USE(live_news_size); |
| 2606 ASSERT(live_maps_size == live_map_objects_size_); | 2681 ASSERT(live_maps_size == live_map_objects_size_); |
| 2607 ASSERT(live_data_olds_size == live_old_data_objects_size_); | 2682 ASSERT(live_data_olds_size == live_old_data_objects_size_); |
| 2608 ASSERT(live_pointer_olds_size == live_old_pointer_objects_size_); | 2683 ASSERT(live_pointer_olds_size == live_old_pointer_objects_size_); |
| 2609 ASSERT(live_codes_size == live_code_objects_size_); | 2684 ASSERT(live_codes_size == live_code_objects_size_); |
| 2610 ASSERT(live_cells_size == live_cell_objects_size_); | 2685 ASSERT(live_cells_size == live_cell_objects_size_); |
| 2611 ASSERT(live_news_size == live_young_objects_size_); | 2686 ASSERT(live_news_size == live_young_objects_size_); |
| 2612 } | 2687 } |
| 2613 | 2688 |
| 2614 | 2689 |
| 2615 int MarkCompactCollector::UpdatePointersInNewObject(HeapObject* obj) { | 2690 int MarkCompactCollector::UpdatePointersInNewObject(HeapObject* obj) { |
| 2616 // Keep old map pointers | 2691 // Keep old map pointers |
| 2617 Map* old_map = obj->map(); | 2692 Map* old_map = obj->map(); |
| 2618 ASSERT(old_map->IsHeapObject()); | 2693 ASSERT(old_map->IsHeapObject()); |
| 2619 | 2694 |
| 2620 Address forwarded = GetForwardingAddressInOldSpace(old_map); | 2695 Address forwarded = GetForwardingAddressInOldSpace(old_map); |
| 2621 | 2696 |
| 2622 ASSERT(Heap::map_space()->Contains(old_map)); | 2697 ASSERT(heap_->map_space()->Contains(old_map)); |
| 2623 ASSERT(Heap::map_space()->Contains(forwarded)); | 2698 ASSERT(heap_->map_space()->Contains(forwarded)); |
| 2624 #ifdef DEBUG | 2699 #ifdef DEBUG |
| 2625 if (FLAG_gc_verbose) { | 2700 if (FLAG_gc_verbose) { |
| 2626 PrintF("update %p : %p -> %p\n", obj->address(), old_map->address(), | 2701 PrintF("update %p : %p -> %p\n", obj->address(), old_map->address(), |
| 2627 forwarded); | 2702 forwarded); |
| 2628 } | 2703 } |
| 2629 #endif | 2704 #endif |
| 2630 // Update the map pointer. | 2705 // Update the map pointer. |
| 2631 obj->set_map(reinterpret_cast<Map*>(HeapObject::FromAddress(forwarded))); | 2706 obj->set_map(reinterpret_cast<Map*>(HeapObject::FromAddress(forwarded))); |
| 2632 | 2707 |
| 2633 // We have to compute the object size relying on the old map because | 2708 // We have to compute the object size relying on the old map because |
| 2634 // map objects are not relocated yet. | 2709 // map objects are not relocated yet. |
| 2635 int obj_size = obj->SizeFromMap(old_map); | 2710 int obj_size = obj->SizeFromMap(old_map); |
| 2636 | 2711 |
| 2637 // Update pointers in the object body. | 2712 // Update pointers in the object body. |
| 2638 UpdatingVisitor updating_visitor; | 2713 UpdatingVisitor updating_visitor(heap_); |
| 2639 obj->IterateBody(old_map->instance_type(), obj_size, &updating_visitor); | 2714 obj->IterateBody(old_map->instance_type(), obj_size, &updating_visitor); |
| 2640 return obj_size; | 2715 return obj_size; |
| 2641 } | 2716 } |
| 2642 | 2717 |
| 2643 | 2718 |
| 2644 int MarkCompactCollector::UpdatePointersInOldObject(HeapObject* obj) { | 2719 int MarkCompactCollector::UpdatePointersInOldObject(HeapObject* obj) { |
| 2645 // Decode the map pointer. | 2720 // Decode the map pointer. |
| 2646 MapWord encoding = obj->map_word(); | 2721 MapWord encoding = obj->map_word(); |
| 2647 Address map_addr = encoding.DecodeMapAddress(Heap::map_space()); | 2722 Address map_addr = encoding.DecodeMapAddress(heap_->map_space()); |
| 2648 ASSERT(Heap::map_space()->Contains(HeapObject::FromAddress(map_addr))); | 2723 ASSERT(heap_->map_space()->Contains(HeapObject::FromAddress(map_addr))); |
| 2649 | 2724 |
| 2650 // At this point, the first word of map_addr is also encoded, cannot | 2725 // At this point, the first word of map_addr is also encoded, cannot |
| 2651 // cast it to Map* using Map::cast. | 2726 // cast it to Map* using Map::cast. |
| 2652 Map* map = reinterpret_cast<Map*>(HeapObject::FromAddress(map_addr)); | 2727 Map* map = reinterpret_cast<Map*>(HeapObject::FromAddress(map_addr)); |
| 2653 int obj_size = obj->SizeFromMap(map); | 2728 int obj_size = obj->SizeFromMap(map); |
| 2654 InstanceType type = map->instance_type(); | 2729 InstanceType type = map->instance_type(); |
| 2655 | 2730 |
| 2656 // Update map pointer. | 2731 // Update map pointer. |
| 2657 Address new_map_addr = GetForwardingAddressInOldSpace(map); | 2732 Address new_map_addr = GetForwardingAddressInOldSpace(map); |
| 2658 int offset = encoding.DecodeOffset(); | 2733 int offset = encoding.DecodeOffset(); |
| 2659 obj->set_map_word(MapWord::EncodeAddress(new_map_addr, offset)); | 2734 obj->set_map_word(MapWord::EncodeAddress(new_map_addr, offset)); |
| 2660 | 2735 |
| 2661 #ifdef DEBUG | 2736 #ifdef DEBUG |
| 2662 if (FLAG_gc_verbose) { | 2737 if (FLAG_gc_verbose) { |
| 2663 PrintF("update %p : %p -> %p\n", obj->address(), | 2738 PrintF("update %p : %p -> %p\n", obj->address(), |
| 2664 map_addr, new_map_addr); | 2739 map_addr, new_map_addr); |
| 2665 } | 2740 } |
| 2666 #endif | 2741 #endif |
| 2667 | 2742 |
| 2668 // Update pointers in the object body. | 2743 // Update pointers in the object body. |
| 2669 UpdatingVisitor updating_visitor; | 2744 UpdatingVisitor updating_visitor(heap_); |
| 2670 obj->IterateBody(type, obj_size, &updating_visitor); | 2745 obj->IterateBody(type, obj_size, &updating_visitor); |
| 2671 return obj_size; | 2746 return obj_size; |
| 2672 } | 2747 } |
| 2673 | 2748 |
| 2674 | 2749 |
| 2675 Address MarkCompactCollector::GetForwardingAddressInOldSpace(HeapObject* obj) { | 2750 Address MarkCompactCollector::GetForwardingAddressInOldSpace(HeapObject* obj) { |
| 2676 // Object should either be in old or map space. | 2751 // Object should either be in old or map space. |
| 2677 MapWord encoding = obj->map_word(); | 2752 MapWord encoding = obj->map_word(); |
| 2678 | 2753 |
| 2679 // Offset to the first live object's forwarding address. | 2754 // Offset to the first live object's forwarding address. |
| (...skipping 35 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2715 // ------------------------------------------------------------------------- | 2790 // ------------------------------------------------------------------------- |
| 2716 // Phase 4: Relocate objects | 2791 // Phase 4: Relocate objects |
| 2717 | 2792 |
| 2718 void MarkCompactCollector::RelocateObjects() { | 2793 void MarkCompactCollector::RelocateObjects() { |
| 2719 #ifdef DEBUG | 2794 #ifdef DEBUG |
| 2720 ASSERT(state_ == UPDATE_POINTERS); | 2795 ASSERT(state_ == UPDATE_POINTERS); |
| 2721 state_ = RELOCATE_OBJECTS; | 2796 state_ = RELOCATE_OBJECTS; |
| 2722 #endif | 2797 #endif |
| 2723 // Relocates objects, always relocate map objects first. Relocating | 2798 // Relocates objects, always relocate map objects first. Relocating |
| 2724 // objects in other space relies on map objects to get object size. | 2799 // objects in other space relies on map objects to get object size. |
| 2725 int live_maps_size = IterateLiveObjects(Heap::map_space(), | 2800 int live_maps_size = IterateLiveObjects( |
| 2726 &RelocateMapObject); | 2801 heap_->map_space(), &MarkCompactCollector::RelocateMapObject); |
| 2727 int live_pointer_olds_size = IterateLiveObjects(Heap::old_pointer_space(), | 2802 int live_pointer_olds_size = IterateLiveObjects( |
| 2728 &RelocateOldPointerObject); | 2803 heap_->old_pointer_space(), |
| 2729 int live_data_olds_size = IterateLiveObjects(Heap::old_data_space(), | 2804 &MarkCompactCollector::RelocateOldPointerObject); |
| 2730 &RelocateOldDataObject); | 2805 int live_data_olds_size = IterateLiveObjects( |
| 2731 int live_codes_size = IterateLiveObjects(Heap::code_space(), | 2806 heap_->old_data_space(), &MarkCompactCollector::RelocateOldDataObject); |
| 2732 &RelocateCodeObject); | 2807 int live_codes_size = IterateLiveObjects( |
| 2733 int live_cells_size = IterateLiveObjects(Heap::cell_space(), | 2808 heap_->code_space(), &MarkCompactCollector::RelocateCodeObject); |
| 2734 &RelocateCellObject); | 2809 int live_cells_size = IterateLiveObjects( |
| 2735 int live_news_size = IterateLiveObjects(Heap::new_space(), | 2810 heap_->cell_space(), &MarkCompactCollector::RelocateCellObject); |
| 2736 &RelocateNewObject); | 2811 int live_news_size = IterateLiveObjects( |
| 2812 heap_->new_space(), &MarkCompactCollector::RelocateNewObject); |
| 2737 | 2813 |
| 2738 USE(live_maps_size); | 2814 USE(live_maps_size); |
| 2739 USE(live_pointer_olds_size); | 2815 USE(live_pointer_olds_size); |
| 2740 USE(live_data_olds_size); | 2816 USE(live_data_olds_size); |
| 2741 USE(live_codes_size); | 2817 USE(live_codes_size); |
| 2742 USE(live_cells_size); | 2818 USE(live_cells_size); |
| 2743 USE(live_news_size); | 2819 USE(live_news_size); |
| 2744 ASSERT(live_maps_size == live_map_objects_size_); | 2820 ASSERT(live_maps_size == live_map_objects_size_); |
| 2745 ASSERT(live_data_olds_size == live_old_data_objects_size_); | 2821 ASSERT(live_data_olds_size == live_old_data_objects_size_); |
| 2746 ASSERT(live_pointer_olds_size == live_old_pointer_objects_size_); | 2822 ASSERT(live_pointer_olds_size == live_old_pointer_objects_size_); |
| 2747 ASSERT(live_codes_size == live_code_objects_size_); | 2823 ASSERT(live_codes_size == live_code_objects_size_); |
| 2748 ASSERT(live_cells_size == live_cell_objects_size_); | 2824 ASSERT(live_cells_size == live_cell_objects_size_); |
| 2749 ASSERT(live_news_size == live_young_objects_size_); | 2825 ASSERT(live_news_size == live_young_objects_size_); |
| 2750 | 2826 |
| 2751 // Flip from and to spaces | 2827 // Flip from and to spaces |
| 2752 Heap::new_space()->Flip(); | 2828 heap_->new_space()->Flip(); |
| 2753 | 2829 |
| 2754 Heap::new_space()->MCCommitRelocationInfo(); | 2830 heap_->new_space()->MCCommitRelocationInfo(); |
| 2755 | 2831 |
| 2756 // Set age_mark to the bottom of to-space | 2832 // Set age_mark to the bottom of to-space |
| 2757 Address mark = Heap::new_space()->bottom(); | 2833 Address mark = heap_->new_space()->bottom(); |
| 2758 Heap::new_space()->set_age_mark(mark); | 2834 heap_->new_space()->set_age_mark(mark); |
| 2759 | 2835 |
| 2760 PagedSpaces spaces; | 2836 PagedSpaces spaces; |
| 2761 for (PagedSpace* space = spaces.next(); space != NULL; space = spaces.next()) | 2837 for (PagedSpace* space = spaces.next(); space != NULL; space = spaces.next()) |
| 2762 space->MCCommitRelocationInfo(); | 2838 space->MCCommitRelocationInfo(); |
| 2763 | 2839 |
| 2764 Heap::CheckNewSpaceExpansionCriteria(); | 2840 heap_->CheckNewSpaceExpansionCriteria(); |
| 2765 Heap::IncrementYoungSurvivorsCounter(live_news_size); | 2841 heap_->IncrementYoungSurvivorsCounter(live_news_size); |
| 2766 } | 2842 } |
| 2767 | 2843 |
| 2768 | 2844 |
| 2769 int MarkCompactCollector::RelocateMapObject(HeapObject* obj) { | 2845 int MarkCompactCollector::RelocateMapObject(HeapObject* obj) { |
| 2770 // Recover map pointer. | 2846 // Recover map pointer. |
| 2771 MapWord encoding = obj->map_word(); | 2847 MapWord encoding = obj->map_word(); |
| 2772 Address map_addr = encoding.DecodeMapAddress(Heap::map_space()); | 2848 Address map_addr = encoding.DecodeMapAddress(heap_->map_space()); |
| 2773 ASSERT(Heap::map_space()->Contains(HeapObject::FromAddress(map_addr))); | 2849 ASSERT(heap_->map_space()->Contains(HeapObject::FromAddress(map_addr))); |
| 2774 | 2850 |
| 2775 // Get forwarding address before resetting map pointer | 2851 // Get forwarding address before resetting map pointer |
| 2776 Address new_addr = GetForwardingAddressInOldSpace(obj); | 2852 Address new_addr = GetForwardingAddressInOldSpace(obj); |
| 2777 | 2853 |
| 2778 // Reset map pointer. The meta map object may not be copied yet so | 2854 // Reset map pointer. The meta map object may not be copied yet so |
| 2779 // Map::cast does not yet work. | 2855 // Map::cast does not yet work. |
| 2780 obj->set_map(reinterpret_cast<Map*>(HeapObject::FromAddress(map_addr))); | 2856 obj->set_map(reinterpret_cast<Map*>(HeapObject::FromAddress(map_addr))); |
| 2781 | 2857 |
| 2782 Address old_addr = obj->address(); | 2858 Address old_addr = obj->address(); |
| 2783 | 2859 |
| 2784 if (new_addr != old_addr) { | 2860 if (new_addr != old_addr) { |
| 2785 // Move contents. | 2861 // Move contents. |
| 2786 Heap::MoveBlockToOldSpaceAndUpdateRegionMarks(new_addr, | 2862 heap_->MoveBlockToOldSpaceAndUpdateRegionMarks(new_addr, |
| 2787 old_addr, | 2863 old_addr, |
| 2788 Map::kSize); | 2864 Map::kSize); |
| 2789 } | 2865 } |
| 2790 | 2866 |
| 2791 #ifdef DEBUG | 2867 #ifdef DEBUG |
| 2792 if (FLAG_gc_verbose) { | 2868 if (FLAG_gc_verbose) { |
| 2793 PrintF("relocate %p -> %p\n", old_addr, new_addr); | 2869 PrintF("relocate %p -> %p\n", old_addr, new_addr); |
| 2794 } | 2870 } |
| 2795 #endif | 2871 #endif |
| 2796 | 2872 |
| 2797 return Map::kSize; | 2873 return Map::kSize; |
| 2798 } | 2874 } |
| (...skipping 23 matching lines...) Expand all Loading... |
| 2822 #endif | 2898 #endif |
| 2823 | 2899 |
| 2824 return obj_size; | 2900 return obj_size; |
| 2825 } | 2901 } |
| 2826 | 2902 |
| 2827 | 2903 |
| 2828 int MarkCompactCollector::RelocateOldNonCodeObject(HeapObject* obj, | 2904 int MarkCompactCollector::RelocateOldNonCodeObject(HeapObject* obj, |
| 2829 PagedSpace* space) { | 2905 PagedSpace* space) { |
| 2830 // Recover map pointer. | 2906 // Recover map pointer. |
| 2831 MapWord encoding = obj->map_word(); | 2907 MapWord encoding = obj->map_word(); |
| 2832 Address map_addr = encoding.DecodeMapAddress(Heap::map_space()); | 2908 Address map_addr = encoding.DecodeMapAddress(heap_->map_space()); |
| 2833 ASSERT(Heap::map_space()->Contains(map_addr)); | 2909 ASSERT(heap_->map_space()->Contains(map_addr)); |
| 2834 | 2910 |
| 2835 // Get forwarding address before resetting map pointer. | 2911 // Get forwarding address before resetting map pointer. |
| 2836 Address new_addr = GetForwardingAddressInOldSpace(obj); | 2912 Address new_addr = GetForwardingAddressInOldSpace(obj); |
| 2837 | 2913 |
| 2838 // Reset the map pointer. | 2914 // Reset the map pointer. |
| 2839 int obj_size = RestoreMap(obj, space, new_addr, map_addr); | 2915 int obj_size = RestoreMap(obj, space, new_addr, map_addr); |
| 2840 | 2916 |
| 2841 Address old_addr = obj->address(); | 2917 Address old_addr = obj->address(); |
| 2842 | 2918 |
| 2843 if (new_addr != old_addr) { | 2919 if (new_addr != old_addr) { |
| 2844 // Move contents. | 2920 // Move contents. |
| 2845 if (space == Heap::old_data_space()) { | 2921 if (space == heap_->old_data_space()) { |
| 2846 Heap::MoveBlock(new_addr, old_addr, obj_size); | 2922 heap_->MoveBlock(new_addr, old_addr, obj_size); |
| 2847 } else { | 2923 } else { |
| 2848 Heap::MoveBlockToOldSpaceAndUpdateRegionMarks(new_addr, | 2924 heap_->MoveBlockToOldSpaceAndUpdateRegionMarks(new_addr, |
| 2849 old_addr, | 2925 old_addr, |
| 2850 obj_size); | 2926 obj_size); |
| 2851 } | 2927 } |
| 2852 } | 2928 } |
| 2853 | 2929 |
| 2854 ASSERT(!HeapObject::FromAddress(new_addr)->IsCode()); | 2930 ASSERT(!HeapObject::FromAddress(new_addr)->IsCode()); |
| 2855 | 2931 |
| 2856 HeapObject* copied_to = HeapObject::FromAddress(new_addr); | 2932 HeapObject* copied_to = HeapObject::FromAddress(new_addr); |
| 2857 if (copied_to->IsSharedFunctionInfo()) { | 2933 if (copied_to->IsSharedFunctionInfo()) { |
| 2858 PROFILE(SharedFunctionInfoMoveEvent(old_addr, new_addr)); | 2934 PROFILE(heap_->isolate(), |
| 2935 SharedFunctionInfoMoveEvent(old_addr, new_addr)); |
| 2859 } | 2936 } |
| 2860 HEAP_PROFILE(ObjectMoveEvent(old_addr, new_addr)); | 2937 HEAP_PROFILE(heap_, ObjectMoveEvent(old_addr, new_addr)); |
| 2861 | 2938 |
| 2862 return obj_size; | 2939 return obj_size; |
| 2863 } | 2940 } |
| 2864 | 2941 |
| 2865 | 2942 |
| 2866 int MarkCompactCollector::RelocateOldPointerObject(HeapObject* obj) { | 2943 int MarkCompactCollector::RelocateOldPointerObject(HeapObject* obj) { |
| 2867 return RelocateOldNonCodeObject(obj, Heap::old_pointer_space()); | 2944 return RelocateOldNonCodeObject(obj, heap_->old_pointer_space()); |
| 2868 } | 2945 } |
| 2869 | 2946 |
| 2870 | 2947 |
| 2871 int MarkCompactCollector::RelocateOldDataObject(HeapObject* obj) { | 2948 int MarkCompactCollector::RelocateOldDataObject(HeapObject* obj) { |
| 2872 return RelocateOldNonCodeObject(obj, Heap::old_data_space()); | 2949 return RelocateOldNonCodeObject(obj, heap_->old_data_space()); |
| 2873 } | 2950 } |
| 2874 | 2951 |
| 2875 | 2952 |
| 2876 int MarkCompactCollector::RelocateCellObject(HeapObject* obj) { | 2953 int MarkCompactCollector::RelocateCellObject(HeapObject* obj) { |
| 2877 return RelocateOldNonCodeObject(obj, Heap::cell_space()); | 2954 return RelocateOldNonCodeObject(obj, heap_->cell_space()); |
| 2878 } | 2955 } |
| 2879 | 2956 |
| 2880 | 2957 |
| 2881 int MarkCompactCollector::RelocateCodeObject(HeapObject* obj) { | 2958 int MarkCompactCollector::RelocateCodeObject(HeapObject* obj) { |
| 2882 // Recover map pointer. | 2959 // Recover map pointer. |
| 2883 MapWord encoding = obj->map_word(); | 2960 MapWord encoding = obj->map_word(); |
| 2884 Address map_addr = encoding.DecodeMapAddress(Heap::map_space()); | 2961 Address map_addr = encoding.DecodeMapAddress(heap_->map_space()); |
| 2885 ASSERT(Heap::map_space()->Contains(HeapObject::FromAddress(map_addr))); | 2962 ASSERT(heap_->map_space()->Contains(HeapObject::FromAddress(map_addr))); |
| 2886 | 2963 |
| 2887 // Get forwarding address before resetting map pointer | 2964 // Get forwarding address before resetting map pointer |
| 2888 Address new_addr = GetForwardingAddressInOldSpace(obj); | 2965 Address new_addr = GetForwardingAddressInOldSpace(obj); |
| 2889 | 2966 |
| 2890 // Reset the map pointer. | 2967 // Reset the map pointer. |
| 2891 int obj_size = RestoreMap(obj, Heap::code_space(), new_addr, map_addr); | 2968 int obj_size = RestoreMap(obj, heap_->code_space(), new_addr, map_addr); |
| 2892 | 2969 |
| 2893 Address old_addr = obj->address(); | 2970 Address old_addr = obj->address(); |
| 2894 | 2971 |
| 2895 if (new_addr != old_addr) { | 2972 if (new_addr != old_addr) { |
| 2896 // Move contents. | 2973 // Move contents. |
| 2897 Heap::MoveBlock(new_addr, old_addr, obj_size); | 2974 heap_->MoveBlock(new_addr, old_addr, obj_size); |
| 2898 } | 2975 } |
| 2899 | 2976 |
| 2900 HeapObject* copied_to = HeapObject::FromAddress(new_addr); | 2977 HeapObject* copied_to = HeapObject::FromAddress(new_addr); |
| 2901 if (copied_to->IsCode()) { | 2978 if (copied_to->IsCode()) { |
| 2902 // May also update inline cache target. | 2979 // May also update inline cache target. |
| 2903 Code::cast(copied_to)->Relocate(new_addr - old_addr); | 2980 Code::cast(copied_to)->Relocate(new_addr - old_addr); |
| 2904 // Notify the logger that compiled code has moved. | 2981 // Notify the logger that compiled code has moved. |
| 2905 PROFILE(CodeMoveEvent(old_addr, new_addr)); | 2982 PROFILE(heap_->isolate(), CodeMoveEvent(old_addr, new_addr)); |
| 2906 } | 2983 } |
| 2907 HEAP_PROFILE(ObjectMoveEvent(old_addr, new_addr)); | 2984 HEAP_PROFILE(heap_, ObjectMoveEvent(old_addr, new_addr)); |
| 2908 | 2985 |
| 2909 return obj_size; | 2986 return obj_size; |
| 2910 } | 2987 } |
| 2911 | 2988 |
| 2912 | 2989 |
| 2913 int MarkCompactCollector::RelocateNewObject(HeapObject* obj) { | 2990 int MarkCompactCollector::RelocateNewObject(HeapObject* obj) { |
| 2914 int obj_size = obj->Size(); | 2991 int obj_size = obj->Size(); |
| 2915 | 2992 |
| 2916 // Get forwarding address | 2993 // Get forwarding address |
| 2917 Address old_addr = obj->address(); | 2994 Address old_addr = obj->address(); |
| 2918 int offset = Heap::new_space()->ToSpaceOffsetForAddress(old_addr); | 2995 int offset = heap_->new_space()->ToSpaceOffsetForAddress(old_addr); |
| 2919 | 2996 |
| 2920 Address new_addr = | 2997 Address new_addr = |
| 2921 Memory::Address_at(Heap::new_space()->FromSpaceLow() + offset); | 2998 Memory::Address_at(heap_->new_space()->FromSpaceLow() + offset); |
| 2922 | 2999 |
| 2923 #ifdef DEBUG | 3000 #ifdef DEBUG |
| 2924 if (Heap::new_space()->FromSpaceContains(new_addr)) { | 3001 if (heap_->new_space()->FromSpaceContains(new_addr)) { |
| 2925 ASSERT(Heap::new_space()->FromSpaceOffsetForAddress(new_addr) <= | 3002 ASSERT(heap_->new_space()->FromSpaceOffsetForAddress(new_addr) <= |
| 2926 Heap::new_space()->ToSpaceOffsetForAddress(old_addr)); | 3003 heap_->new_space()->ToSpaceOffsetForAddress(old_addr)); |
| 2927 } else { | 3004 } else { |
| 2928 ASSERT(Heap::TargetSpace(obj) == Heap::old_pointer_space() || | 3005 ASSERT(heap_->TargetSpace(obj) == heap_->old_pointer_space() || |
| 2929 Heap::TargetSpace(obj) == Heap::old_data_space()); | 3006 heap_->TargetSpace(obj) == heap_->old_data_space()); |
| 2930 } | 3007 } |
| 2931 #endif | 3008 #endif |
| 2932 | 3009 |
| 2933 // New and old addresses cannot overlap. | 3010 // New and old addresses cannot overlap. |
| 2934 if (Heap::InNewSpace(HeapObject::FromAddress(new_addr))) { | 3011 if (heap_->InNewSpace(HeapObject::FromAddress(new_addr))) { |
| 2935 Heap::CopyBlock(new_addr, old_addr, obj_size); | 3012 heap_->CopyBlock(new_addr, old_addr, obj_size); |
| 2936 } else { | 3013 } else { |
| 2937 Heap::CopyBlockToOldSpaceAndUpdateRegionMarks(new_addr, | 3014 heap_->CopyBlockToOldSpaceAndUpdateRegionMarks(new_addr, |
| 2938 old_addr, | 3015 old_addr, |
| 2939 obj_size); | 3016 obj_size); |
| 2940 } | 3017 } |
| 2941 | 3018 |
| 2942 #ifdef DEBUG | 3019 #ifdef DEBUG |
| 2943 if (FLAG_gc_verbose) { | 3020 if (FLAG_gc_verbose) { |
| 2944 PrintF("relocate %p -> %p\n", old_addr, new_addr); | 3021 PrintF("relocate %p -> %p\n", old_addr, new_addr); |
| 2945 } | 3022 } |
| 2946 #endif | 3023 #endif |
| 2947 | 3024 |
| 2948 HeapObject* copied_to = HeapObject::FromAddress(new_addr); | 3025 HeapObject* copied_to = HeapObject::FromAddress(new_addr); |
| 2949 if (copied_to->IsSharedFunctionInfo()) { | 3026 if (copied_to->IsSharedFunctionInfo()) { |
| 2950 PROFILE(SharedFunctionInfoMoveEvent(old_addr, new_addr)); | 3027 PROFILE(heap_->isolate(), |
| 3028 SharedFunctionInfoMoveEvent(old_addr, new_addr)); |
| 2951 } | 3029 } |
| 2952 HEAP_PROFILE(ObjectMoveEvent(old_addr, new_addr)); | 3030 HEAP_PROFILE(heap_, ObjectMoveEvent(old_addr, new_addr)); |
| 2953 | 3031 |
| 2954 return obj_size; | 3032 return obj_size; |
| 2955 } | 3033 } |
| 2956 | 3034 |
| 2957 | 3035 |
| 3036 void MarkCompactCollector::EnableCodeFlushing(bool enable) { |
| 3037 if (enable) { |
| 3038 if (code_flusher_ != NULL) return; |
| 3039 code_flusher_ = new CodeFlusher(heap_->isolate()); |
| 3040 } else { |
| 3041 if (code_flusher_ == NULL) return; |
| 3042 delete code_flusher_; |
| 3043 code_flusher_ = NULL; |
| 3044 } |
| 3045 } |
| 3046 |
| 3047 |
| 2958 void MarkCompactCollector::ReportDeleteIfNeeded(HeapObject* obj) { | 3048 void MarkCompactCollector::ReportDeleteIfNeeded(HeapObject* obj) { |
| 2959 #ifdef ENABLE_GDB_JIT_INTERFACE | 3049 #ifdef ENABLE_GDB_JIT_INTERFACE |
| 2960 if (obj->IsCode()) { | 3050 if (obj->IsCode()) { |
| 2961 GDBJITInterface::RemoveCode(reinterpret_cast<Code*>(obj)); | 3051 GDBJITInterface::RemoveCode(reinterpret_cast<Code*>(obj)); |
| 2962 } | 3052 } |
| 2963 #endif | 3053 #endif |
| 2964 #ifdef ENABLE_LOGGING_AND_PROFILING | 3054 #ifdef ENABLE_LOGGING_AND_PROFILING |
| 2965 if (obj->IsCode()) { | 3055 if (obj->IsCode()) { |
| 2966 PROFILE(CodeDeleteEvent(obj->address())); | 3056 PROFILE(ISOLATE, CodeDeleteEvent(obj->address())); |
| 2967 } | 3057 } |
| 2968 #endif | 3058 #endif |
| 2969 } | 3059 } |
| 2970 | 3060 |
| 2971 | 3061 |
| 2972 int MarkCompactCollector::SizeOfMarkedObject(HeapObject* obj) { | 3062 int MarkCompactCollector::SizeOfMarkedObject(HeapObject* obj) { |
| 2973 MapWord map_word = obj->map_word(); | 3063 MapWord map_word = obj->map_word(); |
| 2974 map_word.ClearMark(); | 3064 map_word.ClearMark(); |
| 2975 return obj->SizeFromMap(map_word.ToMap()); | 3065 return obj->SizeFromMap(map_word.ToMap()); |
| 2976 } | 3066 } |
| 2977 | 3067 |
| 2978 | 3068 |
| 2979 void MarkCompactCollector::Initialize() { | 3069 void MarkCompactCollector::Initialize() { |
| 2980 StaticPointersToNewGenUpdatingVisitor::Initialize(); | 3070 StaticPointersToNewGenUpdatingVisitor::Initialize(); |
| 2981 StaticMarkingVisitor::Initialize(); | 3071 StaticMarkingVisitor::Initialize(); |
| 2982 } | 3072 } |
| 2983 | 3073 |
| 2984 | 3074 |
| 2985 } } // namespace v8::internal | 3075 } } // namespace v8::internal |
| OLD | NEW |