| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/heap/heap.h" | 5 #include "src/heap/heap.h" |
| 6 | 6 |
| 7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
| 8 #include "src/api.h" | 8 #include "src/api.h" |
| 9 #include "src/ast/context-slot-cache.h" | 9 #include "src/ast/context-slot-cache.h" |
| 10 #include "src/base/bits.h" | 10 #include "src/base/bits.h" |
| (...skipping 78 matching lines...) |
| 89 // Will be 4 * reserved_semispace_size_ to ensure that young | 89 // Will be 4 * reserved_semispace_size_ to ensure that young |
| 90 // generation can be aligned to its size. | 90 // generation can be aligned to its size. |
| 91 maximum_committed_(0), | 91 maximum_committed_(0), |
| 92 survived_since_last_expansion_(0), | 92 survived_since_last_expansion_(0), |
| 93 survived_last_scavenge_(0), | 93 survived_last_scavenge_(0), |
| 94 always_allocate_scope_count_(0), | 94 always_allocate_scope_count_(0), |
| 95 memory_pressure_level_(MemoryPressureLevel::kNone), | 95 memory_pressure_level_(MemoryPressureLevel::kNone), |
| 96 contexts_disposed_(0), | 96 contexts_disposed_(0), |
| 97 number_of_disposed_maps_(0), | 97 number_of_disposed_maps_(0), |
| 98 global_ic_age_(0), | 98 global_ic_age_(0), |
| 99 new_space_(this), | 99 new_space_(nullptr), |
| 100 old_space_(NULL), | 100 old_space_(NULL), |
| 101 code_space_(NULL), | 101 code_space_(NULL), |
| 102 map_space_(NULL), | 102 map_space_(NULL), |
| 103 lo_space_(NULL), | 103 lo_space_(NULL), |
| 104 gc_state_(NOT_IN_GC), | 104 gc_state_(NOT_IN_GC), |
| 105 gc_post_processing_depth_(0), | 105 gc_post_processing_depth_(0), |
| 106 allocations_count_(0), | 106 allocations_count_(0), |
| 107 raw_allocations_hash_(0), | 107 raw_allocations_hash_(0), |
| 108 ms_count_(0), | 108 ms_count_(0), |
| 109 gc_count_(0), | 109 gc_count_(0), |
| (...skipping 72 matching lines...) |
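
The hunk above is the heart of this CL: `new_space_` stops being an inline value member (initialized as `new_space_(this)`) and becomes a pointer initialized to `nullptr`, to be allocated in `Heap::SetUp()` and released in `Heap::TearDown()`. A minimal sketch of the ownership pattern, using simplified stand-in types rather than the real V8 classes (names and sizes here are illustrative only):

```cpp
#include <cstddef>

// Simplified stand-in for v8::internal::NewSpace.
class NewSpace {
 public:
  bool SetUp(size_t initial_capacity) { capacity_ = initial_capacity; return true; }
  void TearDown() { capacity_ = 0; }
  size_t Capacity() const { return capacity_; }

 private:
  size_t capacity_ = 0;
};

class Heap {
 public:
  Heap() : new_space_(nullptr) {}  // was: a value member constructed here

  bool SetUp() {
    new_space_ = new NewSpace();   // now allocated late, as in the CL
    return new_space_->SetUp(1 * MB);
  }

  void TearDown() {
    new_space_->TearDown();
    delete new_space_;             // a pointer member needs explicit cleanup
    new_space_ = nullptr;
  }

  // Accessors guard against the not-yet-set-up state, matching the
  // HasBeenSetUp() checks in the real code.
  size_t Capacity() const { return new_space_ ? new_space_->Capacity() : 0; }

 private:
  static const size_t MB = 1024 * 1024;
  NewSpace* new_space_;            // was: NewSpace new_space_;
};
```

This one-line ownership change explains most of the mechanical churn in the rest of the diff: every `new_space_.` access becomes `new_space_->`.
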
| 182 set_encountered_transition_arrays(Smi::FromInt(0)); | 182 set_encountered_transition_arrays(Smi::FromInt(0)); |
| 183 // Put a dummy entry in the remembered pages so we can find the list in the | 183 // Put a dummy entry in the remembered pages so we can find the list in the |
| 184 // minidump even if there are no real unmapped pages. | 184 // minidump even if there are no real unmapped pages. |
| 185 RememberUnmappedPage(NULL, false); | 185 RememberUnmappedPage(NULL, false); |
| 186 } | 186 } |
| 187 | 187 |
| 188 | 188 |
| 189 intptr_t Heap::Capacity() { | 189 intptr_t Heap::Capacity() { |
| 190 if (!HasBeenSetUp()) return 0; | 190 if (!HasBeenSetUp()) return 0; |
| 191 | 191 |
| 192 return new_space_.Capacity() + OldGenerationCapacity(); | 192 return new_space_->Capacity() + OldGenerationCapacity(); |
| 193 } | 193 } |
| 194 | 194 |
| 195 intptr_t Heap::OldGenerationCapacity() { | 195 intptr_t Heap::OldGenerationCapacity() { |
| 196 if (!HasBeenSetUp()) return 0; | 196 if (!HasBeenSetUp()) return 0; |
| 197 | 197 |
| 198 return old_space_->Capacity() + code_space_->Capacity() + | 198 return old_space_->Capacity() + code_space_->Capacity() + |
| 199 map_space_->Capacity() + lo_space_->SizeOfObjects(); | 199 map_space_->Capacity() + lo_space_->SizeOfObjects(); |
| 200 } | 200 } |
| 201 | 201 |
| 202 | 202 |
| 203 intptr_t Heap::CommittedOldGenerationMemory() { | 203 intptr_t Heap::CommittedOldGenerationMemory() { |
| 204 if (!HasBeenSetUp()) return 0; | 204 if (!HasBeenSetUp()) return 0; |
| 205 | 205 |
| 206 return old_space_->CommittedMemory() + code_space_->CommittedMemory() + | 206 return old_space_->CommittedMemory() + code_space_->CommittedMemory() + |
| 207 map_space_->CommittedMemory() + lo_space_->Size(); | 207 map_space_->CommittedMemory() + lo_space_->Size(); |
| 208 } | 208 } |
| 209 | 209 |
| 210 | 210 |
| 211 intptr_t Heap::CommittedMemory() { | 211 intptr_t Heap::CommittedMemory() { |
| 212 if (!HasBeenSetUp()) return 0; | 212 if (!HasBeenSetUp()) return 0; |
| 213 | 213 |
| 214 return new_space_.CommittedMemory() + CommittedOldGenerationMemory(); | 214 return new_space_->CommittedMemory() + CommittedOldGenerationMemory(); |
| 215 } | 215 } |
| 216 | 216 |
| 217 | 217 |
| 218 size_t Heap::CommittedPhysicalMemory() { | 218 size_t Heap::CommittedPhysicalMemory() { |
| 219 if (!HasBeenSetUp()) return 0; | 219 if (!HasBeenSetUp()) return 0; |
| 220 | 220 |
| 221 return new_space_.CommittedPhysicalMemory() + | 221 return new_space_->CommittedPhysicalMemory() + |
| 222 old_space_->CommittedPhysicalMemory() + | 222 old_space_->CommittedPhysicalMemory() + |
| 223 code_space_->CommittedPhysicalMemory() + | 223 code_space_->CommittedPhysicalMemory() + |
| 224 map_space_->CommittedPhysicalMemory() + | 224 map_space_->CommittedPhysicalMemory() + |
| 225 lo_space_->CommittedPhysicalMemory(); | 225 lo_space_->CommittedPhysicalMemory(); |
| 226 } | 226 } |
| 227 | 227 |
| 228 | 228 |
| 229 intptr_t Heap::CommittedMemoryExecutable() { | 229 intptr_t Heap::CommittedMemoryExecutable() { |
| 230 if (!HasBeenSetUp()) return 0; | 230 if (!HasBeenSetUp()) return 0; |
| 231 | 231 |
| (...skipping 61 matching lines...) |
| 293 | 293 |
| 294 // Is there enough space left in OLD to guarantee that a scavenge can | 294 // Is there enough space left in OLD to guarantee that a scavenge can |
| 295 // succeed? | 295 // succeed? |
| 296 // | 296 // |
| 297 // Note that MemoryAllocator->MaxAvailable() undercounts the memory available | 297 // Note that MemoryAllocator->MaxAvailable() undercounts the memory available |
| 298 // for object promotion. It counts only the bytes that the memory | 298 // for object promotion. It counts only the bytes that the memory |
| 299 // allocator has not yet allocated from the OS and assigned to any space, | 299 // allocator has not yet allocated from the OS and assigned to any space, |
| 300 // and does not count available bytes already in the old space or code | 300 // and does not count available bytes already in the old space or code |
| 301 // space. Undercounting is safe---we may get an unrequested full GC when | 301 // space. Undercounting is safe---we may get an unrequested full GC when |
| 302 // a scavenge would have succeeded. | 302 // a scavenge would have succeeded. |
| 303 if (memory_allocator()->MaxAvailable() <= new_space_.Size()) { | 303 if (memory_allocator()->MaxAvailable() <= new_space_->Size()) { |
| 304 isolate_->counters() | 304 isolate_->counters() |
| 305 ->gc_compactor_caused_by_oldspace_exhaustion() | 305 ->gc_compactor_caused_by_oldspace_exhaustion() |
| 306 ->Increment(); | 306 ->Increment(); |
| 307 *reason = "scavenge might not succeed"; | 307 *reason = "scavenge might not succeed"; |
| 308 return MARK_COMPACTOR; | 308 return MARK_COMPACTOR; |
| 309 } | 309 } |
| 310 | 310 |
| 311 // Default | 311 // Default |
| 312 *reason = NULL; | 312 *reason = NULL; |
| 313 return SCAVENGER; | 313 return SCAVENGER; |
| 314 } | 314 } |
| 315 | 315 |
| 316 | 316 |
| 317 // TODO(1238405): Combine the infrastructure for --heap-stats and | 317 // TODO(1238405): Combine the infrastructure for --heap-stats and |
| 318 // --log-gc to avoid the complicated preprocessor and flag testing. | 318 // --log-gc to avoid the complicated preprocessor and flag testing. |
| 319 void Heap::ReportStatisticsBeforeGC() { | 319 void Heap::ReportStatisticsBeforeGC() { |
| 320 // Heap::ReportHeapStatistics will also log NewSpace statistics when | 320 // Heap::ReportHeapStatistics will also log NewSpace statistics when |
| 321 // --log-gc is set in DEBUG builds. The following logic is used to avoid | 321 // --log-gc is set in DEBUG builds. The following logic is used to avoid |
| 322 // double logging. | 322 // double logging. |
| 323 #ifdef DEBUG | 323 #ifdef DEBUG |
| 324 if (FLAG_heap_stats || FLAG_log_gc) new_space_.CollectStatistics(); | 324 if (FLAG_heap_stats || FLAG_log_gc) new_space_->CollectStatistics(); |
| 325 if (FLAG_heap_stats) { | 325 if (FLAG_heap_stats) { |
| 326 ReportHeapStatistics("Before GC"); | 326 ReportHeapStatistics("Before GC"); |
| 327 } else if (FLAG_log_gc) { | 327 } else if (FLAG_log_gc) { |
| 328 new_space_.ReportStatistics(); | 328 new_space_->ReportStatistics(); |
| 329 } | 329 } |
| 330 if (FLAG_heap_stats || FLAG_log_gc) new_space_.ClearHistograms(); | 330 if (FLAG_heap_stats || FLAG_log_gc) new_space_->ClearHistograms(); |
| 331 #else | 331 #else |
| 332 if (FLAG_log_gc) { | 332 if (FLAG_log_gc) { |
| 333 new_space_.CollectStatistics(); | 333 new_space_->CollectStatistics(); |
| 334 new_space_.ReportStatistics(); | 334 new_space_->ReportStatistics(); |
| 335 new_space_.ClearHistograms(); | 335 new_space_->ClearHistograms(); |
| 336 } | 336 } |
| 337 #endif // DEBUG | 337 #endif // DEBUG |
| 338 } | 338 } |
| 339 | 339 |
| 340 | 340 |
| 341 void Heap::PrintShortHeapStatistics() { | 341 void Heap::PrintShortHeapStatistics() { |
| 342 if (!FLAG_trace_gc_verbose) return; | 342 if (!FLAG_trace_gc_verbose) return; |
| 343 PrintIsolate(isolate_, "Memory allocator, used: %6" V8PRIdPTR | 343 PrintIsolate(isolate_, "Memory allocator, used: %6" V8PRIdPTR |
| 344 " KB, available: %6" V8PRIdPTR " KB\n", | 344 " KB, available: %6" V8PRIdPTR " KB\n", |
| 345 memory_allocator()->Size() / KB, | 345 memory_allocator()->Size() / KB, |
| 346 memory_allocator()->Available() / KB); | 346 memory_allocator()->Available() / KB); |
| 347 PrintIsolate(isolate_, "New space, used: %6" V8PRIdPTR | 347 PrintIsolate(isolate_, "New space, used: %6" V8PRIdPTR |
| 348 " KB" | 348 " KB" |
| 349 ", available: %6" V8PRIdPTR | 349 ", available: %6" V8PRIdPTR |
| 350 " KB" | 350 " KB" |
| 351 ", committed: %6" V8PRIdPTR " KB\n", | 351 ", committed: %6" V8PRIdPTR " KB\n", |
| 352 new_space_.Size() / KB, new_space_.Available() / KB, | 352 new_space_->Size() / KB, new_space_->Available() / KB, |
| 353 new_space_.CommittedMemory() / KB); | 353 new_space_->CommittedMemory() / KB); |
| 354 PrintIsolate(isolate_, "Old space, used: %6" V8PRIdPTR | 354 PrintIsolate(isolate_, "Old space, used: %6" V8PRIdPTR |
| 355 " KB" | 355 " KB" |
| 356 ", available: %6" V8PRIdPTR | 356 ", available: %6" V8PRIdPTR |
| 357 " KB" | 357 " KB" |
| 358 ", committed: %6" V8PRIdPTR " KB\n", | 358 ", committed: %6" V8PRIdPTR " KB\n", |
| 359 old_space_->SizeOfObjects() / KB, old_space_->Available() / KB, | 359 old_space_->SizeOfObjects() / KB, old_space_->Available() / KB, |
| 360 old_space_->CommittedMemory() / KB); | 360 old_space_->CommittedMemory() / KB); |
| 361 PrintIsolate(isolate_, "Code space, used: %6" V8PRIdPTR | 361 PrintIsolate(isolate_, "Code space, used: %6" V8PRIdPTR |
| 362 " KB" | 362 " KB" |
| 363 ", available: %6" V8PRIdPTR | 363 ", available: %6" V8PRIdPTR |
| (...skipping 28 matching lines...) |
| 392 total_gc_time_ms_); | 392 total_gc_time_ms_); |
| 393 } | 393 } |
| 394 | 394 |
| 395 // TODO(1238405): Combine the infrastructure for --heap-stats and | 395 // TODO(1238405): Combine the infrastructure for --heap-stats and |
| 396 // --log-gc to avoid the complicated preprocessor and flag testing. | 396 // --log-gc to avoid the complicated preprocessor and flag testing. |
| 397 void Heap::ReportStatisticsAfterGC() { | 397 void Heap::ReportStatisticsAfterGC() { |
| 398 // As with the pre-GC reporting, we use some complicated logic to ensure that | 398 // As with the pre-GC reporting, we use some complicated logic to ensure that |
| 399 // NewSpace statistics are logged exactly once when --log-gc is turned on. | 399 // NewSpace statistics are logged exactly once when --log-gc is turned on. |
| 400 #if defined(DEBUG) | 400 #if defined(DEBUG) |
| 401 if (FLAG_heap_stats) { | 401 if (FLAG_heap_stats) { |
| 402 new_space_.CollectStatistics(); | 402 new_space_->CollectStatistics(); |
| 403 ReportHeapStatistics("After GC"); | 403 ReportHeapStatistics("After GC"); |
| 404 } else if (FLAG_log_gc) { | 404 } else if (FLAG_log_gc) { |
| 405 new_space_.ReportStatistics(); | 405 new_space_->ReportStatistics(); |
| 406 } | 406 } |
| 407 #else | 407 #else |
| 408 if (FLAG_log_gc) new_space_.ReportStatistics(); | 408 if (FLAG_log_gc) new_space_->ReportStatistics(); |
| 409 #endif // DEBUG | 409 #endif // DEBUG |
| 410 for (int i = 0; i < static_cast<int>(v8::Isolate::kUseCounterFeatureCount); | 410 for (int i = 0; i < static_cast<int>(v8::Isolate::kUseCounterFeatureCount); |
| 411 ++i) { | 411 ++i) { |
| 412 int count = deferred_counters_[i]; | 412 int count = deferred_counters_[i]; |
| 413 deferred_counters_[i] = 0; | 413 deferred_counters_[i] = 0; |
| 414 while (count > 0) { | 414 while (count > 0) { |
| 415 count--; | 415 count--; |
| 416 isolate()->CountUsage(static_cast<v8::Isolate::UseCounterFeature>(i)); | 416 isolate()->CountUsage(static_cast<v8::Isolate::UseCounterFeature>(i)); |
| 417 } | 417 } |
| 418 } | 418 } |
| 419 } | 419 } |
| 420 | 420 |
| 421 | 421 |
| 422 void Heap::IncrementDeferredCount(v8::Isolate::UseCounterFeature feature) { | 422 void Heap::IncrementDeferredCount(v8::Isolate::UseCounterFeature feature) { |
| 423 deferred_counters_[feature]++; | 423 deferred_counters_[feature]++; |
| 424 } | 424 } |
| 425 | 425 |
| 426 bool Heap::UncommitFromSpace() { return new_space_.UncommitFromSpace(); } | 426 bool Heap::UncommitFromSpace() { return new_space_->UncommitFromSpace(); } |
| 427 | 427 |
| 428 void Heap::GarbageCollectionPrologue() { | 428 void Heap::GarbageCollectionPrologue() { |
| 429 { | 429 { |
| 430 AllowHeapAllocation for_the_first_part_of_prologue; | 430 AllowHeapAllocation for_the_first_part_of_prologue; |
| 431 gc_count_++; | 431 gc_count_++; |
| 432 | 432 |
| 433 #ifdef VERIFY_HEAP | 433 #ifdef VERIFY_HEAP |
| 434 if (FLAG_verify_heap) { | 434 if (FLAG_verify_heap) { |
| 435 Verify(); | 435 Verify(); |
| 436 } | 436 } |
| (...skipping 11 matching lines...) |
| 448 UpdateMaximumCommitted(); | 448 UpdateMaximumCommitted(); |
| 449 | 449 |
| 450 #ifdef DEBUG | 450 #ifdef DEBUG |
| 451 DCHECK(!AllowHeapAllocation::IsAllowed() && gc_state_ == NOT_IN_GC); | 451 DCHECK(!AllowHeapAllocation::IsAllowed() && gc_state_ == NOT_IN_GC); |
| 452 | 452 |
| 453 if (FLAG_gc_verbose) Print(); | 453 if (FLAG_gc_verbose) Print(); |
| 454 | 454 |
| 455 ReportStatisticsBeforeGC(); | 455 ReportStatisticsBeforeGC(); |
| 456 #endif // DEBUG | 456 #endif // DEBUG |
| 457 | 457 |
| 458 if (new_space_.IsAtMaximumCapacity()) { | 458 if (new_space_->IsAtMaximumCapacity()) { |
| 459 maximum_size_scavenges_++; | 459 maximum_size_scavenges_++; |
| 460 } else { | 460 } else { |
| 461 maximum_size_scavenges_ = 0; | 461 maximum_size_scavenges_ = 0; |
| 462 } | 462 } |
| 463 CheckNewSpaceExpansionCriteria(); | 463 CheckNewSpaceExpansionCriteria(); |
| 464 UpdateNewSpaceAllocationCounter(); | 464 UpdateNewSpaceAllocationCounter(); |
| 465 store_buffer()->MoveEntriesToRememberedSet(); | 465 store_buffer()->MoveEntriesToRememberedSet(); |
| 466 } | 466 } |
| 467 | 467 |
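
In the prologue above, `maximum_size_scavenges_` counts *consecutive* scavenges that began with new space already at maximum capacity; a single smaller scavenge resets the streak. A sketch of the counter update, with a stand-in struct (in V8 this counter informs allocation-site tenuring decisions elsewhere in the heap):

```cpp
// Streak counter as in GarbageCollectionPrologue(): repeated GCs that start
// with a full-sized new space suggest allocations should be pretenured
// (placed directly in old space).
struct ScavengeStats {
  int maximum_size_scavenges = 0;

  void OnPrologue(bool new_space_at_maximum_capacity) {
    if (new_space_at_maximum_capacity) {
      ++maximum_size_scavenges;
    } else {
      maximum_size_scavenges = 0;  // any smaller scavenge breaks the streak
    }
  }
};
```
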
| 468 | 468 |
| (...skipping 422 matching lines...) |
| 891 const int kMaxNumberOfAttempts = 7; | 891 const int kMaxNumberOfAttempts = 7; |
| 892 const int kMinNumberOfAttempts = 2; | 892 const int kMinNumberOfAttempts = 2; |
| 893 for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) { | 893 for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) { |
| 894 if (!CollectGarbage(MARK_COMPACTOR, gc_reason, NULL, | 894 if (!CollectGarbage(MARK_COMPACTOR, gc_reason, NULL, |
| 895 v8::kGCCallbackFlagCollectAllAvailableGarbage) && | 895 v8::kGCCallbackFlagCollectAllAvailableGarbage) && |
| 896 attempt + 1 >= kMinNumberOfAttempts) { | 896 attempt + 1 >= kMinNumberOfAttempts) { |
| 897 break; | 897 break; |
| 898 } | 898 } |
| 899 } | 899 } |
| 900 set_current_gc_flags(kNoGCFlags); | 900 set_current_gc_flags(kNoGCFlags); |
| 901 new_space_.Shrink(); | 901 new_space_->Shrink(); |
| 902 UncommitFromSpace(); | 902 UncommitFromSpace(); |
| 903 } | 903 } |
| 904 | 904 |
| 905 | 905 |
| 906 void Heap::ReportExternalMemoryPressure(const char* gc_reason) { | 906 void Heap::ReportExternalMemoryPressure(const char* gc_reason) { |
| 907 if (external_memory_ > | 907 if (external_memory_ > |
| 908 (external_memory_at_last_mark_compact_ + external_memory_hard_limit())) { | 908 (external_memory_at_last_mark_compact_ + external_memory_hard_limit())) { |
| 909 CollectAllGarbage( | 909 CollectAllGarbage( |
| 910 kReduceMemoryFootprintMask | kFinalizeIncrementalMarkingMask, gc_reason, | 910 kReduceMemoryFootprintMask | kFinalizeIncrementalMarkingMask, gc_reason, |
| 911 static_cast<GCCallbackFlags>(kGCCallbackFlagCollectAllAvailableGarbage | | 911 static_cast<GCCallbackFlags>(kGCCallbackFlagCollectAllAvailableGarbage | |
| (...skipping 30 matching lines...) |
| 942 IncrementalMarking::FORCE_COMPLETION)); | 942 IncrementalMarking::FORCE_COMPLETION)); |
| 943 } | 943 } |
| 944 } | 944 } |
| 945 | 945 |
| 946 | 946 |
| 947 void Heap::EnsureFillerObjectAtTop() { | 947 void Heap::EnsureFillerObjectAtTop() { |
| 948 // There may be an allocation memento behind objects in new space. Upon | 948 // There may be an allocation memento behind objects in new space. Upon |
| 949 // evacuation of a non-full new space (or if we are on the last page) there | 949 // evacuation of a non-full new space (or if we are on the last page) there |
| 950 // may be uninitialized memory behind top. We fill the remainder of the page | 950 // may be uninitialized memory behind top. We fill the remainder of the page |
| 951 // with a filler. | 951 // with a filler. |
| 952 Address to_top = new_space_.top(); | 952 Address to_top = new_space_->top(); |
| 953 Page* page = Page::FromAddress(to_top - kPointerSize); | 953 Page* page = Page::FromAddress(to_top - kPointerSize); |
| 954 if (page->Contains(to_top)) { | 954 if (page->Contains(to_top)) { |
| 955 int remaining_in_page = static_cast<int>(page->area_end() - to_top); | 955 int remaining_in_page = static_cast<int>(page->area_end() - to_top); |
| 956 CreateFillerObjectAt(to_top, remaining_in_page, ClearRecordedSlots::kNo); | 956 CreateFillerObjectAt(to_top, remaining_in_page, ClearRecordedSlots::kNo); |
| 957 } | 957 } |
| 958 } | 958 } |
| 959 | 959 |
| 960 | 960 |
| 961 bool Heap::CollectGarbage(GarbageCollector collector, const char* gc_reason, | 961 bool Heap::CollectGarbage(GarbageCollector collector, const char* gc_reason, |
| 962 const char* collector_reason, | 962 const char* collector_reason, |
| (...skipping 261 matching lines...) |
| 1224 break; // Abort for-loop over spaces and retry. | 1224 break; // Abort for-loop over spaces and retry. |
| 1225 } | 1225 } |
| 1226 } | 1226 } |
| 1227 } | 1227 } |
| 1228 | 1228 |
| 1229 return !gc_performed; | 1229 return !gc_performed; |
| 1230 } | 1230 } |
| 1231 | 1231 |
| 1232 | 1232 |
| 1233 void Heap::EnsureFromSpaceIsCommitted() { | 1233 void Heap::EnsureFromSpaceIsCommitted() { |
| 1234 if (new_space_.CommitFromSpaceIfNeeded()) return; | 1234 if (new_space_->CommitFromSpaceIfNeeded()) return; |
| 1235 | 1235 |
| 1236 // Committing memory to from space failed. | 1236 // Committing memory to from space failed. |
| 1237 // Memory is exhausted and we will die. | 1237 // Memory is exhausted and we will die. |
| 1238 V8::FatalProcessOutOfMemory("Committing semi space failed."); | 1238 V8::FatalProcessOutOfMemory("Committing semi space failed."); |
| 1239 } | 1239 } |
| 1240 | 1240 |
| 1241 | 1241 |
| 1242 void Heap::ClearNormalizedMapCaches() { | 1242 void Heap::ClearNormalizedMapCaches() { |
| 1243 if (isolate_->bootstrapper()->IsActive() && | 1243 if (isolate_->bootstrapper()->IsActive() && |
| 1244 !incremental_marking()->IsMarking()) { | 1244 !incremental_marking()->IsMarking()) { |
| (...skipping 254 matching lines...) |
| 1499 | 1499 |
| 1500 CompletelyClearInstanceofCache(); | 1500 CompletelyClearInstanceofCache(); |
| 1501 | 1501 |
| 1502 FlushNumberStringCache(); | 1502 FlushNumberStringCache(); |
| 1503 ClearNormalizedMapCaches(); | 1503 ClearNormalizedMapCaches(); |
| 1504 } | 1504 } |
| 1505 | 1505 |
| 1506 | 1506 |
| 1507 void Heap::CheckNewSpaceExpansionCriteria() { | 1507 void Heap::CheckNewSpaceExpansionCriteria() { |
| 1508 if (FLAG_experimental_new_space_growth_heuristic) { | 1508 if (FLAG_experimental_new_space_growth_heuristic) { |
| 1509 if (new_space_.TotalCapacity() < new_space_.MaximumCapacity() && | 1509 if (new_space_->TotalCapacity() < new_space_->MaximumCapacity() && |
| 1510 survived_last_scavenge_ * 100 / new_space_.TotalCapacity() >= 10) { | 1510 survived_last_scavenge_ * 100 / new_space_->TotalCapacity() >= 10) { |
| 1511 // Grow the size of new space if there is room to grow, and more than 10% | 1511 // Grow the size of new space if there is room to grow, and more than 10% |
| 1512 // have survived the last scavenge. | 1512 // have survived the last scavenge. |
| 1513 new_space_.Grow(); | 1513 new_space_->Grow(); |
| 1514 survived_since_last_expansion_ = 0; | 1514 survived_since_last_expansion_ = 0; |
| 1515 } | 1515 } |
| 1516 } else if (new_space_.TotalCapacity() < new_space_.MaximumCapacity() && | 1516 } else if (new_space_->TotalCapacity() < new_space_->MaximumCapacity() && |
| 1517 survived_since_last_expansion_ > new_space_.TotalCapacity()) { | 1517 survived_since_last_expansion_ > new_space_->TotalCapacity()) { |
| 1518 // Grow the size of new space if there is room to grow, and enough data | 1518 // Grow the size of new space if there is room to grow, and enough data |
| 1519 // has survived scavenge since the last expansion. | 1519 // has survived scavenge since the last expansion. |
| 1520 new_space_.Grow(); | 1520 new_space_->Grow(); |
| 1521 survived_since_last_expansion_ = 0; | 1521 survived_since_last_expansion_ = 0; |
| 1522 } | 1522 } |
| 1523 } | 1523 } |
| 1524 | 1524 |
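
`CheckNewSpaceExpansionCriteria()` above grows new space under one of two policies: an experimental rule based on the survival rate of the last scavenge, and the default rule based on cumulative survivors since the last expansion. A direct translation of both branches, with thresholds copied from the code above and a simplified signature (all sizes in bytes, capacity assumed nonzero):

```cpp
#include <cstdint>

// Decide whether to grow new space, mirroring the two branches in
// CheckNewSpaceExpansionCriteria().
bool ShouldGrowNewSpace(bool experimental_heuristic, int64_t total_capacity,
                        int64_t maximum_capacity, int64_t survived_last_scavenge,
                        int64_t survived_since_last_expansion) {
  if (total_capacity >= maximum_capacity) return false;  // no room to grow
  if (experimental_heuristic) {
    // Grow if at least 10% of capacity survived the last scavenge.
    return survived_last_scavenge * 100 / total_capacity >= 10;
  }
  // Default: grow once a full capacity's worth of bytes has survived
  // scavenges since the previous expansion.
  return survived_since_last_expansion > total_capacity;
}

// Example: 1 MB capacity, 200 KB survived the last scavenge -> 20% >= 10%,
// so the experimental heuristic grows the space.
```
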
| 1525 | 1525 |
| 1526 static bool IsUnscavengedHeapObject(Heap* heap, Object** p) { | 1526 static bool IsUnscavengedHeapObject(Heap* heap, Object** p) { |
| 1527 return heap->InNewSpace(*p) && | 1527 return heap->InNewSpace(*p) && |
| 1528 !HeapObject::cast(*p)->map_word().IsForwardingAddress(); | 1528 !HeapObject::cast(*p)->map_word().IsForwardingAddress(); |
| 1529 } | 1529 } |
| 1530 | 1530 |
| (...skipping 102 matching lines...) |
| 1633 | 1633 |
| 1634 if (UsingEmbedderHeapTracer()) { | 1634 if (UsingEmbedderHeapTracer()) { |
| 1635 // Register found wrappers with embedder so it can add them to its marking | 1635 // Register found wrappers with embedder so it can add them to its marking |
| 1636 // deque and correctly manage the case when v8 scavenger collects the | 1636 // deque and correctly manage the case when v8 scavenger collects the |
| 1637 // wrappers by either keeping wrappables alive, or cleaning marking deque. | 1637 // wrappers by either keeping wrappables alive, or cleaning marking deque. |
| 1638 mark_compact_collector()->RegisterWrappersWithEmbedderHeapTracer(); | 1638 mark_compact_collector()->RegisterWrappersWithEmbedderHeapTracer(); |
| 1639 } | 1639 } |
| 1640 | 1640 |
| 1641 // Flip the semispaces. After flipping, to space is empty, from space has | 1641 // Flip the semispaces. After flipping, to space is empty, from space has |
| 1642 // live objects. | 1642 // live objects. |
| 1643 new_space_.Flip(); | 1643 new_space_->Flip(); |
| 1644 new_space_.ResetAllocationInfo(); | 1644 new_space_->ResetAllocationInfo(); |
| 1645 | 1645 |
| 1646 // We need to sweep newly copied objects which can be either in the | 1646 // We need to sweep newly copied objects which can be either in the |
| 1647 // to space or promoted to the old generation. For to-space | 1647 // to space or promoted to the old generation. For to-space |
| 1648 // objects, we treat the bottom of the to space as a queue. Newly | 1648 // objects, we treat the bottom of the to space as a queue. Newly |
| 1649 // copied and unswept objects lie between a 'front' mark and the | 1649 // copied and unswept objects lie between a 'front' mark and the |
| 1650 // allocation pointer. | 1650 // allocation pointer. |
| 1651 // | 1651 // |
| 1652 // Promoted objects can go into various old-generation spaces, and | 1652 // Promoted objects can go into various old-generation spaces, and |
| 1653 // can be allocated internally in the spaces (from the free list). | 1653 // can be allocated internally in the spaces (from the free list). |
| 1654 // We treat the top of the to space as a queue of addresses of | 1654 // We treat the top of the to space as a queue of addresses of |
| 1655 // promoted objects. The addresses of newly promoted and unswept | 1655 // promoted objects. The addresses of newly promoted and unswept |
| 1656 // objects lie between a 'front' mark and a 'rear' mark that is | 1656 // objects lie between a 'front' mark and a 'rear' mark that is |
| 1657 // updated as a side effect of promoting an object. | 1657 // updated as a side effect of promoting an object. |
| 1658 // | 1658 // |
| 1659 // There is guaranteed to be enough room at the top of the to space | 1659 // There is guaranteed to be enough room at the top of the to space |
| 1660 // for the addresses of promoted objects: every object promoted | 1660 // for the addresses of promoted objects: every object promoted |
| 1661 // frees up its size in bytes from the top of the new space, and | 1661 // frees up its size in bytes from the top of the new space, and |
| 1662 // objects are at least one pointer in size. | 1662 // objects are at least one pointer in size. |
| 1663 Address new_space_front = new_space_.ToSpaceStart(); | 1663 Address new_space_front = new_space_->ToSpaceStart(); |
| 1664 promotion_queue_.Initialize(); | 1664 promotion_queue_.Initialize(); |
| 1665 | 1665 |
| 1666 PromotionMode promotion_mode = CurrentPromotionMode(); | 1666 PromotionMode promotion_mode = CurrentPromotionMode(); |
| 1667 ScavengeVisitor scavenge_visitor(this); | 1667 ScavengeVisitor scavenge_visitor(this); |
| 1668 | 1668 |
| 1669 if (FLAG_scavenge_reclaim_unmodified_objects) { | 1669 if (FLAG_scavenge_reclaim_unmodified_objects) { |
| 1670 isolate()->global_handles()->IdentifyWeakUnmodifiedObjects( | 1670 isolate()->global_handles()->IdentifyWeakUnmodifiedObjects( |
| 1671 &IsUnmodifiedHeapObject); | 1671 &IsUnmodifiedHeapObject); |
| 1672 } | 1672 } |
| 1673 | 1673 |
| (...skipping 76 matching lines...) |
| 1750 UpdateNewSpaceReferencesInExternalStringTable( | 1750 UpdateNewSpaceReferencesInExternalStringTable( |
| 1751 &UpdateNewSpaceReferenceInExternalStringTableEntry); | 1751 &UpdateNewSpaceReferenceInExternalStringTableEntry); |
| 1752 | 1752 |
| 1753 promotion_queue_.Destroy(); | 1753 promotion_queue_.Destroy(); |
| 1754 | 1754 |
| 1755 incremental_marking()->UpdateMarkingDequeAfterScavenge(); | 1755 incremental_marking()->UpdateMarkingDequeAfterScavenge(); |
| 1756 | 1756 |
| 1757 ScavengeWeakObjectRetainer weak_object_retainer(this); | 1757 ScavengeWeakObjectRetainer weak_object_retainer(this); |
| 1758 ProcessYoungWeakReferences(&weak_object_retainer); | 1758 ProcessYoungWeakReferences(&weak_object_retainer); |
| 1759 | 1759 |
| 1760 DCHECK(new_space_front == new_space_.top()); | 1760 DCHECK(new_space_front == new_space_->top()); |
| 1761 | 1761 |
| 1762 // Set age mark. | 1762 // Set age mark. |
| 1763 new_space_.set_age_mark(new_space_.top()); | 1763 new_space_->set_age_mark(new_space_->top()); |
| 1764 | 1764 |
| 1765 ArrayBufferTracker::FreeDeadInNewSpace(this); | 1765 ArrayBufferTracker::FreeDeadInNewSpace(this); |
| 1766 | 1766 |
| 1767 // Update how much has survived scavenge. | 1767 // Update how much has survived scavenge. |
| 1768 IncrementYoungSurvivorsCounter(static_cast<int>( | 1768 IncrementYoungSurvivorsCounter( |
| 1769 (PromotedSpaceSizeOfObjects() - survived_watermark) + new_space_.Size())); | 1769 static_cast<int>((PromotedSpaceSizeOfObjects() - survived_watermark) + |
| | 1770 new_space_->Size())); |
| 1770 | 1771 |
| 1771 LOG(isolate_, ResourceEvent("scavenge", "end")); | 1772 LOG(isolate_, ResourceEvent("scavenge", "end")); |
| 1772 | 1773 |
| 1773 gc_state_ = NOT_IN_GC; | 1774 gc_state_ = NOT_IN_GC; |
| 1774 } | 1775 } |
| 1775 | 1776 |
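
The scavenge epilogue above computes the survivor count from two deltas: bytes newly promoted into the old generation (promoted-space size minus the pre-GC watermark) plus everything still sitting in to-space. A worked example with made-up numbers, mirroring the `IncrementYoungSurvivorsCounter(...)` call:

```cpp
#include <cassert>
#include <cstdint>

// Survivors of a scavenge = newly promoted bytes + bytes still live in
// new space after the copy.
int64_t ScavengeSurvivors(int64_t promoted_space_size_after,
                          int64_t promoted_watermark_before,
                          int64_t new_space_size_after) {
  return (promoted_space_size_after - promoted_watermark_before) +
         new_space_size_after;
}

int main() {
  // Hypothetical GC: old gen grew from 10 MB to 12 MB during the scavenge,
  // and 1 MB of objects remain live in to-space afterwards -> 3 MB survived.
  const int64_t MB = 1 << 20;
  assert(ScavengeSurvivors(12 * MB, 10 * MB, 1 * MB) == 3 * MB);
  return 0;
}
```
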
| 1776 | 1777 |
| 1777 String* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Heap* heap, | 1778 String* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Heap* heap, |
| 1778 Object** p) { | 1779 Object** p) { |
| 1779 MapWord first_word = HeapObject::cast(*p)->map_word(); | 1780 MapWord first_word = HeapObject::cast(*p)->map_word(); |
| (...skipping 143 matching lines...) |
| 1923 v8::ExternalResourceVisitor* visitor_; | 1924 v8::ExternalResourceVisitor* visitor_; |
| 1924 } external_string_table_visitor(visitor); | 1925 } external_string_table_visitor(visitor); |
| 1925 | 1926 |
| 1926 external_string_table_.Iterate(&external_string_table_visitor); | 1927 external_string_table_.Iterate(&external_string_table_visitor); |
| 1927 } | 1928 } |
| 1928 | 1929 |
| 1929 Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor, | 1930 Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor, |
| 1930 Address new_space_front, | 1931 Address new_space_front, |
| 1931 PromotionMode promotion_mode) { | 1932 PromotionMode promotion_mode) { |
| 1932 do { | 1933 do { |
| 1933 SemiSpace::AssertValidRange(new_space_front, new_space_.top()); | 1934 SemiSpace::AssertValidRange(new_space_front, new_space_->top()); |
| 1934 // The addresses new_space_front and new_space_.top() define a | 1935 // The addresses new_space_front and new_space_->top() define a |
| 1935 // queue of unprocessed copied objects. Process them until the | 1936 // queue of unprocessed copied objects. Process them until the |
| 1936 // queue is empty. | 1937 // queue is empty. |
| 1937 while (new_space_front != new_space_.top()) { | 1938 while (new_space_front != new_space_->top()) { |
| 1938 if (!Page::IsAlignedToPageSize(new_space_front)) { | 1939 if (!Page::IsAlignedToPageSize(new_space_front)) { |
| 1939 HeapObject* object = HeapObject::FromAddress(new_space_front); | 1940 HeapObject* object = HeapObject::FromAddress(new_space_front); |
| 1940 if (promotion_mode == PROMOTE_MARKED) { | 1941 if (promotion_mode == PROMOTE_MARKED) { |
| 1941 new_space_front += StaticScavengeVisitor<PROMOTE_MARKED>::IterateBody( | 1942 new_space_front += StaticScavengeVisitor<PROMOTE_MARKED>::IterateBody( |
| 1942 object->map(), object); | 1943 object->map(), object); |
| 1943 } else { | 1944 } else { |
| 1944 new_space_front += | 1945 new_space_front += |
| 1945 StaticScavengeVisitor<DEFAULT_PROMOTION>::IterateBody( | 1946 StaticScavengeVisitor<DEFAULT_PROMOTION>::IterateBody( |
| 1946 object->map(), object); | 1947 object->map(), object); |
| 1947 } | 1948 } |
| (...skipping 18 matching lines...) |
| 1966 // to new space. | 1967 // to new space. |
| 1967 DCHECK(!target->IsMap()); | 1968 DCHECK(!target->IsMap()); |
| 1968 | 1969 |
| 1969 IteratePromotedObject(target, static_cast<int>(size), was_marked_black, | 1970 IteratePromotedObject(target, static_cast<int>(size), was_marked_black, |
| 1970 &Scavenger::ScavengeObject); | 1971 &Scavenger::ScavengeObject); |
| 1971 } | 1972 } |
| 1972 } | 1973 } |
| 1973 | 1974 |
| 1974 // Take another spin if there are now unswept objects in new space | 1975 // Take another spin if there are now unswept objects in new space |
| 1975 // (there are currently no more unswept promoted objects). | 1976 // (there are currently no more unswept promoted objects). |
| 1976 } while (new_space_front != new_space_.top()); | 1977 } while (new_space_front != new_space_->top()); |
| 1977 | 1978 |
| 1978 return new_space_front; | 1979 return new_space_front; |
| 1979 } | 1980 } |
| 1980 | 1981 |
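
`DoScavenge()` above treats the region between `new_space_front` and `new_space_->top()` as a work queue: each copied object is scanned in place, and scanning may copy further objects, which advances `top()`. A hedged sketch of that Cheney-style fixpoint loop over a flat buffer; the types stand in for the real visitor machinery:

```cpp
#include <cstddef>
#include <vector>

// Objects copied into to-space are themselves scanned, which can append more
// objects; the loop ends when the scan cursor ("front") catches up with the
// allocation cursor ("top"). An Object here is just a list of child ids,
// standing in for a heap object and its pointer fields.
struct Object { std::vector<int> children; };

void ProcessCopied(std::vector<Object>& to_space,
                   const std::vector<Object>& from_space,
                   std::vector<bool>& copied) {
  size_t front = 0;
  while (front != to_space.size()) {            // new_space_front != top()
    Object scanned = to_space[front++];         // copy: push_back below may
                                                // reallocate the vector
    for (int child : scanned.children) {
      if (!copied[child]) {                     // "scavenge" reachable children
        copied[child] = true;
        to_space.push_back(from_space[child]);  // copying advances top()
      }
    }
  }
}
```

Callers would pre-seed `to_space` with the roots and mark them in `copied`, just as the root-scavenging phase does before `DoScavenge()` runs.
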
| 1981 | 1982 |
| 1982 STATIC_ASSERT((FixedDoubleArray::kHeaderSize & kDoubleAlignmentMask) == | 1983 STATIC_ASSERT((FixedDoubleArray::kHeaderSize & kDoubleAlignmentMask) == |
| 1983 0); // NOLINT | 1984 0); // NOLINT |
| 1984 STATIC_ASSERT((FixedTypedArrayBase::kDataOffset & kDoubleAlignmentMask) == | 1985 STATIC_ASSERT((FixedTypedArrayBase::kDataOffset & kDoubleAlignmentMask) == |
| 1985 0); // NOLINT | 1986 0); // NOLINT |
| 1986 #ifdef V8_HOST_ARCH_32_BIT | 1987 #ifdef V8_HOST_ARCH_32_BIT |
| (...skipping 2197 matching lines...) |
| 4184 // GCIdleTimeHandler once the change is merged to 4.5. | 4185 // GCIdleTimeHandler once the change is merged to 4.5. |
| 4185 static const size_t kLowAllocationThroughput = 1000; | 4186 static const size_t kLowAllocationThroughput = 1000; |
| 4186 const double allocation_throughput = | 4187 const double allocation_throughput = |
| 4187 tracer()->CurrentAllocationThroughputInBytesPerMillisecond(); | 4188 tracer()->CurrentAllocationThroughputInBytesPerMillisecond(); |
| 4188 | 4189 |
| 4189 if (FLAG_predictable) return; | 4190 if (FLAG_predictable) return; |
| 4190 | 4191 |
| 4191 if (ShouldReduceMemory() || | 4192 if (ShouldReduceMemory() || |
| 4192 ((allocation_throughput != 0) && | 4193 ((allocation_throughput != 0) && |
| 4193 (allocation_throughput < kLowAllocationThroughput))) { | 4194 (allocation_throughput < kLowAllocationThroughput))) { |
| 4194 new_space_.Shrink(); | 4195 new_space_->Shrink(); |
| 4195 UncommitFromSpace(); | 4196 UncommitFromSpace(); |
| 4196 } | 4197 } |
| 4197 } | 4198 } |
| 4198 | 4199 |
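
The function ending above shrinks new space when a memory reduction was requested or the mutator is barely allocating; `kLowAllocationThroughput` is 1000 bytes/ms in the code. A direct sketch of the condition, with the flags folded into plain parameters:

```cpp
// Shrink-when-quiet rule from the hunk above: release new-space memory if we
// were asked to reduce footprint, or if measured allocation throughput is
// nonzero but below ~1000 bytes/ms (an essentially idle mutator).
bool ShouldShrinkNewSpace(bool should_reduce_memory,
                          double allocation_throughput_bytes_per_ms) {
  const double kLowAllocationThroughput = 1000;
  return should_reduce_memory ||
         (allocation_throughput_bytes_per_ms != 0 &&
          allocation_throughput_bytes_per_ms < kLowAllocationThroughput);
}
```
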
| 4199 bool Heap::MarkingDequesAreEmpty() { | 4200 bool Heap::MarkingDequesAreEmpty() { |
| 4200 return mark_compact_collector()->marking_deque()->IsEmpty() && | 4201 return mark_compact_collector()->marking_deque()->IsEmpty() && |
| 4201 (!UsingEmbedderHeapTracer() || | 4202 (!UsingEmbedderHeapTracer() || |
| 4202 (mark_compact_collector()->wrappers_to_trace() == 0 && | 4203 (mark_compact_collector()->wrappers_to_trace() == 0 && |
| 4203 mark_compact_collector() | 4204 mark_compact_collector() |
| 4204 ->embedder_heap_tracer() | 4205 ->embedder_heap_tracer() |
| (...skipping 343 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4548 old_generation_allocation_limit_); | 4549 old_generation_allocation_limit_); |
| 4549 | 4550 |
| 4550 PrintF("\n"); | 4551 PrintF("\n"); |
| 4551 PrintF("Number of handles : %d\n", HandleScope::NumberOfHandles(isolate_)); | 4552 PrintF("Number of handles : %d\n", HandleScope::NumberOfHandles(isolate_)); |
| 4552 isolate_->global_handles()->PrintStats(); | 4553 isolate_->global_handles()->PrintStats(); |
| 4553 PrintF("\n"); | 4554 PrintF("\n"); |
| 4554 | 4555 |
| 4555 PrintF("Heap statistics : "); | 4556 PrintF("Heap statistics : "); |
| 4556 memory_allocator()->ReportStatistics(); | 4557 memory_allocator()->ReportStatistics(); |
| 4557 PrintF("To space : "); | 4558 PrintF("To space : "); |
| 4558 new_space_.ReportStatistics(); | 4559 new_space_->ReportStatistics(); |
| 4559 PrintF("Old space : "); | 4560 PrintF("Old space : "); |
| 4560 old_space_->ReportStatistics(); | 4561 old_space_->ReportStatistics(); |
| 4561 PrintF("Code space : "); | 4562 PrintF("Code space : "); |
| 4562 code_space_->ReportStatistics(); | 4563 code_space_->ReportStatistics(); |
| 4563 PrintF("Map space : "); | 4564 PrintF("Map space : "); |
| 4564 map_space_->ReportStatistics(); | 4565 map_space_->ReportStatistics(); |
| 4565 PrintF("Large object space : "); | 4566 PrintF("Large object space : "); |
| 4566 lo_space_->ReportStatistics(); | 4567 lo_space_->ReportStatistics(); |
| 4567 PrintF(">>>>>> ========================================= >>>>>>\n"); | 4568 PrintF(">>>>>> ========================================= >>>>>>\n"); |
| 4568 } | 4569 } |
| 4569 | 4570 |
| 4570 #endif // DEBUG | 4571 #endif // DEBUG |
| 4571 | 4572 |
| 4572 bool Heap::Contains(HeapObject* value) { | 4573 bool Heap::Contains(HeapObject* value) { |
| 4573 if (memory_allocator()->IsOutsideAllocatedSpace(value->address())) { | 4574 if (memory_allocator()->IsOutsideAllocatedSpace(value->address())) { |
| 4574 return false; | 4575 return false; |
| 4575 } | 4576 } |
| 4576 return HasBeenSetUp() && | 4577 return HasBeenSetUp() && |
| 4577 (new_space_.ToSpaceContains(value) || old_space_->Contains(value) || | 4578 (new_space_->ToSpaceContains(value) || old_space_->Contains(value) || |
| 4578 code_space_->Contains(value) || map_space_->Contains(value) || | 4579 code_space_->Contains(value) || map_space_->Contains(value) || |
| 4579 lo_space_->Contains(value)); | 4580 lo_space_->Contains(value)); |
| 4580 } | 4581 } |
| 4581 | 4582 |
| 4582 bool Heap::ContainsSlow(Address addr) { | 4583 bool Heap::ContainsSlow(Address addr) { |
| 4583 if (memory_allocator()->IsOutsideAllocatedSpace(addr)) { | 4584 if (memory_allocator()->IsOutsideAllocatedSpace(addr)) { |
| 4584 return false; | 4585 return false; |
| 4585 } | 4586 } |
| 4586 return HasBeenSetUp() && | 4587 return HasBeenSetUp() && |
| 4587 (new_space_.ToSpaceContainsSlow(addr) || | 4588 (new_space_->ToSpaceContainsSlow(addr) || |
| 4588 old_space_->ContainsSlow(addr) || code_space_->ContainsSlow(addr) || | 4589 old_space_->ContainsSlow(addr) || code_space_->ContainsSlow(addr) || |
| 4589 map_space_->ContainsSlow(addr) || lo_space_->ContainsSlow(addr)); | 4590 map_space_->ContainsSlow(addr) || lo_space_->ContainsSlow(addr)); |
| 4590 } | 4591 } |
| 4591 | 4592 |
| 4592 bool Heap::InSpace(HeapObject* value, AllocationSpace space) { | 4593 bool Heap::InSpace(HeapObject* value, AllocationSpace space) { |
| 4593 if (memory_allocator()->IsOutsideAllocatedSpace(value->address())) { | 4594 if (memory_allocator()->IsOutsideAllocatedSpace(value->address())) { |
| 4594 return false; | 4595 return false; |
| 4595 } | 4596 } |
| 4596 if (!HasBeenSetUp()) return false; | 4597 if (!HasBeenSetUp()) return false; |
| 4597 | 4598 |
| 4598 switch (space) { | 4599 switch (space) { |
| 4599 case NEW_SPACE: | 4600 case NEW_SPACE: |
| 4600 return new_space_.ToSpaceContains(value); | 4601 return new_space_->ToSpaceContains(value); |
| 4601 case OLD_SPACE: | 4602 case OLD_SPACE: |
| 4602 return old_space_->Contains(value); | 4603 return old_space_->Contains(value); |
| 4603 case CODE_SPACE: | 4604 case CODE_SPACE: |
| 4604 return code_space_->Contains(value); | 4605 return code_space_->Contains(value); |
| 4605 case MAP_SPACE: | 4606 case MAP_SPACE: |
| 4606 return map_space_->Contains(value); | 4607 return map_space_->Contains(value); |
| 4607 case LO_SPACE: | 4608 case LO_SPACE: |
| 4608 return lo_space_->Contains(value); | 4609 return lo_space_->Contains(value); |
| 4609 } | 4610 } |
| 4610 UNREACHABLE(); | 4611 UNREACHABLE(); |
| 4611 return false; | 4612 return false; |
| 4612 } | 4613 } |
| 4613 | 4614 |
| 4614 bool Heap::InSpaceSlow(Address addr, AllocationSpace space) { | 4615 bool Heap::InSpaceSlow(Address addr, AllocationSpace space) { |
| 4615 if (memory_allocator()->IsOutsideAllocatedSpace(addr)) { | 4616 if (memory_allocator()->IsOutsideAllocatedSpace(addr)) { |
| 4616 return false; | 4617 return false; |
| 4617 } | 4618 } |
| 4618 if (!HasBeenSetUp()) return false; | 4619 if (!HasBeenSetUp()) return false; |
| 4619 | 4620 |
| 4620 switch (space) { | 4621 switch (space) { |
| 4621 case NEW_SPACE: | 4622 case NEW_SPACE: |
| 4622 return new_space_.ToSpaceContainsSlow(addr); | 4623 return new_space_->ToSpaceContainsSlow(addr); |
| 4623 case OLD_SPACE: | 4624 case OLD_SPACE: |
| 4624 return old_space_->ContainsSlow(addr); | 4625 return old_space_->ContainsSlow(addr); |
| 4625 case CODE_SPACE: | 4626 case CODE_SPACE: |
| 4626 return code_space_->ContainsSlow(addr); | 4627 return code_space_->ContainsSlow(addr); |
| 4627 case MAP_SPACE: | 4628 case MAP_SPACE: |
| 4628 return map_space_->ContainsSlow(addr); | 4629 return map_space_->ContainsSlow(addr); |
| 4629 case LO_SPACE: | 4630 case LO_SPACE: |
| 4630 return lo_space_->ContainsSlow(addr); | 4631 return lo_space_->ContainsSlow(addr); |
| 4631 } | 4632 } |
| 4632 UNREACHABLE(); | 4633 UNREACHABLE(); |
| (...skipping 42 matching lines...) |
| 4675 // We have to wait here for the sweeper threads to have an iterable heap. | 4676 // We have to wait here for the sweeper threads to have an iterable heap. |
| 4676 mark_compact_collector()->EnsureSweepingCompleted(); | 4677 mark_compact_collector()->EnsureSweepingCompleted(); |
| 4677 } | 4678 } |
| 4678 | 4679 |
| 4679 VerifyPointersVisitor visitor; | 4680 VerifyPointersVisitor visitor; |
| 4680 IterateRoots(&visitor, VISIT_ONLY_STRONG); | 4681 IterateRoots(&visitor, VISIT_ONLY_STRONG); |
| 4681 | 4682 |
| 4682 VerifySmisVisitor smis_visitor; | 4683 VerifySmisVisitor smis_visitor; |
| 4683 IterateSmiRoots(&smis_visitor); | 4684 IterateSmiRoots(&smis_visitor); |
| 4684 | 4685 |
| 4685 new_space_.Verify(); | 4686 new_space_->Verify(); |
| 4686 | 4687 |
| 4687 old_space_->Verify(&visitor); | 4688 old_space_->Verify(&visitor); |
| 4688 map_space_->Verify(&visitor); | 4689 map_space_->Verify(&visitor); |
| 4689 | 4690 |
| 4690 VerifyPointersVisitor no_dirty_regions_visitor; | 4691 VerifyPointersVisitor no_dirty_regions_visitor; |
| 4691 code_space_->Verify(&no_dirty_regions_visitor); | 4692 code_space_->Verify(&no_dirty_regions_visitor); |
| 4692 | 4693 |
| 4693 lo_space_->Verify(); | 4694 lo_space_->Verify(); |
| 4694 | 4695 |
| 4695 mark_compact_collector()->VerifyWeakEmbeddedObjectsInCode(); | 4696 mark_compact_collector()->VerifyWeakEmbeddedObjectsInCode(); |
| 4696 if (FLAG_omit_map_checks_for_leaf_maps) { | 4697 if (FLAG_omit_map_checks_for_leaf_maps) { |
| 4697 mark_compact_collector()->VerifyOmittedMapChecks(); | 4698 mark_compact_collector()->VerifyOmittedMapChecks(); |
| 4698 } | 4699 } |
| 4699 } | 4700 } |
| 4700 #endif | 4701 #endif |
| 4701 | 4702 |
| 4702 | 4703 |
| 4703 void Heap::ZapFromSpace() { | 4704 void Heap::ZapFromSpace() { |
| 4704 if (!new_space_.IsFromSpaceCommitted()) return; | 4705 if (!new_space_->IsFromSpaceCommitted()) return; |
| 4705 for (Page* page : NewSpacePageRange(new_space_.FromSpaceStart(), | 4706 for (Page* page : NewSpacePageRange(new_space_->FromSpaceStart(), |
| 4706 new_space_.FromSpaceEnd())) { | 4707 new_space_->FromSpaceEnd())) { |
| 4707 for (Address cursor = page->area_start(), limit = page->area_end(); | 4708 for (Address cursor = page->area_start(), limit = page->area_end(); |
| 4708 cursor < limit; cursor += kPointerSize) { | 4709 cursor < limit; cursor += kPointerSize) { |
| 4709 Memory::Address_at(cursor) = kFromSpaceZapValue; | 4710 Memory::Address_at(cursor) = kFromSpaceZapValue; |
| 4710 } | 4711 } |
| 4711 } | 4712 } |
| 4712 } | 4713 } |
| 4713 | 4714 |
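
`ZapFromSpace()` above overwrites the evacuated semispace with a recognizable zap constant so that stale pointers into from-space fail loudly rather than silently reading reused memory. The same pattern over a plain buffer; the sentinel value is illustrative, not V8's actual `kFromSpaceZapValue`:

```cpp
#include <cstddef>
#include <cstdint>

// Fill a dead region with a sentinel pattern, one pointer-sized word at a
// time, like ZapFromSpace(). Any later read through a stale reference then
// yields an obviously bogus pointer.
void ZapRegion(uintptr_t* start, size_t word_count) {
  const uintptr_t kZapValue = static_cast<uintptr_t>(0xdeadbeefdeadbeefULL);
  for (size_t i = 0; i < word_count; ++i) {
    start[i] = kZapValue;  // cursor += kPointerSize in the original
  }
}
```
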
| 4714 void Heap::IteratePromotedObjectPointers(HeapObject* object, Address start, | 4715 void Heap::IteratePromotedObjectPointers(HeapObject* object, Address start, |
| 4715 Address end, bool record_slots, | 4716 Address end, bool record_slots, |
| 4716 ObjectSlotCallback callback) { | 4717 ObjectSlotCallback callback) { |
| (...skipping 364 matching lines...) |
| 5081 memcpy(buffer + copied, trace_ring_buffer_, ring_buffer_end_); | 5082 memcpy(buffer + copied, trace_ring_buffer_, ring_buffer_end_); |
| 5082 } | 5083 } |
| 5083 | 5084 |
| 5084 | 5085 |
| 5085 bool Heap::ConfigureHeapDefault() { return ConfigureHeap(0, 0, 0, 0); } | 5086 bool Heap::ConfigureHeapDefault() { return ConfigureHeap(0, 0, 0, 0); } |
| 5086 | 5087 |
| 5087 | 5088 |
| 5088 void Heap::RecordStats(HeapStats* stats, bool take_snapshot) { | 5089 void Heap::RecordStats(HeapStats* stats, bool take_snapshot) { |
| 5089 *stats->start_marker = HeapStats::kStartMarker; | 5090 *stats->start_marker = HeapStats::kStartMarker; |
| 5090 *stats->end_marker = HeapStats::kEndMarker; | 5091 *stats->end_marker = HeapStats::kEndMarker; |
| 5091 *stats->new_space_size = new_space_.SizeAsInt(); | 5092 *stats->new_space_size = new_space_->SizeAsInt(); |
| 5092 *stats->new_space_capacity = new_space_.Capacity(); | 5093 *stats->new_space_capacity = new_space_->Capacity(); |
| 5093 *stats->old_space_size = old_space_->SizeOfObjects(); | 5094 *stats->old_space_size = old_space_->SizeOfObjects(); |
| 5094 *stats->old_space_capacity = old_space_->Capacity(); | 5095 *stats->old_space_capacity = old_space_->Capacity(); |
| 5095 *stats->code_space_size = code_space_->SizeOfObjects(); | 5096 *stats->code_space_size = code_space_->SizeOfObjects(); |
| 5096 *stats->code_space_capacity = code_space_->Capacity(); | 5097 *stats->code_space_capacity = code_space_->Capacity(); |
| 5097 *stats->map_space_size = map_space_->SizeOfObjects(); | 5098 *stats->map_space_size = map_space_->SizeOfObjects(); |
| 5098 *stats->map_space_capacity = map_space_->Capacity(); | 5099 *stats->map_space_capacity = map_space_->Capacity(); |
| 5099 *stats->lo_space_size = lo_space_->Size(); | 5100 *stats->lo_space_size = lo_space_->Size(); |
| 5100 isolate_->global_handles()->RecordStats(stats); | 5101 isolate_->global_handles()->RecordStats(stats); |
| 5101 *stats->memory_allocator_size = memory_allocator()->Size(); | 5102 *stats->memory_allocator_size = memory_allocator()->Size(); |
| 5102 *stats->memory_allocator_capacity = | 5103 *stats->memory_allocator_capacity = |
| (...skipping 101 matching lines...) |
| 5204 return factor; | 5205 return factor; |
| 5205 } | 5206 } |
| 5206 | 5207 |
| 5207 | 5208 |
| 5208 intptr_t Heap::CalculateOldGenerationAllocationLimit(double factor, | 5209 intptr_t Heap::CalculateOldGenerationAllocationLimit(double factor, |
| 5209 intptr_t old_gen_size) { | 5210 intptr_t old_gen_size) { |
| 5210 CHECK(factor > 1.0); | 5211 CHECK(factor > 1.0); |
| 5211 CHECK(old_gen_size > 0); | 5212 CHECK(old_gen_size > 0); |
| 5212 intptr_t limit = static_cast<intptr_t>(old_gen_size * factor); | 5213 intptr_t limit = static_cast<intptr_t>(old_gen_size * factor); |
| 5213 limit = Max(limit, old_gen_size + MinimumAllocationLimitGrowingStep()); | 5214 limit = Max(limit, old_gen_size + MinimumAllocationLimitGrowingStep()); |
| 5214 limit += new_space_.Capacity(); | 5215 limit += new_space_->Capacity(); |
| 5215 intptr_t halfway_to_the_max = (old_gen_size + max_old_generation_size_) / 2; | 5216 intptr_t halfway_to_the_max = (old_gen_size + max_old_generation_size_) / 2; |
| 5216 return Min(limit, halfway_to_the_max); | 5217 return Min(limit, halfway_to_the_max); |
| 5217 } | 5218 } |
| 5218 | 5219 |
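
`CalculateOldGenerationAllocationLimit()` above computes `min(max(old_gen_size * factor, old_gen_size + min_step) + new_space_capacity, (old_gen_size + max_old_generation_size) / 2)`. A worked example with invented sizes (the real factor and step come from the heuristics skipped above):

```cpp
#include <algorithm>
#include <cassert>
#include <cstdint>

// Mirror of the limit computation above, with intptr_t widened to int64_t.
int64_t OldGenAllocationLimit(double factor, int64_t old_gen_size,
                              int64_t min_growing_step,
                              int64_t new_space_capacity,
                              int64_t max_old_generation_size) {
  int64_t limit = static_cast<int64_t>(old_gen_size * factor);
  limit = std::max(limit, old_gen_size + min_growing_step);
  limit += new_space_capacity;
  int64_t halfway_to_the_max = (old_gen_size + max_old_generation_size) / 2;
  return std::min(limit, halfway_to_the_max);
}

int main() {
  const int64_t MB = 1 << 20;
  // 100 MB old gen, factor 1.5, 8 MB step, 16 MB new space, 700 MB hard max:
  // 100 * 1.5 + 16 = 166 MB; halfway to the max is 400 MB, so 166 MB wins.
  assert(OldGenAllocationLimit(1.5, 100 * MB, 8 * MB, 16 * MB, 700 * MB) ==
         166 * MB);
  return 0;
}
```
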
| 5219 intptr_t Heap::MinimumAllocationLimitGrowingStep() { | 5220 intptr_t Heap::MinimumAllocationLimitGrowingStep() { |
| 5220 const double kRegularAllocationLimitGrowingStep = 8; | 5221 const double kRegularAllocationLimitGrowingStep = 8; |
| 5221 const double kLowMemoryAllocationLimitGrowingStep = 2; | 5222 const double kLowMemoryAllocationLimitGrowingStep = 2; |
| 5222 intptr_t limit = (Page::kPageSize > MB ? Page::kPageSize : MB); | 5223 intptr_t limit = (Page::kPageSize > MB ? Page::kPageSize : MB); |
| 5223 return limit * (ShouldOptimizeForMemoryUsage() | 5224 return limit * (ShouldOptimizeForMemoryUsage() |
| 5224 ? kLowMemoryAllocationLimitGrowingStep | 5225 ? kLowMemoryAllocationLimitGrowingStep |
| (...skipping 120 matching lines...) |
| 5345 if (!memory_allocator_->SetUp(MaxReserved(), MaxExecutableSize(), | 5346 if (!memory_allocator_->SetUp(MaxReserved(), MaxExecutableSize(), |
| 5346 code_range_size_)) | 5347 code_range_size_)) |
| 5347 return false; | 5348 return false; |
| 5348 | 5349 |
| 5349 // Initialize store buffer. | 5350 // Initialize store buffer. |
| 5350 store_buffer_ = new StoreBuffer(this); | 5351 store_buffer_ = new StoreBuffer(this); |
| 5351 | 5352 |
| 5352 // Initialize incremental marking. | 5353 // Initialize incremental marking. |
| 5353 incremental_marking_ = new IncrementalMarking(this); | 5354 incremental_marking_ = new IncrementalMarking(this); |
| 5354 | 5355 |
| | 5356 new_space_ = new NewSpace(this); |
| | 5357 if (new_space_ == nullptr) return false; |
| | 5358 |
| 5355 // Set up new space. | 5359 // Set up new space. |
| 5356 if (!new_space_.SetUp(initial_semispace_size_, max_semi_space_size_)) { | 5360 if (!new_space_->SetUp(initial_semispace_size_, max_semi_space_size_)) { |
| 5357 return false; | 5361 return false; |
| 5358 } | 5362 } |
| 5359 new_space_top_after_last_gc_ = new_space()->top(); | 5363 new_space_top_after_last_gc_ = new_space()->top(); |
| 5360 | 5364 |
| 5361 // Initialize old space. | 5365 // Initialize old space. |
| 5362 old_space_ = new OldSpace(this, OLD_SPACE, NOT_EXECUTABLE); | 5366 old_space_ = new OldSpace(this, OLD_SPACE, NOT_EXECUTABLE); |
| 5363 if (old_space_ == NULL) return false; | 5367 if (old_space_ == NULL) return false; |
| 5364 if (!old_space_->SetUp()) return false; | 5368 if (!old_space_->SetUp()) return false; |
| 5365 | 5369 |
| 5366 // Initialize the code space, set its maximum capacity to the old | 5370 // Initialize the code space, set its maximum capacity to the old |
| (...skipping 164 matching lines...) |
| 5531 PrintF("total_sweeping_time=%.1f ", | 5535 PrintF("total_sweeping_time=%.1f ", |
| 5532 tracer()->cumulative_sweeping_duration()); | 5536 tracer()->cumulative_sweeping_duration()); |
| 5533 PrintF("\n\n"); | 5537 PrintF("\n\n"); |
| 5534 } | 5538 } |
| 5535 | 5539 |
| 5536 if (FLAG_print_max_heap_committed) { | 5540 if (FLAG_print_max_heap_committed) { |
| 5537 PrintF("\n"); | 5541 PrintF("\n"); |
| 5538 PrintF("maximum_committed_by_heap=%" V8PRIdPTR " ", | 5542 PrintF("maximum_committed_by_heap=%" V8PRIdPTR " ", |
| 5539 MaximumCommittedMemory()); | 5543 MaximumCommittedMemory()); |
| 5540 PrintF("maximum_committed_by_new_space=%" V8PRIdPTR " ", | 5544 PrintF("maximum_committed_by_new_space=%" V8PRIdPTR " ", |
| 5541 new_space_.MaximumCommittedMemory()); | 5545 new_space_->MaximumCommittedMemory()); |
| 5542 PrintF("maximum_committed_by_old_space=%" V8PRIdPTR " ", | 5546 PrintF("maximum_committed_by_old_space=%" V8PRIdPTR " ", |
| 5543 old_space_->MaximumCommittedMemory()); | 5547 old_space_->MaximumCommittedMemory()); |
| 5544 PrintF("maximum_committed_by_code_space=%" V8PRIdPTR " ", | 5548 PrintF("maximum_committed_by_code_space=%" V8PRIdPTR " ", |
| 5545 code_space_->MaximumCommittedMemory()); | 5549 code_space_->MaximumCommittedMemory()); |
| 5546 PrintF("maximum_committed_by_map_space=%" V8PRIdPTR " ", | 5550 PrintF("maximum_committed_by_map_space=%" V8PRIdPTR " ", |
| 5547 map_space_->MaximumCommittedMemory()); | 5551 map_space_->MaximumCommittedMemory()); |
| 5548 PrintF("maximum_committed_by_lo_space=%" V8PRIdPTR " ", | 5552 PrintF("maximum_committed_by_lo_space=%" V8PRIdPTR " ", |
| 5549 lo_space_->MaximumCommittedMemory()); | 5553 lo_space_->MaximumCommittedMemory()); |
| 5550 PrintF("\n\n"); | 5554 PrintF("\n\n"); |
| 5551 } | 5555 } |
| (...skipping 40 matching lines...) |
| 5592 delete scavenge_job_; | 5596 delete scavenge_job_; |
| 5593 scavenge_job_ = nullptr; | 5597 scavenge_job_ = nullptr; |
| 5594 | 5598 |
| 5595 isolate_->global_handles()->TearDown(); | 5599 isolate_->global_handles()->TearDown(); |
| 5596 | 5600 |
| 5597 external_string_table_.TearDown(); | 5601 external_string_table_.TearDown(); |
| 5598 | 5602 |
| 5599 delete tracer_; | 5603 delete tracer_; |
| 5600 tracer_ = nullptr; | 5604 tracer_ = nullptr; |
| 5601 | 5605 |
| 5602 new_space_.TearDown(); | 5606 new_space_->TearDown(); |
| | 5607 delete new_space_; |
| | 5608 new_space_ = nullptr; |
| 5603 | 5609 |
| 5604 if (old_space_ != NULL) { | 5610 if (old_space_ != NULL) { |
| 5605 delete old_space_; | 5611 delete old_space_; |
| 5606 old_space_ = NULL; | 5612 old_space_ = NULL; |
| 5607 } | 5613 } |
| 5608 | 5614 |
| 5609 if (code_space_ != NULL) { | 5615 if (code_space_ != NULL) { |
| 5610 delete code_space_; | 5616 delete code_space_; |
| 5611 code_space_ = NULL; | 5617 code_space_ = NULL; |
| 5612 } | 5618 } |
| (...skipping 851 matching lines...) |
| 6464 } | 6470 } |
| 6465 | 6471 |
| 6466 | 6472 |
| 6467 // static | 6473 // static |
| 6468 int Heap::GetStaticVisitorIdForMap(Map* map) { | 6474 int Heap::GetStaticVisitorIdForMap(Map* map) { |
| 6469 return StaticVisitorBase::GetVisitorId(map); | 6475 return StaticVisitorBase::GetVisitorId(map); |
| 6470 } | 6476 } |
| 6471 | 6477 |
| 6472 } // namespace internal | 6478 } // namespace internal |
| 6473 } // namespace v8 | 6479 } // namespace v8 |