| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/heap/heap.h" | 5 #include "src/heap/heap.h" |
| 6 | 6 |
| 7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
| 8 #include "src/api.h" | 8 #include "src/api.h" |
| 9 #include "src/ast/scopeinfo.h" | 9 #include "src/ast/scopeinfo.h" |
| 10 #include "src/base/bits.h" | 10 #include "src/base/bits.h" |
| (...skipping 53 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 64 heap_.ScheduleIdleScavengeIfNeeded(bytes_allocated); | 64 heap_.ScheduleIdleScavengeIfNeeded(bytes_allocated); |
| 65 } | 65 } |
| 66 | 66 |
| 67 private: | 67 private: |
| 68 Heap& heap_; | 68 Heap& heap_; |
| 69 }; | 69 }; |
| 70 | 70 |
| 71 Heap::Heap() | 71 Heap::Heap() |
| 72 : amount_of_external_allocated_memory_(0), | 72 : amount_of_external_allocated_memory_(0), |
| 73 amount_of_external_allocated_memory_at_last_global_gc_(0), | 73 amount_of_external_allocated_memory_at_last_global_gc_(0), |
| 74 isolate_(NULL), | 74 isolate_(nullptr), |
| 75 code_range_size_(0), | 75 code_range_size_(0), |
| 76 // semispace_size_ should be a power of 2 and old_generation_size_ should | 76 // semispace_size_ should be a power of 2 and old_generation_size_ should |
| 77 // be a multiple of Page::kPageSize. | 77 // be a multiple of Page::kPageSize. |
| 78 max_semi_space_size_(8 * (kPointerSize / 4) * MB), | 78 max_semi_space_size_(8 * (kPointerSize / 4) * MB), |
| 79 initial_semispace_size_(Page::kPageSize), | 79 initial_semispace_size_(Page::kPageSize), |
| 80 max_old_generation_size_(700ul * (kPointerSize / 4) * MB), | 80 max_old_generation_size_(700ul * (kPointerSize / 4) * MB), |
| 81 initial_old_generation_size_(max_old_generation_size_ / | 81 initial_old_generation_size_(max_old_generation_size_ / |
| 82 kInitalOldGenerationLimitFactor), | 82 kInitalOldGenerationLimitFactor), |
| 83 old_generation_size_configured_(false), | 83 old_generation_size_configured_(false), |
| 84 max_executable_size_(256ul * (kPointerSize / 4) * MB), | 84 max_executable_size_(256ul * (kPointerSize / 4) * MB), |
| (...skipping 44 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 129 max_gc_pause_(0.0), | 129 max_gc_pause_(0.0), |
| 130 total_gc_time_ms_(0.0), | 130 total_gc_time_ms_(0.0), |
| 131 max_alive_after_gc_(0), | 131 max_alive_after_gc_(0), |
| 132 min_in_mutator_(kMaxInt), | 132 min_in_mutator_(kMaxInt), |
| 133 marking_time_(0.0), | 133 marking_time_(0.0), |
| 134 sweeping_time_(0.0), | 134 sweeping_time_(0.0), |
| 135 last_idle_notification_time_(0.0), | 135 last_idle_notification_time_(0.0), |
| 136 last_gc_time_(0.0), | 136 last_gc_time_(0.0), |
| 137 scavenge_collector_(nullptr), | 137 scavenge_collector_(nullptr), |
| 138 mark_compact_collector_(nullptr), | 138 mark_compact_collector_(nullptr), |
| 139 memory_allocator_(nullptr), |
| 139 store_buffer_(this), | 140 store_buffer_(this), |
| 140 incremental_marking_(nullptr), | 141 incremental_marking_(nullptr), |
| 141 gc_idle_time_handler_(nullptr), | 142 gc_idle_time_handler_(nullptr), |
| 142 memory_reducer_(nullptr), | 143 memory_reducer_(nullptr), |
| 143 object_stats_(nullptr), | 144 object_stats_(nullptr), |
| 144 scavenge_job_(nullptr), | 145 scavenge_job_(nullptr), |
| 145 idle_scavenge_observer_(nullptr), | 146 idle_scavenge_observer_(nullptr), |
| 146 full_codegen_bytes_generated_(0), | 147 full_codegen_bytes_generated_(0), |
| 147 crankshaft_codegen_bytes_generated_(0), | 148 crankshaft_codegen_bytes_generated_(0), |
| 148 new_space_allocation_counter_(0), | 149 new_space_allocation_counter_(0), |
| (...skipping 69 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 218 old_space_->CommittedPhysicalMemory() + | 219 old_space_->CommittedPhysicalMemory() + |
| 219 code_space_->CommittedPhysicalMemory() + | 220 code_space_->CommittedPhysicalMemory() + |
| 220 map_space_->CommittedPhysicalMemory() + | 221 map_space_->CommittedPhysicalMemory() + |
| 221 lo_space_->CommittedPhysicalMemory(); | 222 lo_space_->CommittedPhysicalMemory(); |
| 222 } | 223 } |
| 223 | 224 |
| 224 | 225 |
| 225 intptr_t Heap::CommittedMemoryExecutable() { | 226 intptr_t Heap::CommittedMemoryExecutable() { |
| 226 if (!HasBeenSetUp()) return 0; | 227 if (!HasBeenSetUp()) return 0; |
| 227 | 228 |
| 228 return isolate()->memory_allocator()->SizeExecutable(); | 229 return memory_allocator()->SizeExecutable(); |
| 229 } | 230 } |
| 230 | 231 |
| 231 | 232 |
| 232 void Heap::UpdateMaximumCommitted() { | 233 void Heap::UpdateMaximumCommitted() { |
| 233 if (!HasBeenSetUp()) return; | 234 if (!HasBeenSetUp()) return; |
| 234 | 235 |
| 235 intptr_t current_committed_memory = CommittedMemory(); | 236 intptr_t current_committed_memory = CommittedMemory(); |
| 236 if (current_committed_memory > maximum_committed_) { | 237 if (current_committed_memory > maximum_committed_) { |
| 237 maximum_committed_ = current_committed_memory; | 238 maximum_committed_ = current_committed_memory; |
| 238 } | 239 } |
| (...skipping 50 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 289 | 290 |
| 290 // Is there enough space left in OLD to guarantee that a scavenge can | 291 // Is there enough space left in OLD to guarantee that a scavenge can |
| 291 // succeed? | 292 // succeed? |
| 292 // | 293 // |
| 293 // Note that MemoryAllocator->MaxAvailable() undercounts the memory available | 294 // Note that MemoryAllocator->MaxAvailable() undercounts the memory available |
| 294 // for object promotion. It counts only the bytes that the memory | 295 // for object promotion. It counts only the bytes that the memory |
| 295 // allocator has not yet allocated from the OS and assigned to any space, | 296 // allocator has not yet allocated from the OS and assigned to any space, |
| 296 // and does not count available bytes already in the old space or code | 297 // and does not count available bytes already in the old space or code |
| 297 // space. Undercounting is safe---we may get an unrequested full GC when | 298 // space. Undercounting is safe---we may get an unrequested full GC when |
| 298 // a scavenge would have succeeded. | 299 // a scavenge would have succeeded. |
| 299 if (isolate_->memory_allocator()->MaxAvailable() <= new_space_.Size()) { | 300 if (memory_allocator()->MaxAvailable() <= new_space_.Size()) { |
| 300 isolate_->counters() | 301 isolate_->counters() |
| 301 ->gc_compactor_caused_by_oldspace_exhaustion() | 302 ->gc_compactor_caused_by_oldspace_exhaustion() |
| 302 ->Increment(); | 303 ->Increment(); |
| 303 *reason = "scavenge might not succeed"; | 304 *reason = "scavenge might not succeed"; |
| 304 return MARK_COMPACTOR; | 305 return MARK_COMPACTOR; |
| 305 } | 306 } |
| 306 | 307 |
| 307 // Default | 308 // Default |
| 308 *reason = NULL; | 309 *reason = NULL; |
| 309 return SCAVENGER; | 310 return SCAVENGER; |
| (...skipping 22 matching lines...) Expand all Loading... |
| 332 } | 333 } |
| 333 #endif // DEBUG | 334 #endif // DEBUG |
| 334 } | 335 } |
| 335 | 336 |
| 336 | 337 |
| 337 void Heap::PrintShortHeapStatistics() { | 338 void Heap::PrintShortHeapStatistics() { |
| 338 if (!FLAG_trace_gc_verbose) return; | 339 if (!FLAG_trace_gc_verbose) return; |
| 339 PrintIsolate(isolate_, "Memory allocator, used: %6" V8_PTR_PREFIX | 340 PrintIsolate(isolate_, "Memory allocator, used: %6" V8_PTR_PREFIX |
| 340 "d KB" | 341 "d KB" |
| 341 ", available: %6" V8_PTR_PREFIX "d KB\n", | 342 ", available: %6" V8_PTR_PREFIX "d KB\n", |
| 342 isolate_->memory_allocator()->Size() / KB, | 343 memory_allocator()->Size() / KB, |
| 343 isolate_->memory_allocator()->Available() / KB); | 344 memory_allocator()->Available() / KB); |
| 344 PrintIsolate(isolate_, "New space, used: %6" V8_PTR_PREFIX | 345 PrintIsolate(isolate_, "New space, used: %6" V8_PTR_PREFIX |
| 345 "d KB" | 346 "d KB" |
| 346 ", available: %6" V8_PTR_PREFIX | 347 ", available: %6" V8_PTR_PREFIX |
| 347 "d KB" | 348 "d KB" |
| 348 ", committed: %6" V8_PTR_PREFIX "d KB\n", | 349 ", committed: %6" V8_PTR_PREFIX "d KB\n", |
| 349 new_space_.Size() / KB, new_space_.Available() / KB, | 350 new_space_.Size() / KB, new_space_.Available() / KB, |
| 350 new_space_.CommittedMemory() / KB); | 351 new_space_.CommittedMemory() / KB); |
| 351 PrintIsolate(isolate_, "Old space, used: %6" V8_PTR_PREFIX | 352 PrintIsolate(isolate_, "Old space, used: %6" V8_PTR_PREFIX |
| 352 "d KB" | 353 "d KB" |
| 353 ", available: %6" V8_PTR_PREFIX | 354 ", available: %6" V8_PTR_PREFIX |
| (...skipping 3012 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3366 ClearRecordedSlots::kNo); | 3367 ClearRecordedSlots::kNo); |
| 3367 allocation = lo_space_->AllocateRaw(object_size, EXECUTABLE); | 3368 allocation = lo_space_->AllocateRaw(object_size, EXECUTABLE); |
| 3368 if (!allocation.To(&result)) return allocation; | 3369 if (!allocation.To(&result)) return allocation; |
| 3369 OnAllocationEvent(result, object_size); | 3370 OnAllocationEvent(result, object_size); |
| 3370 } | 3371 } |
| 3371 } | 3372 } |
| 3372 | 3373 |
| 3373 result->set_map_no_write_barrier(code_map()); | 3374 result->set_map_no_write_barrier(code_map()); |
| 3374 Code* code = Code::cast(result); | 3375 Code* code = Code::cast(result); |
| 3375 DCHECK(IsAligned(bit_cast<intptr_t>(code->address()), kCodeAlignment)); | 3376 DCHECK(IsAligned(bit_cast<intptr_t>(code->address()), kCodeAlignment)); |
| 3376 DCHECK(isolate_->code_range() == NULL || !isolate_->code_range()->valid() || | 3377 DCHECK(memory_allocator()->code_range() == NULL || |
| 3377 isolate_->code_range()->contains(code->address()) || | 3378 !memory_allocator()->code_range()->valid() || |
| 3379 memory_allocator()->code_range()->contains(code->address()) || |
| 3378 object_size <= code_space()->AreaSize()); | 3380 object_size <= code_space()->AreaSize()); |
| 3379 code->set_gc_metadata(Smi::FromInt(0)); | 3381 code->set_gc_metadata(Smi::FromInt(0)); |
| 3380 code->set_ic_age(global_ic_age_); | 3382 code->set_ic_age(global_ic_age_); |
| 3381 return code; | 3383 return code; |
| 3382 } | 3384 } |
| 3383 | 3385 |
| 3384 | 3386 |
| 3385 AllocationResult Heap::CopyCode(Code* code) { | 3387 AllocationResult Heap::CopyCode(Code* code) { |
| 3386 AllocationResult allocation; | 3388 AllocationResult allocation; |
| 3387 | 3389 |
| 3388 HeapObject* result = nullptr; | 3390 HeapObject* result = nullptr; |
| 3389 // Allocate an object the same size as the code object. | 3391 // Allocate an object the same size as the code object. |
| 3390 int obj_size = code->Size(); | 3392 int obj_size = code->Size(); |
| 3391 allocation = AllocateRaw(obj_size, CODE_SPACE); | 3393 allocation = AllocateRaw(obj_size, CODE_SPACE); |
| 3392 if (!allocation.To(&result)) return allocation; | 3394 if (!allocation.To(&result)) return allocation; |
| 3393 | 3395 |
| 3394 // Copy code object. | 3396 // Copy code object. |
| 3395 Address old_addr = code->address(); | 3397 Address old_addr = code->address(); |
| 3396 Address new_addr = result->address(); | 3398 Address new_addr = result->address(); |
| 3397 CopyBlock(new_addr, old_addr, obj_size); | 3399 CopyBlock(new_addr, old_addr, obj_size); |
| 3398 Code* new_code = Code::cast(result); | 3400 Code* new_code = Code::cast(result); |
| 3399 | 3401 |
| 3400 // Relocate the copy. | 3402 // Relocate the copy. |
| 3401 DCHECK(IsAligned(bit_cast<intptr_t>(new_code->address()), kCodeAlignment)); | 3403 DCHECK(IsAligned(bit_cast<intptr_t>(new_code->address()), kCodeAlignment)); |
| 3402 DCHECK(isolate_->code_range() == NULL || !isolate_->code_range()->valid() || | 3404 DCHECK(memory_allocator()->code_range() == NULL || |
| 3403 isolate_->code_range()->contains(code->address()) || | 3405 !memory_allocator()->code_range()->valid() || |
| 3406 memory_allocator()->code_range()->contains(code->address()) || |
| 3404 obj_size <= code_space()->AreaSize()); | 3407 obj_size <= code_space()->AreaSize()); |
| 3405 new_code->Relocate(new_addr - old_addr); | 3408 new_code->Relocate(new_addr - old_addr); |
| 3406 // We have to iterate over the object and process its pointers when black | 3409 // We have to iterate over the object and process its pointers when black |
| 3407 // allocation is on. | 3410 // allocation is on. |
| 3408 incremental_marking()->IterateBlackObject(new_code); | 3411 incremental_marking()->IterateBlackObject(new_code); |
| 3409 return new_code; | 3412 return new_code; |
| 3410 } | 3413 } |
| 3411 | 3414 |
| 3412 AllocationResult Heap::CopyBytecodeArray(BytecodeArray* bytecode_array) { | 3415 AllocationResult Heap::CopyBytecodeArray(BytecodeArray* bytecode_array) { |
| 3413 int size = BytecodeArray::SizeFor(bytecode_array->length()); | 3416 int size = BytecodeArray::SizeFor(bytecode_array->length()); |
| (...skipping 47 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3461 | 3464 |
| 3462 Code* new_code = Code::cast(result); | 3465 Code* new_code = Code::cast(result); |
| 3463 new_code->set_relocation_info(reloc_info_array); | 3466 new_code->set_relocation_info(reloc_info_array); |
| 3464 | 3467 |
| 3465 // Copy patched rinfo. | 3468 // Copy patched rinfo. |
| 3466 CopyBytes(new_code->relocation_start(), reloc_info.start(), | 3469 CopyBytes(new_code->relocation_start(), reloc_info.start(), |
| 3467 static_cast<size_t>(reloc_info.length())); | 3470 static_cast<size_t>(reloc_info.length())); |
| 3468 | 3471 |
| 3469 // Relocate the copy. | 3472 // Relocate the copy. |
| 3470 DCHECK(IsAligned(bit_cast<intptr_t>(new_code->address()), kCodeAlignment)); | 3473 DCHECK(IsAligned(bit_cast<intptr_t>(new_code->address()), kCodeAlignment)); |
| 3471 DCHECK(isolate_->code_range() == NULL || !isolate_->code_range()->valid() || | 3474 DCHECK(memory_allocator()->code_range() == NULL || |
| 3472 isolate_->code_range()->contains(code->address()) || | 3475 !memory_allocator()->code_range()->valid() || |
| 3476 memory_allocator()->code_range()->contains(code->address()) || |
| 3473 new_obj_size <= code_space()->AreaSize()); | 3477 new_obj_size <= code_space()->AreaSize()); |
| 3474 | 3478 |
| 3475 new_code->Relocate(new_addr - old_addr); | 3479 new_code->Relocate(new_addr - old_addr); |
| 3476 // We have to iterate over the object and process its pointers when | 3480 // We have to iterate over the object and process its pointers when |
| 3477 // black allocation is on. | 3481 // black allocation is on. |
| 3478 incremental_marking()->IterateBlackObject(new_code); | 3482 incremental_marking()->IterateBlackObject(new_code); |
| 3479 #ifdef VERIFY_HEAP | 3483 #ifdef VERIFY_HEAP |
| 3480 if (FLAG_verify_heap) code->ObjectVerify(); | 3484 if (FLAG_verify_heap) code->ObjectVerify(); |
| 3481 #endif | 3485 #endif |
| 3482 return new_code; | 3486 return new_code; |
| (...skipping 1024 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 4507 gc_count_); | 4511 gc_count_); |
| 4508 PrintF("old_generation_allocation_limit_ %" V8_PTR_PREFIX "d\n", | 4512 PrintF("old_generation_allocation_limit_ %" V8_PTR_PREFIX "d\n", |
| 4509 old_generation_allocation_limit_); | 4513 old_generation_allocation_limit_); |
| 4510 | 4514 |
| 4511 PrintF("\n"); | 4515 PrintF("\n"); |
| 4512 PrintF("Number of handles : %d\n", HandleScope::NumberOfHandles(isolate_)); | 4516 PrintF("Number of handles : %d\n", HandleScope::NumberOfHandles(isolate_)); |
| 4513 isolate_->global_handles()->PrintStats(); | 4517 isolate_->global_handles()->PrintStats(); |
| 4514 PrintF("\n"); | 4518 PrintF("\n"); |
| 4515 | 4519 |
| 4516 PrintF("Heap statistics : "); | 4520 PrintF("Heap statistics : "); |
| 4517 isolate_->memory_allocator()->ReportStatistics(); | 4521 memory_allocator()->ReportStatistics(); |
| 4518 PrintF("To space : "); | 4522 PrintF("To space : "); |
| 4519 new_space_.ReportStatistics(); | 4523 new_space_.ReportStatistics(); |
| 4520 PrintF("Old space : "); | 4524 PrintF("Old space : "); |
| 4521 old_space_->ReportStatistics(); | 4525 old_space_->ReportStatistics(); |
| 4522 PrintF("Code space : "); | 4526 PrintF("Code space : "); |
| 4523 code_space_->ReportStatistics(); | 4527 code_space_->ReportStatistics(); |
| 4524 PrintF("Map space : "); | 4528 PrintF("Map space : "); |
| 4525 map_space_->ReportStatistics(); | 4529 map_space_->ReportStatistics(); |
| 4526 PrintF("Large object space : "); | 4530 PrintF("Large object space : "); |
| 4527 lo_space_->ReportStatistics(); | 4531 lo_space_->ReportStatistics(); |
| 4528 PrintF(">>>>>> ========================================= >>>>>>\n"); | 4532 PrintF(">>>>>> ========================================= >>>>>>\n"); |
| 4529 } | 4533 } |
| 4530 | 4534 |
| 4531 #endif // DEBUG | 4535 #endif // DEBUG |
| 4532 | 4536 |
| 4533 bool Heap::Contains(HeapObject* value) { | 4537 bool Heap::Contains(HeapObject* value) { |
| 4534 if (isolate_->memory_allocator()->IsOutsideAllocatedSpace(value->address())) { | 4538 if (memory_allocator()->IsOutsideAllocatedSpace(value->address())) { |
| 4535 return false; | 4539 return false; |
| 4536 } | 4540 } |
| 4537 return HasBeenSetUp() && | 4541 return HasBeenSetUp() && |
| 4538 (new_space_.ToSpaceContains(value) || old_space_->Contains(value) || | 4542 (new_space_.ToSpaceContains(value) || old_space_->Contains(value) || |
| 4539 code_space_->Contains(value) || map_space_->Contains(value) || | 4543 code_space_->Contains(value) || map_space_->Contains(value) || |
| 4540 lo_space_->Contains(value)); | 4544 lo_space_->Contains(value)); |
| 4541 } | 4545 } |
| 4542 | 4546 |
| 4543 bool Heap::ContainsSlow(Address addr) { | 4547 bool Heap::ContainsSlow(Address addr) { |
| 4544 if (isolate_->memory_allocator()->IsOutsideAllocatedSpace(addr)) { | 4548 if (memory_allocator()->IsOutsideAllocatedSpace(addr)) { |
| 4545 return false; | 4549 return false; |
| 4546 } | 4550 } |
| 4547 return HasBeenSetUp() && | 4551 return HasBeenSetUp() && |
| 4548 (new_space_.ToSpaceContainsSlow(addr) || | 4552 (new_space_.ToSpaceContainsSlow(addr) || |
| 4549 old_space_->ContainsSlow(addr) || code_space_->ContainsSlow(addr) || | 4553 old_space_->ContainsSlow(addr) || code_space_->ContainsSlow(addr) || |
| 4550 map_space_->ContainsSlow(addr) || lo_space_->ContainsSlow(addr)); | 4554 map_space_->ContainsSlow(addr) || lo_space_->ContainsSlow(addr)); |
| 4551 } | 4555 } |
| 4552 | 4556 |
| 4553 bool Heap::InSpace(HeapObject* value, AllocationSpace space) { | 4557 bool Heap::InSpace(HeapObject* value, AllocationSpace space) { |
| 4554 if (isolate_->memory_allocator()->IsOutsideAllocatedSpace(value->address())) { | 4558 if (memory_allocator()->IsOutsideAllocatedSpace(value->address())) { |
| 4555 return false; | 4559 return false; |
| 4556 } | 4560 } |
| 4557 if (!HasBeenSetUp()) return false; | 4561 if (!HasBeenSetUp()) return false; |
| 4558 | 4562 |
| 4559 switch (space) { | 4563 switch (space) { |
| 4560 case NEW_SPACE: | 4564 case NEW_SPACE: |
| 4561 return new_space_.ToSpaceContains(value); | 4565 return new_space_.ToSpaceContains(value); |
| 4562 case OLD_SPACE: | 4566 case OLD_SPACE: |
| 4563 return old_space_->Contains(value); | 4567 return old_space_->Contains(value); |
| 4564 case CODE_SPACE: | 4568 case CODE_SPACE: |
| 4565 return code_space_->Contains(value); | 4569 return code_space_->Contains(value); |
| 4566 case MAP_SPACE: | 4570 case MAP_SPACE: |
| 4567 return map_space_->Contains(value); | 4571 return map_space_->Contains(value); |
| 4568 case LO_SPACE: | 4572 case LO_SPACE: |
| 4569 return lo_space_->Contains(value); | 4573 return lo_space_->Contains(value); |
| 4570 } | 4574 } |
| 4571 UNREACHABLE(); | 4575 UNREACHABLE(); |
| 4572 return false; | 4576 return false; |
| 4573 } | 4577 } |
| 4574 | 4578 |
| 4575 bool Heap::InSpaceSlow(Address addr, AllocationSpace space) { | 4579 bool Heap::InSpaceSlow(Address addr, AllocationSpace space) { |
| 4576 if (isolate_->memory_allocator()->IsOutsideAllocatedSpace(addr)) { | 4580 if (memory_allocator()->IsOutsideAllocatedSpace(addr)) { |
| 4577 return false; | 4581 return false; |
| 4578 } | 4582 } |
| 4579 if (!HasBeenSetUp()) return false; | 4583 if (!HasBeenSetUp()) return false; |
| 4580 | 4584 |
| 4581 switch (space) { | 4585 switch (space) { |
| 4582 case NEW_SPACE: | 4586 case NEW_SPACE: |
| 4583 return new_space_.ToSpaceContainsSlow(addr); | 4587 return new_space_.ToSpaceContainsSlow(addr); |
| 4584 case OLD_SPACE: | 4588 case OLD_SPACE: |
| 4585 return old_space_->ContainsSlow(addr); | 4589 return old_space_->ContainsSlow(addr); |
| 4586 case CODE_SPACE: | 4590 case CODE_SPACE: |
| (...skipping 420 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5007 *stats->new_space_size = new_space_.SizeAsInt(); | 5011 *stats->new_space_size = new_space_.SizeAsInt(); |
| 5008 *stats->new_space_capacity = static_cast<int>(new_space_.Capacity()); | 5012 *stats->new_space_capacity = static_cast<int>(new_space_.Capacity()); |
| 5009 *stats->old_space_size = old_space_->SizeOfObjects(); | 5013 *stats->old_space_size = old_space_->SizeOfObjects(); |
| 5010 *stats->old_space_capacity = old_space_->Capacity(); | 5014 *stats->old_space_capacity = old_space_->Capacity(); |
| 5011 *stats->code_space_size = code_space_->SizeOfObjects(); | 5015 *stats->code_space_size = code_space_->SizeOfObjects(); |
| 5012 *stats->code_space_capacity = code_space_->Capacity(); | 5016 *stats->code_space_capacity = code_space_->Capacity(); |
| 5013 *stats->map_space_size = map_space_->SizeOfObjects(); | 5017 *stats->map_space_size = map_space_->SizeOfObjects(); |
| 5014 *stats->map_space_capacity = map_space_->Capacity(); | 5018 *stats->map_space_capacity = map_space_->Capacity(); |
| 5015 *stats->lo_space_size = lo_space_->Size(); | 5019 *stats->lo_space_size = lo_space_->Size(); |
| 5016 isolate_->global_handles()->RecordStats(stats); | 5020 isolate_->global_handles()->RecordStats(stats); |
| 5017 *stats->memory_allocator_size = isolate()->memory_allocator()->Size(); | 5021 *stats->memory_allocator_size = memory_allocator()->Size(); |
| 5018 *stats->memory_allocator_capacity = | 5022 *stats->memory_allocator_capacity = |
| 5019 isolate()->memory_allocator()->Size() + | 5023 memory_allocator()->Size() + memory_allocator()->Available(); |
| 5020 isolate()->memory_allocator()->Available(); | |
| 5021 *stats->os_error = base::OS::GetLastError(); | 5024 *stats->os_error = base::OS::GetLastError(); |
| 5022 isolate()->memory_allocator()->Available(); | 5025 memory_allocator()->Available(); |
| 5023 if (take_snapshot) { | 5026 if (take_snapshot) { |
| 5024 HeapIterator iterator(this); | 5027 HeapIterator iterator(this); |
| 5025 for (HeapObject* obj = iterator.next(); obj != NULL; | 5028 for (HeapObject* obj = iterator.next(); obj != NULL; |
| 5026 obj = iterator.next()) { | 5029 obj = iterator.next()) { |
| 5027 InstanceType type = obj->map()->instance_type(); | 5030 InstanceType type = obj->map()->instance_type(); |
| 5028 DCHECK(0 <= type && type <= LAST_TYPE); | 5031 DCHECK(0 <= type && type <= LAST_TYPE); |
| 5029 stats->objects_per_type[type]++; | 5032 stats->objects_per_type[type]++; |
| 5030 stats->size_per_type[type] += obj->Size(); | 5033 stats->size_per_type[type] += obj->Size(); |
| 5031 } | 5034 } |
| 5032 } | 5035 } |
| (...skipping 214 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5247 // Configuration is based on the flags new-space-size (really the semispace | 5250 // Configuration is based on the flags new-space-size (really the semispace |
| 5248 // size) and old-space-size if set or the initial values of semispace_size_ | 5251 // size) and old-space-size if set or the initial values of semispace_size_ |
| 5249 // and old_generation_size_ otherwise. | 5252 // and old_generation_size_ otherwise. |
| 5250 if (!configured_) { | 5253 if (!configured_) { |
| 5251 if (!ConfigureHeapDefault()) return false; | 5254 if (!ConfigureHeapDefault()) return false; |
| 5252 } | 5255 } |
| 5253 | 5256 |
| 5254 base::CallOnce(&initialize_gc_once, &InitializeGCOnce); | 5257 base::CallOnce(&initialize_gc_once, &InitializeGCOnce); |
| 5255 | 5258 |
| 5256 // Set up memory allocator. | 5259 // Set up memory allocator. |
| 5257 if (!isolate_->memory_allocator()->SetUp(MaxReserved(), MaxExecutableSize())) | 5260 memory_allocator_ = new MemoryAllocator(isolate_); |
| 5261 if (!memory_allocator_->SetUp(MaxReserved(), MaxExecutableSize(), |
| 5262 code_range_size_)) |
| 5258 return false; | 5263 return false; |
| 5259 | 5264 |
| 5260 // Initialize incremental marking. | 5265 // Initialize incremental marking. |
| 5261 incremental_marking_ = new IncrementalMarking(this); | 5266 incremental_marking_ = new IncrementalMarking(this); |
| 5262 | 5267 |
| 5263 // Set up new space. | 5268 // Set up new space. |
| 5264 if (!new_space_.SetUp(initial_semispace_size_, max_semi_space_size_)) { | 5269 if (!new_space_.SetUp(initial_semispace_size_, max_semi_space_size_)) { |
| 5265 return false; | 5270 return false; |
| 5266 } | 5271 } |
| 5267 new_space_top_after_last_gc_ = new_space()->top(); | 5272 new_space_top_after_last_gc_ = new_space()->top(); |
| 5268 | 5273 |
| 5269 // Initialize old space. | 5274 // Initialize old space. |
| 5270 old_space_ = new OldSpace(this, OLD_SPACE, NOT_EXECUTABLE); | 5275 old_space_ = new OldSpace(this, OLD_SPACE, NOT_EXECUTABLE); |
| 5271 if (old_space_ == NULL) return false; | 5276 if (old_space_ == NULL) return false; |
| 5272 if (!old_space_->SetUp()) return false; | 5277 if (!old_space_->SetUp()) return false; |
| 5273 | 5278 |
| 5274 if (!isolate_->code_range()->SetUp(code_range_size_)) return false; | |
| 5275 | |
| 5276 // Initialize the code space, set its maximum capacity to the old | 5279 // Initialize the code space, set its maximum capacity to the old |
| 5277 // generation size. It needs executable memory. | 5280 // generation size. It needs executable memory. |
| 5278 code_space_ = new OldSpace(this, CODE_SPACE, EXECUTABLE); | 5281 code_space_ = new OldSpace(this, CODE_SPACE, EXECUTABLE); |
| 5279 if (code_space_ == NULL) return false; | 5282 if (code_space_ == NULL) return false; |
| 5280 if (!code_space_->SetUp()) return false; | 5283 if (!code_space_->SetUp()) return false; |
| 5281 | 5284 |
| 5282 // Initialize map space. | 5285 // Initialize map space. |
| 5283 map_space_ = new MapSpace(this, MAP_SPACE); | 5286 map_space_ = new MapSpace(this, MAP_SPACE); |
| 5284 if (map_space_ == NULL) return false; | 5287 if (map_space_ == NULL) return false; |
| 5285 if (!map_space_->SetUp()) return false; | 5288 if (!map_space_->SetUp()) return false; |
| (...skipping 219 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 5505 } | 5508 } |
| 5506 | 5509 |
| 5507 if (lo_space_ != NULL) { | 5510 if (lo_space_ != NULL) { |
| 5508 lo_space_->TearDown(); | 5511 lo_space_->TearDown(); |
| 5509 delete lo_space_; | 5512 delete lo_space_; |
| 5510 lo_space_ = NULL; | 5513 lo_space_ = NULL; |
| 5511 } | 5514 } |
| 5512 | 5515 |
| 5513 store_buffer()->TearDown(); | 5516 store_buffer()->TearDown(); |
| 5514 | 5517 |
| 5515 isolate_->memory_allocator()->TearDown(); | 5518 memory_allocator()->TearDown(); |
| 5516 | 5519 |
| 5517 StrongRootsList* next = NULL; | 5520 StrongRootsList* next = NULL; |
| 5518 for (StrongRootsList* list = strong_roots_list_; list; list = next) { | 5521 for (StrongRootsList* list = strong_roots_list_; list; list = next) { |
| 5519 next = list->next; | 5522 next = list->next; |
| 5520 delete list; | 5523 delete list; |
| 5521 } | 5524 } |
| 5522 strong_roots_list_ = NULL; | 5525 strong_roots_list_ = NULL; |
| 5526 |
| 5527 delete memory_allocator_; |
| 5528 memory_allocator_ = nullptr; |
| 5523 } | 5529 } |
| 5524 | 5530 |
| 5525 | 5531 |
| 5526 void Heap::AddGCPrologueCallback(v8::Isolate::GCCallback callback, | 5532 void Heap::AddGCPrologueCallback(v8::Isolate::GCCallback callback, |
| 5527 GCType gc_type, bool pass_isolate) { | 5533 GCType gc_type, bool pass_isolate) { |
| 5528 DCHECK(callback != NULL); | 5534 DCHECK(callback != NULL); |
| 5529 GCCallbackPair pair(callback, gc_type, pass_isolate); | 5535 GCCallbackPair pair(callback, gc_type, pass_isolate); |
| 5530 DCHECK(!gc_prologue_callbacks_.Contains(pair)); | 5536 DCHECK(!gc_prologue_callbacks_.Contains(pair)); |
| 5531 return gc_prologue_callbacks_.Add(pair); | 5537 return gc_prologue_callbacks_.Add(pair); |
| 5532 } | 5538 } |
| (...skipping 779 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 6312 while (concurrent_unmapping_tasks_active_ > 0) { | 6318 while (concurrent_unmapping_tasks_active_ > 0) { |
| 6313 pending_unmapping_tasks_semaphore_.Wait(); | 6319 pending_unmapping_tasks_semaphore_.Wait(); |
| 6314 concurrent_unmapping_tasks_active_--; | 6320 concurrent_unmapping_tasks_active_--; |
| 6315 } | 6321 } |
| 6316 } | 6322 } |
| 6317 | 6323 |
| 6318 | 6324 |
| 6319 void Heap::QueueMemoryChunkForFree(MemoryChunk* chunk) { | 6325 void Heap::QueueMemoryChunkForFree(MemoryChunk* chunk) { |
| 6320 // PreFree logically frees the memory chunk. However, the actual freeing | 6326 // PreFree logically frees the memory chunk. However, the actual freeing |
| 6321 // will happen on a separate thread sometime later. | 6327 // will happen on a separate thread sometime later. |
| 6322 isolate_->memory_allocator()->PreFreeMemory(chunk); | 6328 memory_allocator()->PreFreeMemory(chunk); |
| 6323 | 6329 |
| 6324 // The chunks added to this queue will be freed by a concurrent thread. | 6330 // The chunks added to this queue will be freed by a concurrent thread. |
| 6325 chunk->set_next_chunk(chunks_queued_for_free_); | 6331 chunk->set_next_chunk(chunks_queued_for_free_); |
| 6326 chunks_queued_for_free_ = chunk; | 6332 chunks_queued_for_free_ = chunk; |
| 6327 } | 6333 } |
| 6328 | 6334 |
| 6329 | 6335 |
| 6330 void Heap::FreeQueuedChunks() { | 6336 void Heap::FreeQueuedChunks() { |
| 6331 if (chunks_queued_for_free_ != NULL) { | 6337 if (chunks_queued_for_free_ != NULL) { |
| 6332 if (FLAG_concurrent_sweeping) { | 6338 if (FLAG_concurrent_sweeping) { |
| (...skipping 12 matching lines...) Expand all Loading... |
| 6345 } | 6351 } |
| 6346 concurrent_unmapping_tasks_active_++; | 6352 concurrent_unmapping_tasks_active_++; |
| 6347 } | 6353 } |
| 6348 | 6354 |
| 6349 | 6355 |
| 6350 void Heap::FreeQueuedChunks(MemoryChunk* list_head) { | 6356 void Heap::FreeQueuedChunks(MemoryChunk* list_head) { |
| 6351 MemoryChunk* next; | 6357 MemoryChunk* next; |
| 6352 MemoryChunk* chunk; | 6358 MemoryChunk* chunk; |
| 6353 for (chunk = list_head; chunk != NULL; chunk = next) { | 6359 for (chunk = list_head; chunk != NULL; chunk = next) { |
| 6354 next = chunk->next_chunk(); | 6360 next = chunk->next_chunk(); |
| 6355 isolate_->memory_allocator()->PerformFreeMemory(chunk); | 6361 memory_allocator()->PerformFreeMemory(chunk); |
| 6356 } | 6362 } |
| 6357 } | 6363 } |
| 6358 | 6364 |
| 6359 | 6365 |
| 6360 void Heap::RememberUnmappedPage(Address page, bool compacted) { | 6366 void Heap::RememberUnmappedPage(Address page, bool compacted) { |
| 6361 uintptr_t p = reinterpret_cast<uintptr_t>(page); | 6367 uintptr_t p = reinterpret_cast<uintptr_t>(page); |
| 6362 // Tag the page pointer to make it findable in the dump file. | 6368 // Tag the page pointer to make it findable in the dump file. |
| 6363 if (compacted) { | 6369 if (compacted) { |
| 6364 p ^= 0xc1ead & (Page::kPageSize - 1); // Cleared. | 6370 p ^= 0xc1ead & (Page::kPageSize - 1); // Cleared. |
| 6365 } else { | 6371 } else { |
| (...skipping 91 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 6457 } | 6463 } |
| 6458 | 6464 |
| 6459 | 6465 |
| 6460 // static | 6466 // static |
| 6461 int Heap::GetStaticVisitorIdForMap(Map* map) { | 6467 int Heap::GetStaticVisitorIdForMap(Map* map) { |
| 6462 return StaticVisitorBase::GetVisitorId(map); | 6468 return StaticVisitorBase::GetVisitorId(map); |
| 6463 } | 6469 } |
| 6464 | 6470 |
| 6465 } // namespace internal | 6471 } // namespace internal |
| 6466 } // namespace v8 | 6472 } // namespace v8 |
| OLD | NEW |