OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/heap.h" | 5 #include "src/heap/heap.h" |
6 | 6 |
7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
8 #include "src/api.h" | 8 #include "src/api.h" |
9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
10 #include "src/base/once.h" | 10 #include "src/base/once.h" |
(...skipping 117 matching lines...)
128 old_generation_size_at_last_gc_(0), | 128 old_generation_size_at_last_gc_(0), |
129 gcs_since_last_deopt_(0), | 129 gcs_since_last_deopt_(0), |
130 allocation_sites_scratchpad_length_(0), | 130 allocation_sites_scratchpad_length_(0), |
131 ring_buffer_full_(false), | 131 ring_buffer_full_(false), |
132 ring_buffer_end_(0), | 132 ring_buffer_end_(0), |
133 promotion_queue_(this), | 133 promotion_queue_(this), |
134 configured_(false), | 134 configured_(false), |
135 current_gc_flags_(Heap::kNoGCFlags), | 135 current_gc_flags_(Heap::kNoGCFlags), |
136 external_string_table_(this), | 136 external_string_table_(this), |
137 chunks_queued_for_free_(NULL), | 137 chunks_queued_for_free_(NULL), |
| 138 pending_unmap_job_semaphore_(0), |
138 gc_callbacks_depth_(0), | 139 gc_callbacks_depth_(0), |
139 deserialization_complete_(false), | 140 deserialization_complete_(false), |
140 concurrent_sweeping_enabled_(false), | 141 concurrent_sweeping_enabled_(false), |
141 strong_roots_list_(NULL) { | 142 strong_roots_list_(NULL) { |
142 // Allow build-time customization of the max semispace size. Building | 143 // Allow build-time customization of the max semispace size. Building |
143 // V8 with snapshots and a non-default max semispace size is much | 144 // V8 with snapshots and a non-default max semispace size is much |
144 // easier if you can define it as part of the build environment. | 145 // easier if you can define it as part of the build environment. |
145 #if defined(V8_MAX_SEMISPACE_SIZE) | 146 #if defined(V8_MAX_SEMISPACE_SIZE) |
146 max_semi_space_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE; | 147 max_semi_space_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE; |
147 #endif | 148 #endif |
(...skipping 6346 matching lines...)
6494 heap_->FinalizeExternalString(ExternalString::cast(new_space_strings_[i])); | 6495 heap_->FinalizeExternalString(ExternalString::cast(new_space_strings_[i])); |
6495 } | 6496 } |
6496 new_space_strings_.Free(); | 6497 new_space_strings_.Free(); |
6497 for (int i = 0; i < old_space_strings_.length(); ++i) { | 6498 for (int i = 0; i < old_space_strings_.length(); ++i) { |
6498 heap_->FinalizeExternalString(ExternalString::cast(old_space_strings_[i])); | 6499 heap_->FinalizeExternalString(ExternalString::cast(old_space_strings_[i])); |
6499 } | 6500 } |
6500 old_space_strings_.Free(); | 6501 old_space_strings_.Free(); |
6501 } | 6502 } |
6502 | 6503 |
6503 | 6504 |
| 6505 class Heap::UnmapFreeMemoryTask : public v8::Task { |
| 6506 public: |
| 6507 UnmapFreeMemoryTask(Heap* heap, MemoryChunk* head) |
| 6508 : heap_(heap), head_(head) {} |
| 6509 virtual ~UnmapFreeMemoryTask() {} |
| 6510 |
| 6511 private: |
| 6512 // v8::Task overrides. |
| 6513 void Run() override { |
| 6514 heap_->FreeQueuedChunks(head_); |
| 6515 heap_->pending_unmap_job_semaphore_.Signal(); |
| 6516 } |
| 6517 |
| 6518 Heap* heap_; |
| 6519 MemoryChunk* head_; |
| 6520 |
| 6521 DISALLOW_COPY_AND_ASSIGN(UnmapFreeMemoryTask); |
| 6522 }; |
| 6523 |
| 6524 |
| 6525 void Heap::WaitUntilUnmappingOfFreeChunksCompleted() { |
| 6526 // We start an unmap job after sweeping and after compaction. |
| 6527 pending_unmap_job_semaphore_.Wait(); |
| 6528 pending_unmap_job_semaphore_.Wait(); |
| 6529 } |
| 6530 |
| 6531 |
6504 void Heap::QueueMemoryChunkForFree(MemoryChunk* chunk) { | 6532 void Heap::QueueMemoryChunkForFree(MemoryChunk* chunk) { |
| 6533 // PreFree logically frees the memory chunk. However, the actual freeing |
| 6534 // will happen on a separate thread sometime later. |
| 6535 isolate_->memory_allocator()->PreFreeMemory(chunk); |
| 6536 |
| 6537 // The chunks added to this queue will be freed by a concurrent thread. |
6505 chunk->set_next_chunk(chunks_queued_for_free_); | 6538 chunk->set_next_chunk(chunks_queued_for_free_); |
6506 chunks_queued_for_free_ = chunk; | 6539 chunks_queued_for_free_ = chunk; |
6507 } | 6540 } |
6508 | 6541 |
6509 | 6542 |
6510 void Heap::FilterStoreBufferEntriesOnAboutToBeFreedPages() { | 6543 void Heap::FilterStoreBufferEntriesOnAboutToBeFreedPages() { |
6511 if (chunks_queued_for_free_ == NULL) return; | 6544 if (chunks_queued_for_free_ == NULL) return; |
6512 MemoryChunk* next; | 6545 MemoryChunk* next; |
6513 MemoryChunk* chunk; | 6546 MemoryChunk* chunk; |
6514 for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) { | 6547 for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) { |
6515 next = chunk->next_chunk(); | 6548 next = chunk->next_chunk(); |
6516 chunk->SetFlag(MemoryChunk::ABOUT_TO_BE_FREED); | 6549 chunk->SetFlag(MemoryChunk::ABOUT_TO_BE_FREED); |
6517 } | 6550 } |
6518 isolate_->heap()->store_buffer()->Compact(); | 6551 store_buffer()->Compact(); |
6519 isolate_->heap()->store_buffer()->Filter(MemoryChunk::ABOUT_TO_BE_FREED); | 6552 store_buffer()->Filter(MemoryChunk::ABOUT_TO_BE_FREED); |
6520 } | 6553 } |
6521 | 6554 |
6522 | 6555 |
6523 void Heap::FreeQueuedChunks() { | 6556 void Heap::FreeQueuedChunks() { |
6524 MemoryChunk* next; | 6557 if (chunks_queued_for_free_ != NULL) { |
6525 MemoryChunk* chunk; | 6558 V8::GetCurrentPlatform()->CallOnBackgroundThread( |
6526 for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) { | 6559 new UnmapFreeMemoryTask(this, chunks_queued_for_free_), |
6527 next = chunk->next_chunk(); | 6560 v8::Platform::kShortRunningTask); |
6528 isolate_->memory_allocator()->Free(chunk); | 6561 chunks_queued_for_free_ = NULL; |
| 6562 } else { |
| 6563 // If we do not have anything to unmap, we just signal the semaphore |
| 6564 // that we are done. |
| 6565 pending_unmap_job_semaphore_.Signal(); |
6529 } | 6566 } |
6530 chunks_queued_for_free_ = NULL; | |
6531 } | 6567 } |
6532 | 6568 |
6533 | 6569 |
| 6570 void Heap::FreeQueuedChunks(MemoryChunk* list_head) { |
| 6571 MemoryChunk* next; |
| 6572 MemoryChunk* chunk; |
| 6573 for (chunk = list_head; chunk != NULL; chunk = next) { |
| 6574 next = chunk->next_chunk(); |
| 6575 isolate_->memory_allocator()->PerformFreeMemory(chunk); |
| 6576 } |
| 6577 } |
| 6578 |
| 6579 |
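Aside: below is a minimal sketch (not V8 code) of the completion-barrier pattern these hunks introduce. The Semaphore class here is only a stand-in for the heap's pending_unmap_job_semaphore_ (assumed to be a counting semaphore starting at 0), built on a mutex and condition variable. Each of the two unmap requests per GC cycle (one after sweeping, one after compaction) signals the semaphore exactly once: from the background task after it has freed every queued chunk, or immediately on the calling thread when nothing was queued. Two Wait() calls therefore suffice for WaitUntilUnmappingOfFreeChunksCompleted().

#include <condition_variable>
#include <mutex>
#include <thread>

// Stand-in for V8's base semaphore type (assumption, for illustration only).
class Semaphore {
 public:
  explicit Semaphore(int count) : count_(count) {}
  void Signal() {
    std::lock_guard<std::mutex> lock(mutex_);
    ++count_;
    cv_.notify_one();
  }
  void Wait() {
    std::unique_lock<std::mutex> lock(mutex_);
    cv_.wait(lock, [this] { return count_ > 0; });
    --count_;
  }

 private:
  std::mutex mutex_;
  std::condition_variable cv_;
  int count_;
};

int main() {
  Semaphore pending_unmap_job_semaphore(0);

  // First request: chunks were queued, so a background job frees them and
  // then signals (mirrors UnmapFreeMemoryTask::Run above).
  std::thread unmap_job([&] {
    // ... PerformFreeMemory() on each chunk in the handed-over list ...
    pending_unmap_job_semaphore.Signal();
  });

  // Second request: nothing was queued, so FreeQueuedChunks() signals
  // immediately instead of posting a task.
  pending_unmap_job_semaphore.Signal();

  // WaitUntilUnmappingOfFreeChunksCompleted(): one Wait() per request.
  pending_unmap_job_semaphore.Wait();
  pending_unmap_job_semaphore.Wait();

  unmap_job.join();
  return 0;
}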
6534 void Heap::RememberUnmappedPage(Address page, bool compacted) { | 6580 void Heap::RememberUnmappedPage(Address page, bool compacted) { |
6535 uintptr_t p = reinterpret_cast<uintptr_t>(page); | 6581 uintptr_t p = reinterpret_cast<uintptr_t>(page); |
6536 // Tag the page pointer to make it findable in the dump file. | 6582 // Tag the page pointer to make it findable in the dump file. |
6537 if (compacted) { | 6583 if (compacted) { |
6538 p ^= 0xc1ead & (Page::kPageSize - 1); // Cleared. | 6584 p ^= 0xc1ead & (Page::kPageSize - 1); // Cleared. |
6539 } else { | 6585 } else { |
6540 p ^= 0x1d1ed & (Page::kPageSize - 1); // I died. | 6586 p ^= 0x1d1ed & (Page::kPageSize - 1); // I died. |
6541 } | 6587 } |
6542 remembered_unmapped_pages_[remembered_unmapped_pages_index_] = | 6588 remembered_unmapped_pages_[remembered_unmapped_pages_index_] = |
6543 reinterpret_cast<Address>(p); | 6589 reinterpret_cast<Address>(p); |
(...skipping 180 matching lines...)
6724 *object_type = "CODE_TYPE"; \ | 6770 *object_type = "CODE_TYPE"; \ |
6725 *object_sub_type = "CODE_AGE/" #name; \ | 6771 *object_sub_type = "CODE_AGE/" #name; \ |
6726 return true; | 6772 return true; |
6727 CODE_AGE_LIST_COMPLETE(COMPARE_AND_RETURN_NAME) | 6773 CODE_AGE_LIST_COMPLETE(COMPARE_AND_RETURN_NAME) |
6728 #undef COMPARE_AND_RETURN_NAME | 6774 #undef COMPARE_AND_RETURN_NAME |
6729 } | 6775 } |
6730 return false; | 6776 return false; |
6731 } | 6777 } |
6732 } // namespace internal | 6778 } // namespace internal |
6733 } // namespace v8 | 6779 } // namespace v8 |