OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/heap.h" | 5 #include "src/heap/heap.h" |
6 | 6 |
7 #include "src/accessors.h" | 7 #include "src/accessors.h" |
8 #include "src/api.h" | 8 #include "src/api.h" |
9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
10 #include "src/base/once.h" | 10 #include "src/base/once.h" |
(...skipping 119 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
130 gcs_since_last_deopt_(0), | 130 gcs_since_last_deopt_(0), |
131 allocation_sites_scratchpad_length_(0), | 131 allocation_sites_scratchpad_length_(0), |
132 ring_buffer_full_(false), | 132 ring_buffer_full_(false), |
133 ring_buffer_end_(0), | 133 ring_buffer_end_(0), |
134 promotion_queue_(this), | 134 promotion_queue_(this), |
135 configured_(false), | 135 configured_(false), |
136 current_gc_flags_(Heap::kNoGCFlags), | 136 current_gc_flags_(Heap::kNoGCFlags), |
137 current_gc_callback_flags_(GCCallbackFlags::kNoGCCallbackFlags), | 137 current_gc_callback_flags_(GCCallbackFlags::kNoGCCallbackFlags), |
138 external_string_table_(this), | 138 external_string_table_(this), |
139 chunks_queued_for_free_(NULL), | 139 chunks_queued_for_free_(NULL), |
140 pending_unmap_job_semaphore_(0), | 140 concurrent_unmaping_tasks_active_(0), |
141 pending_unmap_tasks_semaphore_(0), | |
141 gc_callbacks_depth_(0), | 142 gc_callbacks_depth_(0), |
142 deserialization_complete_(false), | 143 deserialization_complete_(false), |
143 concurrent_sweeping_enabled_(false), | 144 concurrent_sweeping_enabled_(false), |
144 strong_roots_list_(NULL) { | 145 strong_roots_list_(NULL) { |
145 // Allow build-time customization of the max semispace size. Building | 146 // Allow build-time customization of the max semispace size. Building |
146 // V8 with snapshots and a non-default max semispace size is much | 147 // V8 with snapshots and a non-default max semispace size is much |
147 // easier if you can define it as part of the build environment. | 148 // easier if you can define it as part of the build environment. |
148 #if defined(V8_MAX_SEMISPACE_SIZE) | 149 #if defined(V8_MAX_SEMISPACE_SIZE) |
149 max_semi_space_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE; | 150 max_semi_space_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE; |
150 #endif | 151 #endif |
(...skipping 6366 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
6517 class Heap::UnmapFreeMemoryTask : public v8::Task { | 6518 class Heap::UnmapFreeMemoryTask : public v8::Task { |
6518 public: | 6519 public: |
6519 UnmapFreeMemoryTask(Heap* heap, MemoryChunk* head) | 6520 UnmapFreeMemoryTask(Heap* heap, MemoryChunk* head) |
6520 : heap_(heap), head_(head) {} | 6521 : heap_(heap), head_(head) {} |
6521 virtual ~UnmapFreeMemoryTask() {} | 6522 virtual ~UnmapFreeMemoryTask() {} |
6522 | 6523 |
6523 private: | 6524 private: |
6524 // v8::Task overrides. | 6525 // v8::Task overrides. |
6525 void Run() override { | 6526 void Run() override { |
6526 heap_->FreeQueuedChunks(head_); | 6527 heap_->FreeQueuedChunks(head_); |
6527 heap_->pending_unmap_job_semaphore_.Signal(); | 6528 heap_->pending_unmap_tasks_semaphore_.Signal(); |
6528 } | 6529 } |
6529 | 6530 |
6530 Heap* heap_; | 6531 Heap* heap_; |
6531 MemoryChunk* head_; | 6532 MemoryChunk* head_; |
6532 | 6533 |
6533 DISALLOW_COPY_AND_ASSIGN(UnmapFreeMemoryTask); | 6534 DISALLOW_COPY_AND_ASSIGN(UnmapFreeMemoryTask); |
6534 }; | 6535 }; |
6535 | 6536 |
6536 | 6537 |
6537 void Heap::WaitUntilUnmappingOfFreeChunksCompleted() { | 6538 void Heap::WaitUntilUnmappingOfFreeChunksCompleted() { |
6538 // We start an unmap job after sweeping and after compaction. | 6539 // We start an unmap job after sweeping and after compaction. |
6539 pending_unmap_job_semaphore_.Wait(); | 6540 if (concurrent_unmaping_tasks_active_ > 0) { |
Michael Lippautz
2015/08/27 11:37:41
We could make this more general, so that we can st[art an arbitrary number of unmapping tasks — comment truncated in export]
Hannes Payer (out of office)
2015/08/27 12:34:37
The intention was to assert if we are waiting for [tasks, exactly two of them should be active — comment truncated in export]
| |
6540 pending_unmap_job_semaphore_.Wait(); | 6541 // There should be two concurrent unmapping tasks running. |
6542 DCHECK(concurrent_unmaping_tasks_active_ == 2); | |
6543 pending_unmap_tasks_semaphore_.Wait(); | |
6544 pending_unmap_tasks_semaphore_.Wait(); | |
6545 concurrent_unmaping_tasks_active_ = 0; | |
6546 } | |
6541 } | 6547 } |
6542 | 6548 |
6543 | 6549 |
6544 void Heap::QueueMemoryChunkForFree(MemoryChunk* chunk) { | 6550 void Heap::QueueMemoryChunkForFree(MemoryChunk* chunk) { |
6545 // PreFree logically frees the memory chunk. However, the actual freeing | 6551 // PreFree logically frees the memory chunk. However, the actual freeing |
6546 // will happen on a separate thread sometime later. | 6552 // will happen on a separate thread sometime later. |
6547 isolate_->memory_allocator()->PreFreeMemory(chunk); | 6553 isolate_->memory_allocator()->PreFreeMemory(chunk); |
6548 | 6554 |
6549 // The chunks added to this queue will be freed by a concurrent thread. | 6555 // The chunks added to this queue will be freed by a concurrent thread. |
6550 chunk->set_next_chunk(chunks_queued_for_free_); | 6556 chunk->set_next_chunk(chunks_queued_for_free_); |
(...skipping 16 matching lines...) Expand all Loading... | |
6567 | 6573 |
6568 void Heap::FreeQueuedChunks() { | 6574 void Heap::FreeQueuedChunks() { |
6569 if (chunks_queued_for_free_ != NULL) { | 6575 if (chunks_queued_for_free_ != NULL) { |
6570 V8::GetCurrentPlatform()->CallOnBackgroundThread( | 6576 V8::GetCurrentPlatform()->CallOnBackgroundThread( |
6571 new UnmapFreeMemoryTask(this, chunks_queued_for_free_), | 6577 new UnmapFreeMemoryTask(this, chunks_queued_for_free_), |
6572 v8::Platform::kShortRunningTask); | 6578 v8::Platform::kShortRunningTask); |
6573 chunks_queued_for_free_ = NULL; | 6579 chunks_queued_for_free_ = NULL; |
6574 } else { | 6580 } else { |
6575 // If we do not have anything to unmap, we just signal the semaphore | 6581 // If we do not have anything to unmap, we just signal the semaphore |
6576 // that we are done. | 6582 // that we are done. |
6577 pending_unmap_job_semaphore_.Signal(); | 6583 pending_unmap_tasks_semaphore_.Signal(); |
6578 } | 6584 } |
6585 concurrent_unmaping_tasks_active_++; | |
6579 } | 6586 } |
6580 | 6587 |
6581 | 6588 |
6582 void Heap::FreeQueuedChunks(MemoryChunk* list_head) { | 6589 void Heap::FreeQueuedChunks(MemoryChunk* list_head) { |
6583 MemoryChunk* next; | 6590 MemoryChunk* next; |
6584 MemoryChunk* chunk; | 6591 MemoryChunk* chunk; |
6585 for (chunk = list_head; chunk != NULL; chunk = next) { | 6592 for (chunk = list_head; chunk != NULL; chunk = next) { |
6586 next = chunk->next_chunk(); | 6593 next = chunk->next_chunk(); |
6587 isolate_->memory_allocator()->PerformFreeMemory(chunk); | 6594 isolate_->memory_allocator()->PerformFreeMemory(chunk); |
6588 } | 6595 } |
(...skipping 193 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
6782 *object_type = "CODE_TYPE"; \ | 6789 *object_type = "CODE_TYPE"; \ |
6783 *object_sub_type = "CODE_AGE/" #name; \ | 6790 *object_sub_type = "CODE_AGE/" #name; \ |
6784 return true; | 6791 return true; |
6785 CODE_AGE_LIST_COMPLETE(COMPARE_AND_RETURN_NAME) | 6792 CODE_AGE_LIST_COMPLETE(COMPARE_AND_RETURN_NAME) |
6786 #undef COMPARE_AND_RETURN_NAME | 6793 #undef COMPARE_AND_RETURN_NAME |
6787 } | 6794 } |
6788 return false; | 6795 return false; |
6789 } | 6796 } |
6790 } // namespace internal | 6797 } // namespace internal |
6791 } // namespace v8 | 6798 } // namespace v8 |
OLD | NEW |