| OLD | NEW | 
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. | 
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be | 
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. | 
| 4 | 4 | 
| 5 #include "src/heap/heap.h" | 5 #include "src/heap/heap.h" | 
| 6 | 6 | 
| 7 #include "src/accessors.h" | 7 #include "src/accessors.h" | 
| 8 #include "src/api.h" | 8 #include "src/api.h" | 
| 9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" | 
| 10 #include "src/base/once.h" | 10 #include "src/base/once.h" | 
| (...skipping 119 matching lines...) | 
| 130       gcs_since_last_deopt_(0), | 130       gcs_since_last_deopt_(0), | 
| 131       allocation_sites_scratchpad_length_(0), | 131       allocation_sites_scratchpad_length_(0), | 
| 132       ring_buffer_full_(false), | 132       ring_buffer_full_(false), | 
| 133       ring_buffer_end_(0), | 133       ring_buffer_end_(0), | 
| 134       promotion_queue_(this), | 134       promotion_queue_(this), | 
| 135       configured_(false), | 135       configured_(false), | 
| 136       current_gc_flags_(Heap::kNoGCFlags), | 136       current_gc_flags_(Heap::kNoGCFlags), | 
| 137       current_gc_callback_flags_(GCCallbackFlags::kNoGCCallbackFlags), | 137       current_gc_callback_flags_(GCCallbackFlags::kNoGCCallbackFlags), | 
| 138       external_string_table_(this), | 138       external_string_table_(this), | 
| 139       chunks_queued_for_free_(NULL), | 139       chunks_queued_for_free_(NULL), | 
| 140       pending_unmap_job_semaphore_(0), | 140       concurrent_unmapping_tasks_active_(0), | 
|  | 141       pending_unmapping_tasks_semaphore_(0), | 
| 141       gc_callbacks_depth_(0), | 142       gc_callbacks_depth_(0), | 
| 142       deserialization_complete_(false), | 143       deserialization_complete_(false), | 
| 143       concurrent_sweeping_enabled_(false), | 144       concurrent_sweeping_enabled_(false), | 
| 144       strong_roots_list_(NULL) { | 145       strong_roots_list_(NULL) { | 
| 145 // Allow build-time customization of the max semispace size. Building | 146 // Allow build-time customization of the max semispace size. Building | 
| 146 // V8 with snapshots and a non-default max semispace size is much | 147 // V8 with snapshots and a non-default max semispace size is much | 
| 147 // easier if you can define it as part of the build environment. | 148 // easier if you can define it as part of the build environment. | 
| 148 #if defined(V8_MAX_SEMISPACE_SIZE) | 149 #if defined(V8_MAX_SEMISPACE_SIZE) | 
| 149   max_semi_space_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE; | 150   max_semi_space_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE; | 
| 150 #endif | 151 #endif | 
| (...skipping 5602 matching lines...) | 
| 5753   if (FLAG_verify_predictable) { | 5754   if (FLAG_verify_predictable) { | 
| 5754     PrintAlloctionsHash(); | 5755     PrintAlloctionsHash(); | 
| 5755   } | 5756   } | 
| 5756 | 5757 | 
| 5757   if (memory_reducer_ != nullptr) { | 5758   if (memory_reducer_ != nullptr) { | 
| 5758     memory_reducer_->TearDown(); | 5759     memory_reducer_->TearDown(); | 
| 5759     delete memory_reducer_; | 5760     delete memory_reducer_; | 
| 5760     memory_reducer_ = nullptr; | 5761     memory_reducer_ = nullptr; | 
| 5761   } | 5762   } | 
| 5762 | 5763 | 
|  | 5764   WaitUntilUnmappingOfFreeChunksCompleted(); | 
|  | 5765 | 
| 5763   TearDownArrayBuffers(); | 5766   TearDownArrayBuffers(); | 
| 5764 | 5767 | 
| 5765   isolate_->global_handles()->TearDown(); | 5768   isolate_->global_handles()->TearDown(); | 
| 5766 | 5769 | 
| 5767   external_string_table_.TearDown(); | 5770   external_string_table_.TearDown(); | 
| 5768 | 5771 | 
| 5769   mark_compact_collector()->TearDown(); | 5772   mark_compact_collector()->TearDown(); | 
| 5770 | 5773 | 
| 5771   delete tracer_; | 5774   delete tracer_; | 
| 5772   tracer_ = nullptr; | 5775   tracer_ = nullptr; | 
| (...skipping 744 matching lines...) | 
| 6517 class Heap::UnmapFreeMemoryTask : public v8::Task { | 6520 class Heap::UnmapFreeMemoryTask : public v8::Task { | 
| 6518  public: | 6521  public: | 
| 6519   UnmapFreeMemoryTask(Heap* heap, MemoryChunk* head) | 6522   UnmapFreeMemoryTask(Heap* heap, MemoryChunk* head) | 
| 6520       : heap_(heap), head_(head) {} | 6523       : heap_(heap), head_(head) {} | 
| 6521   virtual ~UnmapFreeMemoryTask() {} | 6524   virtual ~UnmapFreeMemoryTask() {} | 
| 6522 | 6525 | 
| 6523  private: | 6526  private: | 
| 6524   // v8::Task overrides. | 6527   // v8::Task overrides. | 
| 6525   void Run() override { | 6528   void Run() override { | 
| 6526     heap_->FreeQueuedChunks(head_); | 6529     heap_->FreeQueuedChunks(head_); | 
| 6527     heap_->pending_unmap_job_semaphore_.Signal(); | 6530     heap_->pending_unmapping_tasks_semaphore_.Signal(); | 
| 6528   } | 6531   } | 
| 6529 | 6532 | 
| 6530   Heap* heap_; | 6533   Heap* heap_; | 
| 6531   MemoryChunk* head_; | 6534   MemoryChunk* head_; | 
| 6532 | 6535 | 
| 6533   DISALLOW_COPY_AND_ASSIGN(UnmapFreeMemoryTask); | 6536   DISALLOW_COPY_AND_ASSIGN(UnmapFreeMemoryTask); | 
| 6534 }; | 6537 }; | 
| 6535 | 6538 | 
| 6536 | 6539 | 
| 6537 void Heap::WaitUntilUnmappingOfFreeChunksCompleted() { | 6540 void Heap::WaitUntilUnmappingOfFreeChunksCompleted() { | 
| 6538   // We start an unmap job after sweeping and after compaction. | 6541   while (concurrent_unmapping_tasks_active_ > 0) { | 
| 6539   pending_unmap_job_semaphore_.Wait(); | 6542     pending_unmapping_tasks_semaphore_.Wait(); | 
| 6540   pending_unmap_job_semaphore_.Wait(); | 6543     concurrent_unmapping_tasks_active_--; | 
|  | 6544   } | 
| 6541 } | 6545 } | 
| 6542 | 6546 | 
| 6543 | 6547 | 
| 6544 void Heap::QueueMemoryChunkForFree(MemoryChunk* chunk) { | 6548 void Heap::QueueMemoryChunkForFree(MemoryChunk* chunk) { | 
| 6545   // PreFree logically frees the memory chunk. However, the actual freeing | 6549   // PreFree logically frees the memory chunk. However, the actual freeing | 
| 6546   // will happen on a separate thread sometime later. | 6550   // will happen on a separate thread sometime later. | 
| 6547   isolate_->memory_allocator()->PreFreeMemory(chunk); | 6551   isolate_->memory_allocator()->PreFreeMemory(chunk); | 
| 6548 | 6552 | 
| 6549   // The chunks added to this queue will be freed by a concurrent thread. | 6553   // The chunks added to this queue will be freed by a concurrent thread. | 
| 6550   chunk->set_next_chunk(chunks_queued_for_free_); | 6554   chunk->set_next_chunk(chunks_queued_for_free_); | 
| (...skipping 16 matching lines...) | 
| 6567 | 6571 | 
| 6568 void Heap::FreeQueuedChunks() { | 6572 void Heap::FreeQueuedChunks() { | 
| 6569   if (chunks_queued_for_free_ != NULL) { | 6573   if (chunks_queued_for_free_ != NULL) { | 
| 6570     V8::GetCurrentPlatform()->CallOnBackgroundThread( | 6574     V8::GetCurrentPlatform()->CallOnBackgroundThread( | 
| 6571         new UnmapFreeMemoryTask(this, chunks_queued_for_free_), | 6575         new UnmapFreeMemoryTask(this, chunks_queued_for_free_), | 
| 6572         v8::Platform::kShortRunningTask); | 6576         v8::Platform::kShortRunningTask); | 
| 6573     chunks_queued_for_free_ = NULL; | 6577     chunks_queued_for_free_ = NULL; | 
| 6574   } else { | 6578   } else { | 
| 6575     // If we do not have anything to unmap, we just signal the semaphore | 6579     // If we do not have anything to unmap, we just signal the semaphore | 
| 6576     // that we are done. | 6580     // that we are done. | 
| 6577     pending_unmap_job_semaphore_.Signal(); | 6581     pending_unmapping_tasks_semaphore_.Signal(); | 
| 6578   } | 6582   } | 
|  | 6583   concurrent_unmapping_tasks_active_++; | 
| 6579 } | 6584 } | 
| 6580 | 6585 | 
| 6581 | 6586 | 
| 6582 void Heap::FreeQueuedChunks(MemoryChunk* list_head) { | 6587 void Heap::FreeQueuedChunks(MemoryChunk* list_head) { | 
| 6583   MemoryChunk* next; | 6588   MemoryChunk* next; | 
| 6584   MemoryChunk* chunk; | 6589   MemoryChunk* chunk; | 
| 6585   for (chunk = list_head; chunk != NULL; chunk = next) { | 6590   for (chunk = list_head; chunk != NULL; chunk = next) { | 
| 6586     next = chunk->next_chunk(); | 6591     next = chunk->next_chunk(); | 
| 6587     isolate_->memory_allocator()->PerformFreeMemory(chunk); | 6592     isolate_->memory_allocator()->PerformFreeMemory(chunk); | 
| 6588   } | 6593   } | 
| (...skipping 193 matching lines...) | 
| 6782     *object_type = "CODE_TYPE";                                                \ | 6787     *object_type = "CODE_TYPE";                                                \ | 
| 6783     *object_sub_type = "CODE_AGE/" #name;                                      \ | 6788     *object_sub_type = "CODE_AGE/" #name;                                      \ | 
| 6784     return true; | 6789     return true; | 
| 6785     CODE_AGE_LIST_COMPLETE(COMPARE_AND_RETURN_NAME) | 6790     CODE_AGE_LIST_COMPLETE(COMPARE_AND_RETURN_NAME) | 
| 6786 #undef COMPARE_AND_RETURN_NAME | 6791 #undef COMPARE_AND_RETURN_NAME | 
| 6787   } | 6792   } | 
| 6788   return false; | 6793   return false; | 
| 6789 } | 6794 } | 
| 6790 }  // namespace internal | 6795 }  // namespace internal | 
| 6791 }  // namespace v8 | 6796 }  // namespace v8 | 
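
For readers following the new wait logic above: the patch replaces the two hard-coded `Wait()` calls with a counter of started unmapping tasks (`concurrent_unmapping_tasks_active_`) and one semaphore wait per started task. Below is a minimal standalone sketch of that counting pattern, using C++20's `std::counting_semaphore` rather than V8's `base::Semaphore`; the `UnmapCoordinator` class and its method names are illustrative only and are not part of this CL.

```cpp
// Standalone sketch (not V8 code): each background task signals the
// semaphore when done; the main thread waits once per task it started.
#include <semaphore>
#include <thread>

class UnmapCoordinator {
 public:
  // Hypothetical helper: start one background "unmap" task.
  void StartTask() {
    ++tasks_active_;
    std::thread([this] {
      // ... perform the actual unmapping work here ...
      pending_tasks_semaphore_.release();  // signal completion
    }).detach();
  }

  // Wait until every task started so far has signalled, mirroring the
  // new Heap::WaitUntilUnmappingOfFreeChunksCompleted() loop.
  void WaitUntilAllTasksCompleted() {
    while (tasks_active_ > 0) {
      pending_tasks_semaphore_.acquire();
      --tasks_active_;
    }
  }

 private:
  int tasks_active_ = 0;  // touched only on the coordinating thread
  std::counting_semaphore<64> pending_tasks_semaphore_{0};
};

int main() {
  UnmapCoordinator coordinator;
  coordinator.StartTask();
  coordinator.StartTask();
  coordinator.WaitUntilAllTasksCompleted();  // blocks until both tasks signal
}
```

The design choice mirrored here is that the counter is only read and written on the thread that starts tasks and waits for them, so no extra synchronization is needed for it; the semaphore alone carries the cross-thread completion signal.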