OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/spaces.h" | 5 #include "src/heap/spaces.h" |
6 | 6 |
7 #include "src/base/bits.h" | 7 #include "src/base/bits.h" |
8 #include "src/base/platform/platform.h" | 8 #include "src/base/platform/platform.h" |
9 #include "src/base/platform/semaphore.h" | |
10 #include "src/full-codegen/full-codegen.h" | 9 #include "src/full-codegen/full-codegen.h" |
11 #include "src/heap/slot-set.h" | 10 #include "src/heap/slot-set.h" |
12 #include "src/macro-assembler.h" | 11 #include "src/macro-assembler.h" |
13 #include "src/msan.h" | 12 #include "src/msan.h" |
14 #include "src/snapshot/snapshot.h" | 13 #include "src/snapshot/snapshot.h" |
15 #include "src/v8.h" | |
16 | 14 |
17 namespace v8 { | 15 namespace v8 { |
18 namespace internal { | 16 namespace internal { |
19 | 17 |
20 | 18 |
21 // ---------------------------------------------------------------------------- | 19 // ---------------------------------------------------------------------------- |
22 // HeapObjectIterator | 20 // HeapObjectIterator |
23 | 21 |
24 HeapObjectIterator::HeapObjectIterator(PagedSpace* space) { | 22 HeapObjectIterator::HeapObjectIterator(PagedSpace* space) { |
25 // You can't actually iterate over the anchor page. It is not a real page, | 23 // You can't actually iterate over the anchor page. It is not a real page, |
(...skipping 272 matching lines...) |
298 // | 296 // |
299 | 297 |
300 MemoryAllocator::MemoryAllocator(Isolate* isolate) | 298 MemoryAllocator::MemoryAllocator(Isolate* isolate) |
301 : isolate_(isolate), | 299 : isolate_(isolate), |
302 code_range_(nullptr), | 300 code_range_(nullptr), |
303 capacity_(0), | 301 capacity_(0), |
304 capacity_executable_(0), | 302 capacity_executable_(0), |
305 size_(0), | 303 size_(0), |
306 size_executable_(0), | 304 size_executable_(0), |
307 lowest_ever_allocated_(reinterpret_cast<void*>(-1)), | 305 lowest_ever_allocated_(reinterpret_cast<void*>(-1)), |
308 highest_ever_allocated_(reinterpret_cast<void*>(0)), | 306 highest_ever_allocated_(reinterpret_cast<void*>(0)) {} |
309 unmapper_(this) {} | |
310 | 307 |
311 bool MemoryAllocator::SetUp(intptr_t capacity, intptr_t capacity_executable, | 308 bool MemoryAllocator::SetUp(intptr_t capacity, intptr_t capacity_executable, |
312 intptr_t code_range_size) { | 309 intptr_t code_range_size) { |
313 capacity_ = RoundUp(capacity, Page::kPageSize); | 310 capacity_ = RoundUp(capacity, Page::kPageSize); |
314 capacity_executable_ = RoundUp(capacity_executable, Page::kPageSize); | 311 capacity_executable_ = RoundUp(capacity_executable, Page::kPageSize); |
315 DCHECK_GE(capacity_, capacity_executable_); | 312 DCHECK_GE(capacity_, capacity_executable_); |
316 | 313 |
317 size_ = 0; | 314 size_ = 0; |
318 size_executable_ = 0; | 315 size_executable_ = 0; |
319 | 316 |
320 code_range_ = new CodeRange(isolate_); | 317 code_range_ = new CodeRange(isolate_); |
321 if (!code_range_->SetUp(static_cast<size_t>(code_range_size))) return false; | 318 if (!code_range_->SetUp(static_cast<size_t>(code_range_size))) return false; |
322 | 319 |
323 return true; | 320 return true; |
324 } | 321 } |
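
SetUp() above rounds both capacities up to whole pages before the DCHECK compares them. A compile-checked sketch of that rounding, assuming Page::kPageSize is a power of two (the names and the 4096 constant below are illustrative, not V8's):

    #include <cstdint>

    constexpr intptr_t RoundUpSketch(intptr_t x, intptr_t multiple) {
      // Valid when `multiple` is a power of two, as page sizes are.
      return (x + multiple - 1) & ~(multiple - 1);
    }
    static_assert(RoundUpSketch(1, 4096) == 4096, "partial page -> one page");
    static_assert(RoundUpSketch(8192, 4096) == 8192, "aligned input unchanged");
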
325 | 322 |
326 | 323 |
327 void MemoryAllocator::TearDown() { | 324 void MemoryAllocator::TearDown() { |
328 unmapper()->WaitUntilCompleted(); | 325 for (MemoryChunk* chunk : chunk_pool_) { |
329 | |
330 MemoryChunk* chunk = nullptr; | |
331 while ((chunk = unmapper()->TryGetPooledMemoryChunkSafe()) != nullptr) { | |
332 FreeMemory(reinterpret_cast<Address>(chunk), MemoryChunk::kPageSize, | 326 FreeMemory(reinterpret_cast<Address>(chunk), MemoryChunk::kPageSize, |
333 NOT_EXECUTABLE); | 327 NOT_EXECUTABLE); |
334 } | 328 } |
335 | |
336 // Check that spaces were torn down before MemoryAllocator. | 329 // Check that spaces were torn down before MemoryAllocator. |
337 DCHECK_EQ(size_.Value(), 0); | 330 DCHECK_EQ(size_.Value(), 0); |
338 // TODO(gc) this will be true again when we fix FreeMemory. | 331 // TODO(gc) this will be true again when we fix FreeMemory. |
339 // DCHECK(size_executable_ == 0); | 332 // DCHECK(size_executable_ == 0); |
340 capacity_ = 0; | 333 capacity_ = 0; |
341 capacity_executable_ = 0; | 334 capacity_executable_ = 0; |
342 | 335 |
343 if (last_chunk_.IsReserved()) { | 336 if (last_chunk_.IsReserved()) { |
344 last_chunk_.Release(); | 337 last_chunk_.Release(); |
345 } | 338 } |
346 | 339 |
347 delete code_range_; | 340 delete code_range_; |
348 code_range_ = nullptr; | 341 code_range_ = nullptr; |
349 } | 342 } |
350 | 343 |
351 class MemoryAllocator::Unmapper::UnmapFreeMemoryTask : public v8::Task { | |
352 public: | |
353 explicit UnmapFreeMemoryTask(Unmapper* unmapper) : unmapper_(unmapper) {} | |
354 | |
355 private: | |
356 // v8::Task overrides. | |
357 void Run() override { | |
358 unmapper_->PerformFreeMemoryOnQueuedChunks(); | |
359 unmapper_->pending_unmapping_tasks_semaphore_.Signal(); | |
360 } | |
361 | |
362 Unmapper* unmapper_; | |
363 DISALLOW_COPY_AND_ASSIGN(UnmapFreeMemoryTask); | |
364 }; | |
365 | |
366 void MemoryAllocator::Unmapper::FreeQueuedChunks() { | |
367 if (FLAG_concurrent_sweeping) { | |
368 V8::GetCurrentPlatform()->CallOnBackgroundThread( | |
369 new UnmapFreeMemoryTask(this), v8::Platform::kShortRunningTask); | |
370 concurrent_unmapping_tasks_active_++; | |
371 } else { | |
372 PerformFreeMemoryOnQueuedChunks(); | |
373 } | |
374 } | |
375 | |
376 bool MemoryAllocator::Unmapper::WaitUntilCompleted() { | |
377 bool waited = false; | |
378 while (concurrent_unmapping_tasks_active_ > 0) { | |
379 pending_unmapping_tasks_semaphore_.Wait(); | |
380 concurrent_unmapping_tasks_active_--; | |
381 waited = true; | |
382 } | |
383 return waited; | |
384 } | |
385 | |
386 void MemoryAllocator::Unmapper::PerformFreeMemoryOnQueuedChunks() { | |
387 MemoryChunk* chunk = nullptr; | |
388 // Regular chunks. | |
389 while ((chunk = GetMemoryChunkSafe<kRegular>()) != nullptr) { | |
390 bool pooled = chunk->IsFlagSet(MemoryChunk::POOLED); | |
391 allocator_->PerformFreeMemory(chunk); | |
392 if (pooled) AddMemoryChunkSafe<kPooled>(chunk); | |
393 } | |
394 // Non-regular chunks. | |
395 while ((chunk = GetMemoryChunkSafe<kNonRegular>()) != nullptr) { | |
396 allocator_->PerformFreeMemory(chunk); | |
397 } | |
398 } | |
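
The Unmapper removed in this hunk (left column) is a producer/consumer helper: Free<kPreFreeAndQueue> and Free<kPooledAndQueue> enqueue chunks, FreeQueuedChunks() drains the queues on a background task when FLAG_concurrent_sweeping is set, and a semaphore lets WaitUntilCompleted() block until every posted task has signalled. A minimal standalone sketch of the same shape, with std::thread standing in for the v8::Platform task runner (all names below are illustrative):

    #include <mutex>
    #include <queue>
    #include <thread>
    #include <vector>

    class UnmapperSketch {
     public:
      // Mirrors AddMemoryChunkSafe(): producers enqueue under a lock.
      void AddChunkSafe(void* chunk) {
        std::lock_guard<std::mutex> guard(mutex_);
        queue_.push(chunk);
      }
      // Mirrors FreeQueuedChunks(): hand the drain to a background thread.
      void FreeQueuedChunks() { workers_.emplace_back([this] { Drain(); }); }
      // Mirrors WaitUntilCompleted(): block until all posted drains finish.
      void WaitUntilCompleted() {
        for (std::thread& t : workers_) t.join();
        workers_.clear();
      }

     private:
      void Drain() {
        for (;;) {
          void* chunk = nullptr;
          {
            std::lock_guard<std::mutex> guard(mutex_);
            if (queue_.empty()) return;
            chunk = queue_.front();
            queue_.pop();
          }
          // The real task calls allocator_->PerformFreeMemory(chunk) here and
          // re-pools chunks that carry the POOLED flag.
          (void)chunk;
        }
      }

      std::mutex mutex_;
      std::queue<void*> queue_;
      std::vector<std::thread> workers_;
    };
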
399 | |
400 bool MemoryAllocator::CommitMemory(Address base, size_t size, | 344 bool MemoryAllocator::CommitMemory(Address base, size_t size, |
401 Executability executable) { | 345 Executability executable) { |
402 if (!base::VirtualMemory::CommitRegion(base, size, | 346 if (!base::VirtualMemory::CommitRegion(base, size, |
403 executable == EXECUTABLE)) { | 347 executable == EXECUTABLE)) { |
404 return false; | 348 return false; |
405 } | 349 } |
406 UpdateAllocatedSpaceLimits(base, base + size); | 350 UpdateAllocatedSpaceLimits(base, base + size); |
407 return true; | 351 return true; |
408 } | 352 } |
409 | 353 |
(...skipping 387 matching lines...) |
797 | 741 |
798 chunk->SetFlag(MemoryChunk::PRE_FREED); | 742 chunk->SetFlag(MemoryChunk::PRE_FREED); |
799 } | 743 } |
800 | 744 |
801 | 745 |
802 void MemoryAllocator::PerformFreeMemory(MemoryChunk* chunk) { | 746 void MemoryAllocator::PerformFreeMemory(MemoryChunk* chunk) { |
803 DCHECK(chunk->IsFlagSet(MemoryChunk::PRE_FREED)); | 747 DCHECK(chunk->IsFlagSet(MemoryChunk::PRE_FREED)); |
804 chunk->ReleaseAllocatedMemory(); | 748 chunk->ReleaseAllocatedMemory(); |
805 | 749 |
806 base::VirtualMemory* reservation = chunk->reserved_memory(); | 750 base::VirtualMemory* reservation = chunk->reserved_memory(); |
807 if (chunk->IsFlagSet(MemoryChunk::POOLED)) { | 751 if (reservation->IsReserved()) { |
808 UncommitBlock(reinterpret_cast<Address>(chunk), MemoryChunk::kPageSize); | 752 FreeMemory(reservation, chunk->executable()); |
809 } else { | 753 } else { |
810 if (reservation->IsReserved()) { | 754 FreeMemory(chunk->address(), chunk->size(), chunk->executable()); |
811 FreeMemory(reservation, chunk->executable()); | |
812 } else { | |
813 FreeMemory(chunk->address(), chunk->size(), chunk->executable()); | |
814 } | |
815 } | 755 } |
816 } | 756 } |
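
The POOLED branch on the left only uncommits the page, keeping its virtual-address reservation so AllocatePagePooled() can recommit it cheaply later; the non-pooled paths release the mapping entirely. A POSIX sketch of the reserve/commit/uncommit/free distinction (illustrative only; V8 goes through base::VirtualMemory rather than raw mmap):

    #include <sys/mman.h>
    #include <cstddef>

    // Reserve address space without committing backing pages.
    void* Reserve(size_t size) {
      return mmap(nullptr, size, PROT_NONE,
                  MAP_PRIVATE | MAP_ANONYMOUS | MAP_NORESERVE, -1, 0);
    }
    // Commit: make the range readable and writable.
    bool Commit(void* base, size_t size) {
      return mprotect(base, size, PROT_READ | PROT_WRITE) == 0;
    }
    // Uncommit: drop the backing pages but keep the reservation in place.
    bool Uncommit(void* base, size_t size) {
      return mmap(base, size, PROT_NONE,
                  MAP_PRIVATE | MAP_ANONYMOUS | MAP_NORESERVE | MAP_FIXED,
                  -1, 0) != MAP_FAILED;
    }
    // Free: return the whole range to the OS.
    bool Free(void* base, size_t size) { return munmap(base, size) == 0; }
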
817 | 757 |
818 template <MemoryAllocator::FreeMode mode> | 758 template <MemoryAllocator::AllocationMode mode> |
819 void MemoryAllocator::Free(MemoryChunk* chunk) { | 759 void MemoryAllocator::Free(MemoryChunk* chunk) { |
820 switch (mode) { | 760 if (mode == kRegular) { |
821 case kFull: | 761 PreFreeMemory(chunk); |
822 PreFreeMemory(chunk); | 762 PerformFreeMemory(chunk); |
823 PerformFreeMemory(chunk); | 763 } else { |
824 break; | 764 DCHECK_EQ(mode, kPooled); |
825 case kPooledAndQueue: | 765 FreePooled(chunk); |
826 DCHECK_EQ(chunk->size(), static_cast<size_t>(MemoryChunk::kPageSize)); | |
827 DCHECK_EQ(chunk->executable(), NOT_EXECUTABLE); | |
828 chunk->SetFlag(MemoryChunk::POOLED); | |
829 // Fall through to kPreFreeAndQueue. | |
830 case kPreFreeAndQueue: | |
831 PreFreeMemory(chunk); | |
832 // The chunks added to this queue will be freed by a concurrent thread. | |
833 unmapper()->AddMemoryChunkSafe(chunk); | |
834 break; | |
835 default: | |
836 UNREACHABLE(); | |
837 } | 766 } |
838 } | 767 } |
839 | 768 |
840 template void MemoryAllocator::Free<MemoryAllocator::kFull>(MemoryChunk* chunk); | 769 template void MemoryAllocator::Free<MemoryAllocator::kRegular>( |
841 | |
842 template void MemoryAllocator::Free<MemoryAllocator::kPreFreeAndQueue>( | |
843 MemoryChunk* chunk); | 770 MemoryChunk* chunk); |
844 | 771 |
845 template void MemoryAllocator::Free<MemoryAllocator::kPooledAndQueue>( | 772 template void MemoryAllocator::Free<MemoryAllocator::kPooled>( |
846 MemoryChunk* chunk); | 773 MemoryChunk* chunk); |
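
Free() is defined in this .cc file as a template over the free mode, so the explicit instantiations above are the only symbols other translation units can link against; an uninstantiated mode fails at link time rather than compile time. A minimal sketch of the pattern (hypothetical names):

    enum FreeMode { kFull, kPreFreeAndQueue, kPooledAndQueue };

    template <FreeMode mode>
    void FreeSketch(void* chunk) {
      // `mode` is a compile-time constant, so the mode dispatch in the real
      // Free() folds away in each instantiation.
    }

    // Emit exactly these symbols for other translation units.
    template void FreeSketch<kFull>(void*);
    template void FreeSketch<kPreFreeAndQueue>(void*);
    template void FreeSketch<kPooledAndQueue>(void*);
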
847 | 774 |
848 template <typename PageType, MemoryAllocator::AllocationMode mode, | 775 template <typename PageType, MemoryAllocator::AllocationMode mode, |
849 typename SpaceType> | 776 typename SpaceType> |
850 PageType* MemoryAllocator::AllocatePage(intptr_t size, SpaceType* owner, | 777 PageType* MemoryAllocator::AllocatePage(intptr_t size, SpaceType* owner, |
851 Executability executable) { | 778 Executability executable) { |
852 MemoryChunk* chunk = nullptr; | 779 MemoryChunk* chunk = nullptr; |
853 if (mode == kPooled) { | 780 if (mode == kPooled) { |
854 DCHECK_EQ(size, static_cast<intptr_t>(MemoryChunk::kAllocatableMemory)); | 781 DCHECK_EQ(size, static_cast<intptr_t>(MemoryChunk::kAllocatableMemory)); |
855 DCHECK_EQ(executable, NOT_EXECUTABLE); | 782 DCHECK_EQ(executable, NOT_EXECUTABLE); |
(...skipping 13 matching lines...) |
869 template LargePage* | 796 template LargePage* |
870 MemoryAllocator::AllocatePage<LargePage, MemoryAllocator::kRegular, Space>( | 797 MemoryAllocator::AllocatePage<LargePage, MemoryAllocator::kRegular, Space>( |
871 intptr_t, Space*, Executability); | 798 intptr_t, Space*, Executability); |
872 | 799 |
873 template NewSpacePage* MemoryAllocator::AllocatePage< | 800 template NewSpacePage* MemoryAllocator::AllocatePage< |
874 NewSpacePage, MemoryAllocator::kPooled, SemiSpace>(intptr_t, SemiSpace*, | 801 NewSpacePage, MemoryAllocator::kPooled, SemiSpace>(intptr_t, SemiSpace*, |
875 Executability); | 802 Executability); |
876 | 803 |
877 template <typename SpaceType> | 804 template <typename SpaceType> |
878 MemoryChunk* MemoryAllocator::AllocatePagePooled(SpaceType* owner) { | 805 MemoryChunk* MemoryAllocator::AllocatePagePooled(SpaceType* owner) { |
879 MemoryChunk* chunk = unmapper()->TryGetPooledMemoryChunkSafe(); | 806 if (chunk_pool_.is_empty()) return nullptr; |
880 if (chunk == nullptr) return nullptr; | |
881 const int size = MemoryChunk::kPageSize; | 807 const int size = MemoryChunk::kPageSize; |
| 808 MemoryChunk* chunk = chunk_pool_.RemoveLast(); |
882 const Address start = reinterpret_cast<Address>(chunk); | 809 const Address start = reinterpret_cast<Address>(chunk); |
883 const Address area_start = start + MemoryChunk::kObjectStartOffset; | 810 const Address area_start = start + MemoryChunk::kObjectStartOffset; |
884 const Address area_end = start + size; | 811 const Address area_end = start + size; |
885 CommitBlock(reinterpret_cast<Address>(chunk), size, NOT_EXECUTABLE); | 812 CommitBlock(reinterpret_cast<Address>(chunk), size, NOT_EXECUTABLE); |
886 base::VirtualMemory reservation(start, size); | 813 base::VirtualMemory reservation(start, size); |
887 MemoryChunk::Initialize(isolate_->heap(), start, size, area_start, area_end, | 814 MemoryChunk::Initialize(isolate_->heap(), start, size, area_start, area_end, |
888 NOT_EXECUTABLE, owner, &reservation); | 815 NOT_EXECUTABLE, owner, &reservation); |
889 size_.Increment(size); | 816 size_.Increment(size); |
890 return chunk; | 817 return chunk; |
891 } | 818 } |
892 | 819 |
| 820 void MemoryAllocator::FreePooled(MemoryChunk* chunk) { |
| 821 DCHECK_EQ(chunk->size(), static_cast<size_t>(MemoryChunk::kPageSize)); |
| 822 DCHECK_EQ(chunk->executable(), NOT_EXECUTABLE); |
| 823 chunk_pool_.Add(chunk); |
| 824 intptr_t chunk_size = static_cast<intptr_t>(chunk->size()); |
| 825 if (chunk->executable() == EXECUTABLE) { |
| 826 size_executable_.Increment(-chunk_size); |
| 827 } |
| 828 size_.Increment(-chunk_size); |
| 829 UncommitBlock(reinterpret_cast<Address>(chunk), MemoryChunk::kPageSize); |
| 830 } |
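
Two observations on the right column's FreePooled(): the DCHECK_EQ above pins chunk->executable() to NOT_EXECUTABLE, so the size_executable_ branch is dead in debug-checked builds, and the pool itself is a plain LIFO list. A standalone sketch of the pool discipline, with std::vector standing in for List<MemoryChunk*> and the Commit/Uncommit calls elided (names are illustrative):

    #include <cstddef>
    #include <vector>

    std::vector<void*> chunk_pool;  // stands in for chunk_pool_

    // Mirrors FreePooled(): park the page; the real code then uncommits it.
    void FreePooledSketch(void* chunk) {
      chunk_pool.push_back(chunk);  // chunk_pool_.Add(chunk)
      // UncommitBlock(chunk, MemoryChunk::kPageSize) drops backing pages here.
    }

    // Mirrors AllocatePagePooled(): reuse the most recently pooled page.
    void* AllocatePagePooledSketch() {
      if (chunk_pool.empty()) return nullptr;  // caller allocates a fresh page
      void* chunk = chunk_pool.back();         // chunk_pool_.RemoveLast()
      chunk_pool.pop_back();
      // CommitBlock(chunk, MemoryChunk::kPageSize, NOT_EXECUTABLE) recommits.
      return chunk;
    }
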
| 831 |
893 bool MemoryAllocator::CommitBlock(Address start, size_t size, | 832 bool MemoryAllocator::CommitBlock(Address start, size_t size, |
894 Executability executable) { | 833 Executability executable) { |
895 if (!CommitMemory(start, size, executable)) return false; | 834 if (!CommitMemory(start, size, executable)) return false; |
896 | 835 |
897 if (Heap::ShouldZapGarbage()) { | 836 if (Heap::ShouldZapGarbage()) { |
898 ZapBlock(start, size); | 837 ZapBlock(start, size); |
899 } | 838 } |
900 | 839 |
901 isolate_->counters()->memory_allocated()->Increment(static_cast<int>(size)); | 840 isolate_->counters()->memory_allocated()->Increment(static_cast<int>(size)); |
902 return true; | 841 return true; |
(...skipping 122 matching lines...) |
1025 vm->Uncommit(header, header_size); | 964 vm->Uncommit(header, header_size); |
1026 } | 965 } |
1027 return false; | 966 return false; |
1028 } | 967 } |
1029 | 968 |
1030 | 969 |
1031 // ----------------------------------------------------------------------------- | 970 // ----------------------------------------------------------------------------- |
1032 // MemoryChunk implementation | 971 // MemoryChunk implementation |
1033 | 972 |
1034 void MemoryChunk::ReleaseAllocatedMemory() { | 973 void MemoryChunk::ReleaseAllocatedMemory() { |
1035 if (skip_list_ != nullptr) { | 974 delete skip_list_; |
1036 delete skip_list_; | 975 skip_list_ = nullptr; |
1037 skip_list_ = nullptr; | 976 delete mutex_; |
1038 } | 977 mutex_ = nullptr; |
1039 if (mutex_ != nullptr) { | 978 ReleaseOldToNewSlots(); |
1040 delete mutex_; | 979 ReleaseOldToOldSlots(); |
1041 mutex_ = nullptr; | |
1042 } | |
1043 if (old_to_new_slots_ != nullptr) ReleaseOldToNewSlots(); | |
1044 if (old_to_old_slots_ != nullptr) ReleaseOldToOldSlots(); | |
1045 } | 980 } |
1046 | 981 |
1047 static SlotSet* AllocateSlotSet(size_t size, Address page_start) { | 982 static SlotSet* AllocateSlotSet(size_t size, Address page_start) { |
1048 size_t pages = (size + Page::kPageSize - 1) / Page::kPageSize; | 983 size_t pages = (size + Page::kPageSize - 1) / Page::kPageSize; |
1049 DCHECK(pages > 0); | 984 DCHECK(pages > 0); |
1050 SlotSet* slot_set = new SlotSet[pages]; | 985 SlotSet* slot_set = new SlotSet[pages]; |
1051 for (size_t i = 0; i < pages; i++) { | 986 for (size_t i = 0; i < pages; i++) { |
1052 slot_set[i].SetPageStart(page_start + i * Page::kPageSize); | 987 slot_set[i].SetPageStart(page_start + i * Page::kPageSize); |
1053 } | 988 } |
1054 return slot_set; | 989 return slot_set; |
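
AllocateSlotSet() sizes its array with a round-up division, so a buffer covering any fraction of a page still gets a slot set for that page. The arithmetic, checked at compile time (sketch; the 4096 page size is illustrative):

    #include <cstddef>

    constexpr size_t CeilDiv(size_t size, size_t page) {
      return (size + page - 1) / page;
    }
    static_assert(CeilDiv(1, 4096) == 1, "a partial page still counts");
    static_assert(CeilDiv(4096, 4096) == 1, "an exact fit needs one");
    static_assert(CeilDiv(4097, 4096) == 2, "one byte over adds a page");
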
(...skipping 63 matching lines...) |
1118 | 1053 |
1119 bool PagedSpace::SetUp() { return true; } | 1054 bool PagedSpace::SetUp() { return true; } |
1120 | 1055 |
1121 | 1056 |
1122 bool PagedSpace::HasBeenSetUp() { return true; } | 1057 bool PagedSpace::HasBeenSetUp() { return true; } |
1123 | 1058 |
1124 | 1059 |
1125 void PagedSpace::TearDown() { | 1060 void PagedSpace::TearDown() { |
1126 PageIterator iterator(this); | 1061 PageIterator iterator(this); |
1127 while (iterator.has_next()) { | 1062 while (iterator.has_next()) { |
1128 heap()->memory_allocator()->Free<MemoryAllocator::kFull>(iterator.next()); | 1063 heap()->memory_allocator()->Free(iterator.next()); |
1129 } | 1064 } |
1130 anchor_.set_next_page(&anchor_); | 1065 anchor_.set_next_page(&anchor_); |
1131 anchor_.set_prev_page(&anchor_); | 1066 anchor_.set_prev_page(&anchor_); |
1132 accounting_stats_.Clear(); | 1067 accounting_stats_.Clear(); |
1133 } | 1068 } |
1134 | 1069 |
1135 void PagedSpace::RefillFreeList() { | 1070 void PagedSpace::RefillFreeList() { |
1136 // Any PagedSpace might invoke RefillFreeList. We filter all but our old | 1071 // Any PagedSpace might invoke RefillFreeList. We filter all but our old |
1137 // generation spaces out. | 1072 // generation spaces out. |
1138 if (identity() != OLD_SPACE && identity() != CODE_SPACE && | 1073 if (identity() != OLD_SPACE && identity() != CODE_SPACE && |
(...skipping 170 matching lines...) |
1309 allocation_info_.Reset(nullptr, nullptr); | 1244 allocation_info_.Reset(nullptr, nullptr); |
1310 } | 1245 } |
1311 | 1246 |
1312 // If page is still in a list, unlink it from that list. | 1247 // If page is still in a list, unlink it from that list. |
1313 if (page->next_chunk() != NULL) { | 1248 if (page->next_chunk() != NULL) { |
1314 DCHECK(page->prev_chunk() != NULL); | 1249 DCHECK(page->prev_chunk() != NULL); |
1315 page->Unlink(); | 1250 page->Unlink(); |
1316 } | 1251 } |
1317 | 1252 |
1318 AccountUncommitted(static_cast<intptr_t>(page->size())); | 1253 AccountUncommitted(static_cast<intptr_t>(page->size())); |
1319 heap()->memory_allocator()->Free<MemoryAllocator::kPreFreeAndQueue>(page); | 1254 heap()->QueueMemoryChunkForFree(page); |
1320 | 1255 |
1321 DCHECK(Capacity() > 0); | 1256 DCHECK(Capacity() > 0); |
1322 accounting_stats_.ShrinkSpace(AreaSize()); | 1257 accounting_stats_.ShrinkSpace(AreaSize()); |
1323 } | 1258 } |
1324 | 1259 |
1325 #ifdef DEBUG | 1260 #ifdef DEBUG |
1326 void PagedSpace::Print() {} | 1261 void PagedSpace::Print() {} |
1327 #endif | 1262 #endif |
1328 | 1263 |
1329 #ifdef VERIFY_HEAP | 1264 #ifdef VERIFY_HEAP |
(...skipping 443 matching lines...) |
1773 } | 1708 } |
1774 committed_ = true; | 1709 committed_ = true; |
1775 return true; | 1710 return true; |
1776 } | 1711 } |
1777 | 1712 |
1778 | 1713 |
1779 bool SemiSpace::Uncommit() { | 1714 bool SemiSpace::Uncommit() { |
1780 DCHECK(is_committed()); | 1715 DCHECK(is_committed()); |
1781 NewSpacePageIterator it(this); | 1716 NewSpacePageIterator it(this); |
1782 while (it.has_next()) { | 1717 while (it.has_next()) { |
1783 heap()->memory_allocator()->Free<MemoryAllocator::kPooledAndQueue>( | 1718 heap()->memory_allocator()->Free<MemoryAllocator::kPooled>(it.next()); |
1784 it.next()); | |
1785 } | 1719 } |
1786 anchor()->set_next_page(anchor()); | 1720 anchor()->set_next_page(anchor()); |
1787 anchor()->set_prev_page(anchor()); | 1721 anchor()->set_prev_page(anchor()); |
1788 AccountUncommitted(current_capacity_); | 1722 AccountUncommitted(current_capacity_); |
1789 committed_ = false; | 1723 committed_ = false; |
1790 heap()->memory_allocator()->unmapper()->FreeQueuedChunks(); | |
1791 return true; | 1724 return true; |
1792 } | 1725 } |
1793 | 1726 |
1794 | 1727 |
1795 size_t SemiSpace::CommittedPhysicalMemory() { | 1728 size_t SemiSpace::CommittedPhysicalMemory() { |
1796 if (!is_committed()) return 0; | 1729 if (!is_committed()) return 0; |
1797 size_t size = 0; | 1730 size_t size = 0; |
1798 NewSpacePageIterator it(this); | 1731 NewSpacePageIterator it(this); |
1799 while (it.has_next()) { | 1732 while (it.has_next()) { |
1800 size += it.next()->CommittedPhysicalMemory(); | 1733 size += it.next()->CommittedPhysicalMemory(); |
(...skipping 57 matching lines...) |
1858 const int delta = current_capacity_ - new_capacity; | 1791 const int delta = current_capacity_ - new_capacity; |
1859 DCHECK(IsAligned(delta, base::OS::AllocateAlignment())); | 1792 DCHECK(IsAligned(delta, base::OS::AllocateAlignment())); |
1860 int delta_pages = delta / NewSpacePage::kPageSize; | 1793 int delta_pages = delta / NewSpacePage::kPageSize; |
1861 NewSpacePage* new_last_page; | 1794 NewSpacePage* new_last_page; |
1862 NewSpacePage* last_page; | 1795 NewSpacePage* last_page; |
1863 while (delta_pages > 0) { | 1796 while (delta_pages > 0) { |
1864 last_page = anchor()->prev_page(); | 1797 last_page = anchor()->prev_page(); |
1865 new_last_page = last_page->prev_page(); | 1798 new_last_page = last_page->prev_page(); |
1866 new_last_page->set_next_page(anchor()); | 1799 new_last_page->set_next_page(anchor()); |
1867 anchor()->set_prev_page(new_last_page); | 1800 anchor()->set_prev_page(new_last_page); |
1868 heap()->memory_allocator()->Free<MemoryAllocator::kPooledAndQueue>( | 1801 heap()->memory_allocator()->Free<MemoryAllocator::kPooled>(last_page); |
1869 last_page); | |
1870 delta_pages--; | 1802 delta_pages--; |
1871 } | 1803 } |
1872 AccountUncommitted(static_cast<intptr_t>(delta)); | 1804 AccountUncommitted(static_cast<intptr_t>(delta)); |
1873 heap()->memory_allocator()->unmapper()->FreeQueuedChunks(); | |
1874 } | 1805 } |
1875 current_capacity_ = new_capacity; | 1806 current_capacity_ = new_capacity; |
1876 return true; | 1807 return true; |
1877 } | 1808 } |
1878 | 1809 |
1879 void SemiSpace::FixPagesFlags(intptr_t flags, intptr_t mask) { | 1810 void SemiSpace::FixPagesFlags(intptr_t flags, intptr_t mask) { |
1880 anchor_.set_owner(this); | 1811 anchor_.set_owner(this); |
1881 // Fixup back-pointers to anchor. Address of anchor changes when we swap. | 1812 // Fixup back-pointers to anchor. Address of anchor changes when we swap. |
1882 anchor_.prev_page()->set_next_page(&anchor_); | 1813 anchor_.prev_page()->set_next_page(&anchor_); |
1883 anchor_.next_page()->set_prev_page(&anchor_); | 1814 anchor_.next_page()->set_prev_page(&anchor_); |
(...skipping 1072 matching lines...) |
2956 | 2887 |
2957 void LargeObjectSpace::TearDown() { | 2888 void LargeObjectSpace::TearDown() { |
2958 while (first_page_ != NULL) { | 2889 while (first_page_ != NULL) { |
2959 LargePage* page = first_page_; | 2890 LargePage* page = first_page_; |
2960 first_page_ = first_page_->next_page(); | 2891 first_page_ = first_page_->next_page(); |
2961 LOG(heap()->isolate(), DeleteEvent("LargeObjectChunk", page->address())); | 2892 LOG(heap()->isolate(), DeleteEvent("LargeObjectChunk", page->address())); |
2962 | 2893 |
2963 ObjectSpace space = static_cast<ObjectSpace>(1 << identity()); | 2894 ObjectSpace space = static_cast<ObjectSpace>(1 << identity()); |
2964 heap()->memory_allocator()->PerformAllocationCallback( | 2895 heap()->memory_allocator()->PerformAllocationCallback( |
2965 space, kAllocationActionFree, page->size()); | 2896 space, kAllocationActionFree, page->size()); |
2966 heap()->memory_allocator()->Free<MemoryAllocator::kFull>(page); | 2897 heap()->memory_allocator()->Free(page); |
2967 } | 2898 } |
2968 SetUp(); | 2899 SetUp(); |
2969 } | 2900 } |
2970 | 2901 |
2971 | 2902 |
2972 AllocationResult LargeObjectSpace::AllocateRaw(int object_size, | 2903 AllocationResult LargeObjectSpace::AllocateRaw(int object_size, |
2973 Executability executable) { | 2904 Executability executable) { |
2974 // Check if we want to force a GC before growing the old space further. | 2905 // Check if we want to force a GC before growing the old space further. |
2975 // If so, fail the allocation. | 2906 // If so, fail the allocation. |
2976 if (!heap()->CanExpandOldGeneration(object_size)) { | 2907 if (!heap()->CanExpandOldGeneration(object_size)) { |
(...skipping 122 matching lines...) |
3099 // Use variable alignment to help pass length check (<= 80 characters) | 3030 // Use variable alignment to help pass length check (<= 80 characters) |
3100 // of single line in tools/presubmit.py. | 3031 // of single line in tools/presubmit.py. |
3101 const intptr_t alignment = MemoryChunk::kAlignment; | 3032 const intptr_t alignment = MemoryChunk::kAlignment; |
3102 uintptr_t base = reinterpret_cast<uintptr_t>(page) / alignment; | 3033 uintptr_t base = reinterpret_cast<uintptr_t>(page) / alignment; |
3103 uintptr_t limit = base + (page->size() - 1) / alignment; | 3034 uintptr_t limit = base + (page->size() - 1) / alignment; |
3104 for (uintptr_t key = base; key <= limit; key++) { | 3035 for (uintptr_t key = base; key <= limit; key++) { |
3105 chunk_map_.Remove(reinterpret_cast<void*>(key), | 3036 chunk_map_.Remove(reinterpret_cast<void*>(key), |
3106 static_cast<uint32_t>(key)); | 3037 static_cast<uint32_t>(key)); |
3107 } | 3038 } |
3108 | 3039 |
3109 heap()->memory_allocator()->Free<MemoryAllocator::kPreFreeAndQueue>(page); | 3040 heap()->QueueMemoryChunkForFree(page); |
3110 } | 3041 } |
3111 } | 3042 } |
3112 } | 3043 } |
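
The loop above removes one chunk-map entry per kAlignment-sized slot the large page overlaps; registering every slot is what lets MemoryChunk lookups on interior addresses find the owning page. A sketch of the key-range computation (the 512K alignment constant is an assumption for illustration):

    #include <cstddef>
    #include <cstdint>

    constexpr uintptr_t kAlignmentSketch = uintptr_t{1} << 19;  // assumed 512K

    // First and last hash keys covering a large page [page, page + size).
    constexpr uintptr_t FirstKey(uintptr_t page) {
      return page / kAlignmentSketch;
    }
    constexpr uintptr_t LastKey(uintptr_t page, size_t size) {
      return FirstKey(page) + (size - 1) / kAlignmentSketch;
    }
    // A page of `size` bytes therefore occupies
    // (size - 1) / kAlignmentSketch + 1 consecutive keys.
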
3113 | 3044 |
3114 | 3045 |
3115 bool LargeObjectSpace::Contains(HeapObject* object) { | 3046 bool LargeObjectSpace::Contains(HeapObject* object) { |
3116 Address address = object->address(); | 3047 Address address = object->address(); |
3117 MemoryChunk* chunk = MemoryChunk::FromAddress(address); | 3048 MemoryChunk* chunk = MemoryChunk::FromAddress(address); |
3118 | 3049 |
3119 bool owned = (chunk->owner() == this); | 3050 bool owned = (chunk->owner() == this); |
(...skipping 110 matching lines...) |
3230 object->ShortPrint(); | 3161 object->ShortPrint(); |
3231 PrintF("\n"); | 3162 PrintF("\n"); |
3232 } | 3163 } |
3233 printf(" --------------------------------------\n"); | 3164 printf(" --------------------------------------\n"); |
3234 printf(" Marked: %x, LiveCount: %x\n", mark_size, LiveBytes()); | 3165 printf(" Marked: %x, LiveCount: %x\n", mark_size, LiveBytes()); |
3235 } | 3166 } |
3236 | 3167 |
3237 #endif // DEBUG | 3168 #endif // DEBUG |
3238 } // namespace internal | 3169 } // namespace internal |
3239 } // namespace v8 | 3170 } // namespace v8 |