OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 114 matching lines...)
125 | 125 |
126 CodeRange::CodeRange(Isolate* isolate) | 126 CodeRange::CodeRange(Isolate* isolate) |
127 : isolate_(isolate), | 127 : isolate_(isolate), |
128 code_range_(NULL), | 128 code_range_(NULL), |
129 free_list_(0), | 129 free_list_(0), |
130 allocation_list_(0), | 130 allocation_list_(0), |
131 current_allocation_block_index_(0) { | 131 current_allocation_block_index_(0) { |
132 } | 132 } |
133 | 133 |
134 | 134 |
135 bool CodeRange::Setup(const size_t requested) { | 135 bool CodeRange::SetUp(const size_t requested) { |
136 ASSERT(code_range_ == NULL); | 136 ASSERT(code_range_ == NULL); |
137 | 137 |
138 code_range_ = new VirtualMemory(requested); | 138 code_range_ = new VirtualMemory(requested); |
139 CHECK(code_range_ != NULL); | 139 CHECK(code_range_ != NULL); |
140 if (!code_range_->IsReserved()) { | 140 if (!code_range_->IsReserved()) { |
141 delete code_range_; | 141 delete code_range_; |
142 code_range_ = NULL; | 142 code_range_ = NULL; |
143 return false; | 143 return false; |
144 } | 144 } |
145 | 145 |
(...skipping 115 matching lines...)
261 | 261 |
262 MemoryAllocator::MemoryAllocator(Isolate* isolate) | 262 MemoryAllocator::MemoryAllocator(Isolate* isolate) |
263 : isolate_(isolate), | 263 : isolate_(isolate), |
264 capacity_(0), | 264 capacity_(0), |
265 capacity_executable_(0), | 265 capacity_executable_(0), |
266 size_(0), | 266 size_(0), |
267 size_executable_(0) { | 267 size_executable_(0) { |
268 } | 268 } |
269 | 269 |
270 | 270 |
271 bool MemoryAllocator::Setup(intptr_t capacity, intptr_t capacity_executable) { | 271 bool MemoryAllocator::SetUp(intptr_t capacity, intptr_t capacity_executable) { |
272 capacity_ = RoundUp(capacity, Page::kPageSize); | 272 capacity_ = RoundUp(capacity, Page::kPageSize); |
273 capacity_executable_ = RoundUp(capacity_executable, Page::kPageSize); | 273 capacity_executable_ = RoundUp(capacity_executable, Page::kPageSize); |
274 ASSERT_GE(capacity_, capacity_executable_); | 274 ASSERT_GE(capacity_, capacity_executable_); |
275 | 275 |
276 size_ = 0; | 276 size_ = 0; |
277 size_executable_ = 0; | 277 size_executable_ = 0; |
278 | 278 |
279 return true; | 279 return true; |
280 } | 280 } |
281 | 281 |
(...skipping 381 matching lines...)
663 * Page::kObjectAreaSize; | 663 * Page::kObjectAreaSize; |
664 accounting_stats_.Clear(); | 664 accounting_stats_.Clear(); |
665 | 665 |
666 allocation_info_.top = NULL; | 666 allocation_info_.top = NULL; |
667 allocation_info_.limit = NULL; | 667 allocation_info_.limit = NULL; |
668 | 668 |
669 anchor_.InitializeAsAnchor(this); | 669 anchor_.InitializeAsAnchor(this); |
670 } | 670 } |
671 | 671 |
672 | 672 |
673 bool PagedSpace::Setup() { | 673 bool PagedSpace::SetUp() { |
674 return true; | 674 return true; |
675 } | 675 } |
676 | 676 |
677 | 677 |
678 bool PagedSpace::HasBeenSetup() { | 678 bool PagedSpace::HasBeenSetUp() { |
679 return true; | 679 return true; |
680 } | 680 } |
681 | 681 |
682 | 682 |
683 void PagedSpace::TearDown() { | 683 void PagedSpace::TearDown() { |
684 PageIterator iterator(this); | 684 PageIterator iterator(this); |
685 while (iterator.has_next()) { | 685 while (iterator.has_next()) { |
686 heap()->isolate()->memory_allocator()->Free(iterator.next()); | 686 heap()->isolate()->memory_allocator()->Free(iterator.next()); |
687 } | 687 } |
688 anchor_.set_next_page(&anchor_); | 688 anchor_.set_next_page(&anchor_); |
(...skipping 177 matching lines...)
866 } | 866 } |
867 ASSERT(allocation_pointer_found_in_space); | 867 ASSERT(allocation_pointer_found_in_space); |
868 } | 868 } |
869 #endif | 869 #endif |
870 | 870 |
871 | 871 |
872 // ----------------------------------------------------------------------------- | 872 // ----------------------------------------------------------------------------- |
873 // NewSpace implementation | 873 // NewSpace implementation |
874 | 874 |
875 | 875 |
876 bool NewSpace::Setup(int reserved_semispace_capacity, | 876 bool NewSpace::SetUp(int reserved_semispace_capacity, |
877 int maximum_semispace_capacity) { | 877 int maximum_semispace_capacity) { |
878 // Setup new space based on the preallocated memory block defined by | 878 // Set up new space based on the preallocated memory block defined by |
879 // start and size. The provided space is divided into two semi-spaces. | 879 // start and size. The provided space is divided into two semi-spaces. |
880 // To support fast containment testing in the new space, the size of | 880 // To support fast containment testing in the new space, the size of |
881 // this chunk must be a power of two and it must be aligned to its size. | 881 // this chunk must be a power of two and it must be aligned to its size. |
882 int initial_semispace_capacity = heap()->InitialSemiSpaceSize(); | 882 int initial_semispace_capacity = heap()->InitialSemiSpaceSize(); |
883 | 883 |
884 size_t size = 2 * reserved_semispace_capacity; | 884 size_t size = 2 * reserved_semispace_capacity; |
885 Address base = | 885 Address base = |
886 heap()->isolate()->memory_allocator()->ReserveAlignedMemory( | 886 heap()->isolate()->memory_allocator()->ReserveAlignedMemory( |
887 size, size, &reservation_); | 887 size, size, &reservation_); |
888 if (base == NULL) return false; | 888 if (base == NULL) return false; |
889 | 889 |
890 chunk_base_ = base; | 890 chunk_base_ = base; |
891 chunk_size_ = static_cast<uintptr_t>(size); | 891 chunk_size_ = static_cast<uintptr_t>(size); |
892 LOG(heap()->isolate(), NewEvent("InitialChunk", chunk_base_, chunk_size_)); | 892 LOG(heap()->isolate(), NewEvent("InitialChunk", chunk_base_, chunk_size_)); |
893 | 893 |
894 ASSERT(initial_semispace_capacity <= maximum_semispace_capacity); | 894 ASSERT(initial_semispace_capacity <= maximum_semispace_capacity); |
895 ASSERT(IsPowerOf2(maximum_semispace_capacity)); | 895 ASSERT(IsPowerOf2(maximum_semispace_capacity)); |
896 | 896 |
897 // Allocate and setup the histogram arrays if necessary. | 897 // Allocate and set up the histogram arrays if necessary. |
898 allocated_histogram_ = NewArray<HistogramInfo>(LAST_TYPE + 1); | 898 allocated_histogram_ = NewArray<HistogramInfo>(LAST_TYPE + 1); |
899 promoted_histogram_ = NewArray<HistogramInfo>(LAST_TYPE + 1); | 899 promoted_histogram_ = NewArray<HistogramInfo>(LAST_TYPE + 1); |
900 | 900 |
901 #define SET_NAME(name) allocated_histogram_[name].set_name(#name); \ | 901 #define SET_NAME(name) allocated_histogram_[name].set_name(#name); \ |
902 promoted_histogram_[name].set_name(#name); | 902 promoted_histogram_[name].set_name(#name); |
903 INSTANCE_TYPE_LIST(SET_NAME) | 903 INSTANCE_TYPE_LIST(SET_NAME) |
904 #undef SET_NAME | 904 #undef SET_NAME |
905 | 905 |
906 ASSERT(reserved_semispace_capacity == heap()->ReservedSemiSpaceSize()); | 906 ASSERT(reserved_semispace_capacity == heap()->ReservedSemiSpaceSize()); |
907 ASSERT(static_cast<intptr_t>(chunk_size_) >= | 907 ASSERT(static_cast<intptr_t>(chunk_size_) >= |
908 2 * heap()->ReservedSemiSpaceSize()); | 908 2 * heap()->ReservedSemiSpaceSize()); |
909 ASSERT(IsAddressAligned(chunk_base_, 2 * reserved_semispace_capacity, 0)); | 909 ASSERT(IsAddressAligned(chunk_base_, 2 * reserved_semispace_capacity, 0)); |
910 | 910 |
911 if (!to_space_.Setup(chunk_base_, | 911 if (!to_space_.SetUp(chunk_base_, |
912 initial_semispace_capacity, | 912 initial_semispace_capacity, |
913 maximum_semispace_capacity)) { | 913 maximum_semispace_capacity)) { |
914 return false; | 914 return false; |
915 } | 915 } |
916 if (!from_space_.Setup(chunk_base_ + reserved_semispace_capacity, | 916 if (!from_space_.SetUp(chunk_base_ + reserved_semispace_capacity, |
917 initial_semispace_capacity, | 917 initial_semispace_capacity, |
918 maximum_semispace_capacity)) { | 918 maximum_semispace_capacity)) { |
919 return false; | 919 return false; |
920 } | 920 } |
921 | 921 |
922 start_ = chunk_base_; | 922 start_ = chunk_base_; |
923 address_mask_ = ~(2 * reserved_semispace_capacity - 1); | 923 address_mask_ = ~(2 * reserved_semispace_capacity - 1); |
924 object_mask_ = address_mask_ | kHeapObjectTagMask; | 924 object_mask_ = address_mask_ | kHeapObjectTagMask; |
925 object_expected_ = reinterpret_cast<uintptr_t>(start_) | kHeapObjectTag; | 925 object_expected_ = reinterpret_cast<uintptr_t>(start_) | kHeapObjectTag; |
926 | 926 |
(...skipping 214 matching lines...)
1141 ASSERT_EQ(from_space_.id(), kFromSpace); | 1141 ASSERT_EQ(from_space_.id(), kFromSpace); |
1142 ASSERT_EQ(to_space_.id(), kToSpace); | 1142 ASSERT_EQ(to_space_.id(), kToSpace); |
1143 from_space_.Verify(); | 1143 from_space_.Verify(); |
1144 to_space_.Verify(); | 1144 to_space_.Verify(); |
1145 } | 1145 } |
1146 #endif | 1146 #endif |
1147 | 1147 |
1148 // ----------------------------------------------------------------------------- | 1148 // ----------------------------------------------------------------------------- |
1149 // SemiSpace implementation | 1149 // SemiSpace implementation |
1150 | 1150 |
1151 bool SemiSpace::Setup(Address start, | 1151 bool SemiSpace::SetUp(Address start, |
1152 int initial_capacity, | 1152 int initial_capacity, |
1153 int maximum_capacity) { | 1153 int maximum_capacity) { |
1154 // Creates a space in the young generation. The constructor does not | 1154 // Creates a space in the young generation. The constructor does not |
1155 // allocate memory from the OS. A SemiSpace is given a contiguous chunk of | 1155 // allocate memory from the OS. A SemiSpace is given a contiguous chunk of |
1156 // memory of size 'capacity' when set up, and does not grow or shrink | 1156 // memory of size 'capacity' when set up, and does not grow or shrink |
1157 // otherwise. In the mark-compact collector, the memory region of the from | 1157 // otherwise. In the mark-compact collector, the memory region of the from |
1158 // space is used as the marking stack. It requires contiguous memory | 1158 // space is used as the marking stack. It requires contiguous memory |
1159 // addresses. | 1159 // addresses. |
1160 ASSERT(maximum_capacity >= Page::kPageSize); | 1160 ASSERT(maximum_capacity >= Page::kPageSize); |
1161 initial_capacity_ = RoundDown(initial_capacity, Page::kPageSize); | 1161 initial_capacity_ = RoundDown(initial_capacity, Page::kPageSize); |
(...skipping 1239 matching lines...)
2401 intptr_t max_capacity, | 2401 intptr_t max_capacity, |
2402 AllocationSpace id) | 2402 AllocationSpace id) |
2403 : Space(heap, id, NOT_EXECUTABLE), // Managed on a per-allocation basis | 2403 : Space(heap, id, NOT_EXECUTABLE), // Managed on a per-allocation basis |
2404 max_capacity_(max_capacity), | 2404 max_capacity_(max_capacity), |
2405 first_page_(NULL), | 2405 first_page_(NULL), |
2406 size_(0), | 2406 size_(0), |
2407 page_count_(0), | 2407 page_count_(0), |
2408 objects_size_(0) {} | 2408 objects_size_(0) {} |
2409 | 2409 |
2410 | 2410 |
2411 bool LargeObjectSpace::Setup() { | 2411 bool LargeObjectSpace::SetUp() { |
2412 first_page_ = NULL; | 2412 first_page_ = NULL; |
2413 size_ = 0; | 2413 size_ = 0; |
2414 page_count_ = 0; | 2414 page_count_ = 0; |
2415 objects_size_ = 0; | 2415 objects_size_ = 0; |
2416 return true; | 2416 return true; |
2417 } | 2417 } |
2418 | 2418 |
2419 | 2419 |
2420 void LargeObjectSpace::TearDown() { | 2420 void LargeObjectSpace::TearDown() { |
2421 while (first_page_ != NULL) { | 2421 while (first_page_ != NULL) { |
2422 LargePage* page = first_page_; | 2422 LargePage* page = first_page_; |
2423 first_page_ = first_page_->next_page(); | 2423 first_page_ = first_page_->next_page(); |
2424 LOG(heap()->isolate(), DeleteEvent("LargeObjectChunk", page->address())); | 2424 LOG(heap()->isolate(), DeleteEvent("LargeObjectChunk", page->address())); |
2425 | 2425 |
2426 ObjectSpace space = static_cast<ObjectSpace>(1 << identity()); | 2426 ObjectSpace space = static_cast<ObjectSpace>(1 << identity()); |
2427 heap()->isolate()->memory_allocator()->PerformAllocationCallback( | 2427 heap()->isolate()->memory_allocator()->PerformAllocationCallback( |
2428 space, kAllocationActionFree, page->size()); | 2428 space, kAllocationActionFree, page->size()); |
2429 heap()->isolate()->memory_allocator()->Free(page); | 2429 heap()->isolate()->memory_allocator()->Free(page); |
2430 } | 2430 } |
2431 Setup(); | 2431 SetUp(); |
2432 } | 2432 } |
2433 | 2433 |
2434 | 2434 |
2435 MaybeObject* LargeObjectSpace::AllocateRaw(int object_size, | 2435 MaybeObject* LargeObjectSpace::AllocateRaw(int object_size, |
2436 Executability executable) { | 2436 Executability executable) { |
2437 // Check if we want to force a GC before growing the old space further. | 2437 // Check if we want to force a GC before growing the old space further. |
2438 // If so, fail the allocation. | 2438 // If so, fail the allocation. |
2439 if (!heap()->always_allocate() && | 2439 if (!heap()->always_allocate() && |
2440 heap()->OldGenerationAllocationLimitReached()) { | 2440 heap()->OldGenerationAllocationLimitReached()) { |
2441 return Failure::RetryAfterGC(identity()); | 2441 return Failure::RetryAfterGC(identity()); |
(...skipping 216 matching lines...)
2658 object->ShortPrint(); | 2658 object->ShortPrint(); |
2659 PrintF("\n"); | 2659 PrintF("\n"); |
2660 } | 2660 } |
2661 printf(" --------------------------------------\n"); | 2661 printf(" --------------------------------------\n"); |
2662 printf(" Marked: %x, LiveCount: %x\n", mark_size, LiveBytes()); | 2662 printf(" Marked: %x, LiveCount: %x\n", mark_size, LiveBytes()); |
2663 } | 2663 } |
2664 | 2664 |
2665 #endif // DEBUG | 2665 #endif // DEBUG |
2666 | 2666 |
2667 } } // namespace v8::internal | 2667 } } // namespace v8::internal |
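The change renames Setup/HasBeenSetup to SetUp/HasBeenSetUp across CodeRange, MemoryAllocator, PagedSpace, NewSpace, SemiSpace and LargeObjectSpace, matching the two-word "set up" wording now used in the comments. Below is a minimal, self-contained sketch of the SetUp / HasBeenSetUp / TearDown lifecycle these spaces follow; the Region class and its members are hypothetical illustration only, not V8 types.

```cpp
// Sketch of the SetUp/HasBeenSetUp/TearDown lifecycle (hypothetical Region
// class; the real V8 spaces manage pages through MemoryAllocator instead).
#include <cassert>
#include <cstddef>
#include <cstdlib>

class Region {
 public:
  Region() : base_(nullptr), size_(0) {}

  // SetUp acquires the backing memory and returns false on failure so the
  // caller can unwind, mirroring CodeRange::SetUp / NewSpace::SetUp above.
  bool SetUp(size_t requested) {
    assert(base_ == nullptr);
    base_ = static_cast<char*>(std::malloc(requested));
    if (base_ == nullptr) return false;
    size_ = requested;
    return true;
  }

  bool HasBeenSetUp() const { return base_ != nullptr; }

  // TearDown releases the memory and resets the fields so the object could
  // be set up again later.
  void TearDown() {
    std::free(base_);
    base_ = nullptr;
    size_ = 0;
  }

 private:
  char* base_;
  size_t size_;
};

int main() {
  Region r;
  if (!r.SetUp(4096)) return 1;
  assert(r.HasBeenSetUp());
  r.TearDown();
  return 0;
}
```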