OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_HEAP_SPACES_H_ | 5 #ifndef V8_HEAP_SPACES_H_ |
6 #define V8_HEAP_SPACES_H_ | 6 #define V8_HEAP_SPACES_H_ |
7 | 7 |
8 #include "src/allocation.h" | 8 #include "src/allocation.h" |
9 #include "src/base/atomicops.h" | 9 #include "src/base/atomicops.h" |
10 #include "src/base/bits.h" | 10 #include "src/base/bits.h" |
11 #include "src/base/platform/mutex.h" | 11 #include "src/base/platform/mutex.h" |
| 12 #include "src/flags.h" |
12 #include "src/hashmap.h" | 13 #include "src/hashmap.h" |
13 #include "src/list.h" | 14 #include "src/list.h" |
14 #include "src/log.h" | 15 #include "src/objects.h" |
15 #include "src/utils.h" | 16 #include "src/utils.h" |
16 | 17 |
17 namespace v8 { | 18 namespace v8 { |
18 namespace internal { | 19 namespace internal { |
19 | 20 |
20 class Isolate; | 21 class Isolate; |
21 | 22 |
22 // ----------------------------------------------------------------------------- | 23 // ----------------------------------------------------------------------------- |
23 // Heap structures: | 24 // Heap structures: |
24 // | 25 // |
(...skipping 624 matching lines...)
649 } | 650 } |
650 | 651 |
651 Address area_start() { return area_start_; } | 652 Address area_start() { return area_start_; } |
652 Address area_end() { return area_end_; } | 653 Address area_end() { return area_end_; } |
653 int area_size() { return static_cast<int>(area_end() - area_start()); } | 654 int area_size() { return static_cast<int>(area_end() - area_start()); } |
654 bool CommitArea(size_t requested); | 655 bool CommitArea(size_t requested); |
655 | 656 |
656 // Approximate amount of physical memory committed for this chunk. | 657 // Approximate amount of physical memory committed for this chunk. |
657 size_t CommittedPhysicalMemory() { return high_water_mark_; } | 658 size_t CommittedPhysicalMemory() { return high_water_mark_; } |
658 | 659 |
659 static inline void UpdateHighWaterMark(Address mark); | 660 static inline void UpdateHighWaterMark(Address mark) { |
| 661 if (mark == NULL) return; |
| 662 // Need to subtract one from the mark because when a chunk is full the |
| 663 // top points to the next address after the chunk, which effectively belongs |
| 664 // to another chunk. See the comment to Page::FromAllocationTop. |
| 665 MemoryChunk* chunk = MemoryChunk::FromAddress(mark - 1); |
| 666 int new_mark = static_cast<int>(mark - chunk->address()); |
| 667 if (new_mark > chunk->high_water_mark_) { |
| 668 chunk->high_water_mark_ = new_mark; |
| 669 } |
| 670 } |
660 | 671 |
661 protected: | 672 protected: |
662 size_t size_; | 673 size_t size_; |
663 intptr_t flags_; | 674 intptr_t flags_; |
664 | 675 |
665 // Start and end of allocatable memory on this chunk. | 676 // Start and end of allocatable memory on this chunk. |
666 Address area_start_; | 677 Address area_start_; |
667 Address area_end_; | 678 Address area_end_; |
668 | 679 |
669 // If the chunk needs to remember its memory reservation, it is stored here. | 680 // If the chunk needs to remember its memory reservation, it is stored here. |
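The decrement in the inlined UpdateHighWaterMark above is the subtle part: when a chunk is completely full, the allocation top is the first address of the next chunk. A minimal standalone sketch (not V8 code; it assumes power-of-two chunk alignment and a mask-based FromAddress, which matches the behavior the comment describes but is defined outside this diff) shows why the mark must be backed up by one byte before it is mapped to a chunk:

#include <cassert>
#include <cstdint>

// Assumed 1 MB chunk size and alignment; ChunkBase() stands in for a
// mask-based MemoryChunk::FromAddress.
constexpr uintptr_t kChunkSize = uintptr_t{1} << 20;

uintptr_t ChunkBase(uintptr_t addr) { return addr & ~(kChunkSize - 1); }

int main() {
  const uintptr_t chunk = 4 * kChunkSize;    // some aligned chunk start
  const uintptr_t top = chunk + kChunkSize;  // top of a completely full chunk
  // The raw top already lies in the next chunk...
  assert(ChunkBase(top) == chunk + kChunkSize);
  // ...so the update has to use (top - 1) to credit the right chunk.
  assert(ChunkBase(top - 1) == chunk);
  return 0;
}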
(...skipping 64 matching lines...)
734 // Returns the page containing an allocation top. Because an allocation | 745 // Returns the page containing an allocation top. Because an allocation |
 735 // top address can be the upper bound of the page, we first need to | 746 // top address can be the upper bound of the page, we first need to |
 736 // subtract kPointerSize from it. The address ranges from | 747 // subtract kPointerSize from it. The address ranges from |
737 // [page_addr + kObjectStartOffset .. page_addr + kPageSize]. | 748 // [page_addr + kObjectStartOffset .. page_addr + kPageSize]. |
738 INLINE(static Page* FromAllocationTop(Address top)) { | 749 INLINE(static Page* FromAllocationTop(Address top)) { |
739 Page* p = FromAddress(top - kPointerSize); | 750 Page* p = FromAddress(top - kPointerSize); |
740 return p; | 751 return p; |
741 } | 752 } |
742 | 753 |
743 // Returns the next page in the chain of pages owned by a space. | 754 // Returns the next page in the chain of pages owned by a space. |
744 inline Page* next_page(); | 755 inline Page* next_page() { |
745 inline Page* prev_page(); | 756 DCHECK(next_chunk()->owner() == owner()); |
| 757 return static_cast<Page*>(next_chunk()); |
| 758 } |
| 759 inline Page* prev_page() { |
| 760 DCHECK(prev_chunk()->owner() == owner()); |
| 761 return static_cast<Page*>(prev_chunk()); |
| 762 } |
746 inline void set_next_page(Page* page); | 763 inline void set_next_page(Page* page); |
747 inline void set_prev_page(Page* page); | 764 inline void set_prev_page(Page* page); |
748 | 765 |
749 // Checks whether an address is page aligned. | 766 // Checks whether an address is page aligned. |
750 static bool IsAlignedToPageSize(Address a) { | 767 static bool IsAlignedToPageSize(Address a) { |
751 return 0 == (OffsetFrom(a) & kPageAlignmentMask); | 768 return 0 == (OffsetFrom(a) & kPageAlignmentMask); |
752 } | 769 } |
753 | 770 |
754 // Returns the offset of a given address to this page. | 771 // Returns the offset of a given address to this page. |
755 INLINE(int Offset(Address a)) { | 772 INLINE(int Offset(Address a)) { |
(...skipping 483 matching lines...)
1239 // iterator in order to be sure to visit these new objects. | 1256 // iterator in order to be sure to visit these new objects. |
1240 class HeapObjectIterator : public ObjectIterator { | 1257 class HeapObjectIterator : public ObjectIterator { |
1241 public: | 1258 public: |
1242 // Creates a new object iterator in a given space. | 1259 // Creates a new object iterator in a given space. |
1243 explicit HeapObjectIterator(PagedSpace* space); | 1260 explicit HeapObjectIterator(PagedSpace* space); |
1244 explicit HeapObjectIterator(Page* page); | 1261 explicit HeapObjectIterator(Page* page); |
1245 | 1262 |
1246 // Advance to the next object, skipping free spaces and other fillers and | 1263 // Advance to the next object, skipping free spaces and other fillers and |
1247 // skipping the special garbage section of which there is one per space. | 1264 // skipping the special garbage section of which there is one per space. |
1248 // Returns NULL when the iteration has ended. | 1265 // Returns NULL when the iteration has ended. |
1249 inline HeapObject* Next() { | 1266 inline HeapObject* Next(); |
1250 do { | 1267 virtual inline HeapObject* next_object(); |
1251 HeapObject* next_obj = FromCurrentPage(); | |
1252 if (next_obj != NULL) return next_obj; | |
1253 } while (AdvanceToNextPage()); | |
1254 return NULL; | |
1255 } | |
1256 | |
1257 virtual HeapObject* next_object() { return Next(); } | |
1258 | 1268 |
1259 private: | 1269 private: |
1260 enum PageMode { kOnePageOnly, kAllPagesInSpace }; | 1270 enum PageMode { kOnePageOnly, kAllPagesInSpace }; |
1261 | 1271 |
1262 Address cur_addr_; // Current iteration point. | 1272 Address cur_addr_; // Current iteration point. |
1263 Address cur_end_; // End iteration point. | 1273 Address cur_end_; // End iteration point. |
1264 PagedSpace* space_; | 1274 PagedSpace* space_; |
1265 PageMode page_mode_; | 1275 PageMode page_mode_; |
1266 | 1276 |
1267 // Fast (inlined) path of next(). | 1277 // Fast (inlined) path of next(). |
(...skipping 370 matching lines...)
1638 if (IsRetry()) return false; | 1648 if (IsRetry()) return false; |
1639 *obj = T::cast(object_); | 1649 *obj = T::cast(object_); |
1640 return true; | 1650 return true; |
1641 } | 1651 } |
1642 | 1652 |
1643 Object* ToObjectChecked() { | 1653 Object* ToObjectChecked() { |
1644 CHECK(!IsRetry()); | 1654 CHECK(!IsRetry()); |
1645 return object_; | 1655 return object_; |
1646 } | 1656 } |
1647 | 1657 |
1648 AllocationSpace RetrySpace() { | 1658 inline AllocationSpace RetrySpace(); |
1649 DCHECK(IsRetry()); | |
1650 return static_cast<AllocationSpace>(Smi::cast(object_)->value()); | |
1651 } | |
1652 | 1659 |
1653 private: | 1660 private: |
1654 explicit AllocationResult(AllocationSpace space) | 1661 explicit AllocationResult(AllocationSpace space) |
1655 : object_(Smi::FromInt(static_cast<int>(space))) {} | 1662 : object_(Smi::FromInt(static_cast<int>(space))) {} |
1656 | 1663 |
1657 Object* object_; | 1664 Object* object_; |
1658 }; | 1665 }; |
1659 | 1666 |
1660 | 1667 |
1661 STATIC_ASSERT(sizeof(AllocationResult) == kPointerSize); | 1668 STATIC_ASSERT(sizeof(AllocationResult) == kPointerSize); |
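The STATIC_ASSERT above is what keeps AllocationResult cheap to pass by value: success and failure share one pointer-sized word, with the retry space smuggled in as a Smi. A standalone analog of the trick, with illustrative names and a low-bit tag chosen only for this sketch (V8's actual Smi tagging differs), looks like this:

#include <cassert>
#include <cstdint>

// Pointer-sized result: either an (aligned) object pointer or a small
// integer naming the space to retry in, distinguished by the low bit.
class Result {
 public:
  static Result Object(void* object) {
    return Result(reinterpret_cast<uintptr_t>(object));  // low bit clear
  }
  static Result Retry(int space) {
    return Result((static_cast<uintptr_t>(space) << 1) | 1);  // low bit set
  }

  bool IsRetry() const { return (bits_ & 1) != 0; }
  void* ToObject() const {
    assert(!IsRetry());
    return reinterpret_cast<void*>(bits_);
  }
  int RetrySpace() const {
    assert(IsRetry());
    return static_cast<int>(bits_ >> 1);
  }

 private:
  explicit Result(uintptr_t bits) : bits_(bits) {}
  uintptr_t bits_;
};

static_assert(sizeof(Result) == sizeof(void*), "fits in one register");

int main() {
  Result r = Result::Retry(2);
  assert(r.IsRetry() && r.RetrySpace() == 2);
  return 0;
}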
(...skipping 15 matching lines...)
1677 // Returns true if the space has been successfully set up and not | 1684 // Returns true if the space has been successfully set up and not |
1678 // subsequently torn down. | 1685 // subsequently torn down. |
1679 bool HasBeenSetUp(); | 1686 bool HasBeenSetUp(); |
1680 | 1687 |
1681 // Cleans up the space, frees all pages in this space except those belonging | 1688 // Cleans up the space, frees all pages in this space except those belonging |
1682 // to the initial chunk, uncommits addresses in the initial chunk. | 1689 // to the initial chunk, uncommits addresses in the initial chunk. |
1683 void TearDown(); | 1690 void TearDown(); |
1684 | 1691 |
1685 // Checks whether an object/address is in this space. | 1692 // Checks whether an object/address is in this space. |
1686 inline bool Contains(Address a); | 1693 inline bool Contains(Address a); |
1687 bool Contains(HeapObject* o) { return Contains(o->address()); } | 1694 inline bool Contains(HeapObject* o); |
1688 // Unlike the Contains() methods, it is safe to call this one even for | 1695 // Unlike the Contains() methods, it is safe to call this one even for |
1689 // addresses of unmapped memory. | 1696 // addresses of unmapped memory. |
1690 bool ContainsSafe(Address addr); | 1697 bool ContainsSafe(Address addr); |
1691 | 1698 |
1692 // Given an address occupied by a live object, return that object if it is | 1699 // Given an address occupied by a live object, return that object if it is |
1693 // in this space, or a Smi if it is not. The implementation iterates over | 1700 // in this space, or a Smi if it is not. The implementation iterates over |
1694 // objects in the page containing the address; the cost is linear in the | 1701 // objects in the page containing the address; the cost is linear in the |
1695 // number of objects in the page. It may be slow. | 1702 // number of objects in the page. It may be slow. |
1696 Object* FindObject(Address addr); | 1703 Object* FindObject(Address addr); |
1697 | 1704 |
(...skipping 568 matching lines...)
2266 // A SemiSpaceIterator is an ObjectIterator that iterates over the active | 2273 // A SemiSpaceIterator is an ObjectIterator that iterates over the active |
2267 // semispace of the heap's new space. It iterates over the objects in the | 2274 // semispace of the heap's new space. It iterates over the objects in the |
2268 // semispace from a given start address (defaulting to the bottom of the | 2275 // semispace from a given start address (defaulting to the bottom of the |
2269 // semispace) to the top of the semispace. New objects allocated after the | 2276 // semispace) to the top of the semispace. New objects allocated after the |
2270 // iterator is created are not iterated. | 2277 // iterator is created are not iterated. |
2271 class SemiSpaceIterator : public ObjectIterator { | 2278 class SemiSpaceIterator : public ObjectIterator { |
2272 public: | 2279 public: |
2273 // Create an iterator over the allocated objects in the given to-space. | 2280 // Create an iterator over the allocated objects in the given to-space. |
2274 explicit SemiSpaceIterator(NewSpace* space); | 2281 explicit SemiSpaceIterator(NewSpace* space); |
2275 | 2282 |
2276 HeapObject* Next() { | 2283 inline HeapObject* Next(); |
2277 if (current_ == limit_) return NULL; | |
2278 if (NewSpacePage::IsAtEnd(current_)) { | |
2279 NewSpacePage* page = NewSpacePage::FromLimit(current_); | |
2280 page = page->next_page(); | |
2281 DCHECK(!page->is_anchor()); | |
2282 current_ = page->area_start(); | |
2283 if (current_ == limit_) return NULL; | |
2284 } | |
2285 | |
2286 HeapObject* object = HeapObject::FromAddress(current_); | |
2287 int size = object->Size(); | |
2288 | |
2289 current_ += size; | |
2290 return object; | |
2291 } | |
2292 | 2284 |
2293 // Implementation of the ObjectIterator functions. | 2285 // Implementation of the ObjectIterator functions. |
2294 virtual HeapObject* next_object() { return Next(); } | 2286 virtual inline HeapObject* next_object(); |
2295 | 2287 |
2296 private: | 2288 private: |
2297 void Initialize(Address start, Address end); | 2289 void Initialize(Address start, Address end); |
2298 | 2290 |
2299 // The current iteration point. | 2291 // The current iteration point. |
2300 Address current_; | 2292 Address current_; |
2301 // The end of iteration. | 2293 // The end of iteration. |
2302 Address limit_; | 2294 Address limit_; |
2303 }; | 2295 }; |
2304 | 2296 |
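Like the HeapObjectIterator and AllocationResult hunks earlier, the SemiSpaceIterator change above replaces an in-class body with a plain inline declaration. Presumably the removed bodies land in a companion -inl.h header (spaces-inl.h would be the usual V8 counterpart; that file is not part of this section), so spaces.h stays cheap to include and only callers of the hot path pull in the definitions. A minimal sketch of the split, with made-up names:

// --- iterator.h: declaration only, keeps the header light ---
class CounterIterator {
 public:
  inline int Next();  // defined in iterator-inl.h
 private:
  int current_ = 0;
};

// --- iterator-inl.h: definition, included only where Next() is called ---
// #include "iterator.h"
inline int CounterIterator::Next() { return current_++; }

// --- some .cc file ---
// #include "iterator-inl.h"
int main() {
  CounterIterator it;
  return it.Next() + it.Next();  // 0 + 1 == 1
}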
(...skipping 499 matching lines...)
2804 }; | 2796 }; |
2805 | 2797 |
2806 | 2798 |
2807 // Iterates over the chunks (pages and large object pages) that can contain | 2799 // Iterates over the chunks (pages and large object pages) that can contain |
2808 // pointers to new space. | 2800 // pointers to new space. |
2809 class PointerChunkIterator BASE_EMBEDDED { | 2801 class PointerChunkIterator BASE_EMBEDDED { |
2810 public: | 2802 public: |
2811 inline explicit PointerChunkIterator(Heap* heap); | 2803 inline explicit PointerChunkIterator(Heap* heap); |
2812 | 2804 |
2813 // Return NULL when the iterator is done. | 2805 // Return NULL when the iterator is done. |
2814 MemoryChunk* next() { | 2806 inline MemoryChunk* next(); |
2815 switch (state_) { | |
2816 case kOldSpaceState: { | |
2817 if (old_iterator_.has_next()) { | |
2818 return old_iterator_.next(); | |
2819 } | |
2820 state_ = kMapState; | |
2821 // Fall through. | |
2822 } | |
2823 case kMapState: { | |
2824 if (map_iterator_.has_next()) { | |
2825 return map_iterator_.next(); | |
2826 } | |
2827 state_ = kLargeObjectState; | |
2828 // Fall through. | |
2829 } | |
2830 case kLargeObjectState: { | |
2831 HeapObject* heap_object; | |
2832 do { | |
2833 heap_object = lo_iterator_.Next(); | |
2834 if (heap_object == NULL) { | |
2835 state_ = kFinishedState; | |
2836 return NULL; | |
2837 } | |
2838 // Fixed arrays are the only pointer-containing objects in large | |
2839 // object space. | |
2840 } while (!heap_object->IsFixedArray()); | |
2841 MemoryChunk* answer = MemoryChunk::FromAddress(heap_object->address()); | |
2842 return answer; | |
2843 } | |
2844 case kFinishedState: | |
2845 return NULL; | |
2846 default: | |
2847 break; | |
2848 } | |
2849 UNREACHABLE(); | |
2850 return NULL; | |
2851 } | |
2852 | |
2853 | 2807 |
2854 private: | 2808 private: |
2855 enum State { kOldSpaceState, kMapState, kLargeObjectState, kFinishedState }; | 2809 enum State { kOldSpaceState, kMapState, kLargeObjectState, kFinishedState }; |
2856 State state_; | 2810 State state_; |
2857 PageIterator old_iterator_; | 2811 PageIterator old_iterator_; |
2858 PageIterator map_iterator_; | 2812 PageIterator map_iterator_; |
2859 LargeObjectIterator lo_iterator_; | 2813 LargeObjectIterator lo_iterator_; |
2860 }; | 2814 }; |
2861 | 2815 |
2862 | 2816 |
2863 #ifdef DEBUG | 2817 #ifdef DEBUG |
2864 struct CommentStatistic { | 2818 struct CommentStatistic { |
2865 const char* comment; | 2819 const char* comment; |
2866 int size; | 2820 int size; |
2867 int count; | 2821 int count; |
2868 void Clear() { | 2822 void Clear() { |
2869 comment = NULL; | 2823 comment = NULL; |
2870 size = 0; | 2824 size = 0; |
2871 count = 0; | 2825 count = 0; |
2872 } | 2826 } |
2873 // Must be small, since an iteration is used for lookup. | 2827 // Must be small, since an iteration is used for lookup. |
2874 static const int kMaxComments = 64; | 2828 static const int kMaxComments = 64; |
2875 }; | 2829 }; |
2876 #endif | 2830 #endif |
2877 } | 2831 } |
2878 } // namespace v8::internal | 2832 } // namespace v8::internal |
2879 | 2833 |
2880 #endif // V8_HEAP_SPACES_H_ | 2834 #endif // V8_HEAP_SPACES_H_ |