| OLD | NEW |
| 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 407 matching lines...) |
| 418 static bool exists() { return code_range_ != NULL; } | 418 static bool exists() { return code_range_ != NULL; } |
| 419 static bool contains(Address address) { | 419 static bool contains(Address address) { |
| 420 if (code_range_ == NULL) return false; | 420 if (code_range_ == NULL) return false; |
| 421 Address start = static_cast<Address>(code_range_->address()); | 421 Address start = static_cast<Address>(code_range_->address()); |
| 422 return start <= address && address < start + code_range_->size(); | 422 return start <= address && address < start + code_range_->size(); |
| 423 } | 423 } |
| 424 | 424 |
| 425 // Allocates a chunk of memory from the large-object portion of | 425 // Allocates a chunk of memory from the large-object portion of |
| 426 // the code range. On platforms with no separate code range, should | 426 // the code range. On platforms with no separate code range, should |
| 427 // not be called. | 427 // not be called. |
| 428 static void* AllocateRawMemory(const size_t requested, size_t* allocated); | 428 MUST_USE_RESULT static void* AllocateRawMemory(const size_t requested, |
| 429 size_t* allocated); |
| 429 static void FreeRawMemory(void* buf, size_t length); | 430 static void FreeRawMemory(void* buf, size_t length); |
| 430 | 431 |
| 431 private: | 432 private: |
| 432 // The reserved range of virtual memory that all code objects are put in. | 433 // The reserved range of virtual memory that all code objects are put in. |
| 433 static VirtualMemory* code_range_; | 434 static VirtualMemory* code_range_; |
| 434 // Plain old data class, just a struct plus a constructor. | 435 // Plain old data class, just a struct plus a constructor. |
| 435 class FreeBlock { | 436 class FreeBlock { |
| 436 public: | 437 public: |
| 437 FreeBlock(Address start_arg, size_t size_arg) | 438 FreeBlock(Address start_arg, size_t size_arg) |
| 438 : start(start_arg), size(size_arg) {} | 439 : start(start_arg), size(size_arg) {} |
| (...skipping 117 matching lines...) |
| 556 | 557 |
| 557 // Frees all pages owned by given space. | 558 // Frees all pages owned by given space. |
| 558 static void FreeAllPages(PagedSpace* space); | 559 static void FreeAllPages(PagedSpace* space); |
| 559 | 560 |
| 560 // Allocates and frees raw memory of certain size. | 561 // Allocates and frees raw memory of certain size. |
| 561 // These are just thin wrappers around OS::Allocate and OS::Free, | 562 // These are just thin wrappers around OS::Allocate and OS::Free, |
| 562 // but keep track of allocated bytes as part of heap. | 563 // but keep track of allocated bytes as part of heap. |
| 563 // If the flag is EXECUTABLE and a code range exists, the requested | 564 // If the flag is EXECUTABLE and a code range exists, the requested |
| 564 // memory is allocated from the code range. If a code range exists | 565 // memory is allocated from the code range. If a code range exists |
| 565 // and the freed memory is in it, the code range manages the freed memory. | 566 // and the freed memory is in it, the code range manages the freed memory. |
| 566 static void* AllocateRawMemory(const size_t requested, | 567 MUST_USE_RESULT static void* AllocateRawMemory(const size_t requested, |
| 567 size_t* allocated, | 568 size_t* allocated, |
| 568 Executability executable); | 569 Executability executable); |
| 569 static void FreeRawMemory(void* buf, | 570 static void FreeRawMemory(void* buf, |
| 570 size_t length, | 571 size_t length, |
| 571 Executability executable); | 572 Executability executable); |
| 572 static void PerformAllocationCallback(ObjectSpace space, | 573 static void PerformAllocationCallback(ObjectSpace space, |
| 573 AllocationAction action, | 574 AllocationAction action, |
| 574 size_t size); | 575 size_t size); |
| 575 | 576 |
| 576 static void AddMemoryAllocationCallback(MemoryAllocationCallback callback, | 577 static void AddMemoryAllocationCallback(MemoryAllocationCallback callback, |
| 577 ObjectSpace space, | 578 ObjectSpace space, |
| 578 AllocationAction action); | 579 AllocationAction action); |
| (...skipping 424 matching lines...) |
| 1003 void TearDown(); | 1004 void TearDown(); |
| 1004 | 1005 |
| 1005 // Checks whether an object/address is in this space. | 1006 // Checks whether an object/address is in this space. |
| 1006 inline bool Contains(Address a); | 1007 inline bool Contains(Address a); |
| 1007 bool Contains(HeapObject* o) { return Contains(o->address()); } | 1008 bool Contains(HeapObject* o) { return Contains(o->address()); } |
| 1008 | 1009 |
| 1009 // Given an address occupied by a live object, return that object if it is | 1010 // Given an address occupied by a live object, return that object if it is |
| 1010 // in this space, or Failure::Exception() if it is not. The implementation | 1011 // in this space, or Failure::Exception() if it is not. The implementation |
| 1011 // iterates over objects in the page containing the address, the cost is | 1012 // iterates over objects in the page containing the address, the cost is |
| 1012 // linear in the number of objects in the page. It may be slow. | 1013 // linear in the number of objects in the page. It may be slow. |
| 1013 Object* FindObject(Address addr); | 1014 MUST_USE_RESULT MaybeObject* FindObject(Address addr); |
| 1014 | 1015 |
| 1015 // Checks whether page is currently in use by this space. | 1016 // Checks whether page is currently in use by this space. |
| 1016 bool IsUsed(Page* page); | 1017 bool IsUsed(Page* page); |
| 1017 | 1018 |
| 1018 void MarkAllPagesClean(); | 1019 void MarkAllPagesClean(); |
| 1019 | 1020 |
| 1020 // Prepares for a mark-compact GC. | 1021 // Prepares for a mark-compact GC. |
| 1021 virtual void PrepareForMarkCompact(bool will_compact); | 1022 virtual void PrepareForMarkCompact(bool will_compact); |
| 1022 | 1023 |
| 1023 // The top of allocation in a page in this space. Undefined if page is unused. | 1024 // The top of allocation in a page in this space. Undefined if page is unused. |
| (...skipping 28 matching lines...) |
| 1052 intptr_t Waste() { return accounting_stats_.Waste(); } | 1053 intptr_t Waste() { return accounting_stats_.Waste(); } |
| 1053 | 1054 |
| 1054 // Returns the address of the first object in this space. | 1055 // Returns the address of the first object in this space. |
| 1055 Address bottom() { return first_page_->ObjectAreaStart(); } | 1056 Address bottom() { return first_page_->ObjectAreaStart(); } |
| 1056 | 1057 |
| 1057 // Returns the allocation pointer in this space. | 1058 // Returns the allocation pointer in this space. |
| 1058 Address top() { return allocation_info_.top; } | 1059 Address top() { return allocation_info_.top; } |
| 1059 | 1060 |
| 1060 // Allocate the requested number of bytes in the space if possible, return a | 1061 // Allocate the requested number of bytes in the space if possible, return a |
| 1061 // failure object if not. | 1062 // failure object if not. |
| 1062 inline Object* AllocateRaw(int size_in_bytes); | 1063 MUST_USE_RESULT inline MaybeObject* AllocateRaw(int size_in_bytes); |
| 1063 | 1064 |
| 1064 // Allocate the requested number of bytes for relocation during mark-compact | 1065 // Allocate the requested number of bytes for relocation during mark-compact |
| 1065 // collection. | 1066 // collection. |
| 1066 inline Object* MCAllocateRaw(int size_in_bytes); | 1067 MUST_USE_RESULT inline MaybeObject* MCAllocateRaw(int size_in_bytes); |
| 1067 | 1068 |
| 1068 virtual bool ReserveSpace(int bytes); | 1069 virtual bool ReserveSpace(int bytes); |
| 1069 | 1070 |
| 1070 // Used by ReserveSpace. | 1071 // Used by ReserveSpace. |
| 1071 virtual void PutRestOfCurrentPageOnFreeList(Page* current_page) = 0; | 1072 virtual void PutRestOfCurrentPageOnFreeList(Page* current_page) = 0; |
| 1072 | 1073 |
| 1073 // Free all pages in range from prev (exclusive) to last (inclusive). | 1074 // Free all pages in range from prev (exclusive) to last (inclusive). |
| 1074 // Freed pages are moved to the end of page list. | 1075 // Freed pages are moved to the end of page list. |
| 1075 void FreePages(Page* prev, Page* last); | 1076 void FreePages(Page* prev, Page* last); |
| 1076 | 1077 |
| (...skipping 122 matching lines...) |
| 1199 inline HeapObject* AllocateLinearly(AllocationInfo* alloc_info, | 1200 inline HeapObject* AllocateLinearly(AllocationInfo* alloc_info, |
| 1200 int size_in_bytes); | 1201 int size_in_bytes); |
| 1201 | 1202 |
| 1202 // During normal allocation or deserialization, roll to the next page in | 1203 // During normal allocation or deserialization, roll to the next page in |
| 1203 // the space (there is assumed to be one) and allocate there. This | 1204 // the space (there is assumed to be one) and allocate there. This |
| 1204 // function is space-dependent. | 1205 // function is space-dependent. |
| 1205 virtual HeapObject* AllocateInNextPage(Page* current_page, | 1206 virtual HeapObject* AllocateInNextPage(Page* current_page, |
| 1206 int size_in_bytes) = 0; | 1207 int size_in_bytes) = 0; |
| 1207 | 1208 |
| 1208 // Slow path of AllocateRaw. This function is space-dependent. | 1209 // Slow path of AllocateRaw. This function is space-dependent. |
| 1209 virtual HeapObject* SlowAllocateRaw(int size_in_bytes) = 0; | 1210 MUST_USE_RESULT virtual HeapObject* SlowAllocateRaw(int size_in_bytes) = 0; |
| 1210 | 1211 |
| 1211 // Slow path of MCAllocateRaw. | 1212 // Slow path of MCAllocateRaw. |
| 1212 HeapObject* SlowMCAllocateRaw(int size_in_bytes); | 1213 MUST_USE_RESULT HeapObject* SlowMCAllocateRaw(int size_in_bytes); |
| 1213 | 1214 |
| 1214 #ifdef DEBUG | 1215 #ifdef DEBUG |
| 1215 // Returns the number of total pages in this space. | 1216 // Returns the number of total pages in this space. |
| 1216 int CountTotalPages(); | 1217 int CountTotalPages(); |
| 1217 #endif | 1218 #endif |
| 1218 private: | 1219 private: |
| 1219 | 1220 |
| 1220 // Returns a pointer to the page of the relocation pointer. | 1221 // Returns a pointer to the page of the relocation pointer. |
| 1221 Page* MCRelocationTopPage() { return TopPageOf(mc_forwarding_info_); } | 1222 Page* MCRelocationTopPage() { return TopPageOf(mc_forwarding_info_); } |
| 1222 | 1223 |
| (...skipping 297 matching lines...) |
| 1520 | 1521 |
| 1521 // The start address of the space and a bit mask. Anding an address in the | 1522 // The start address of the space and a bit mask. Anding an address in the |
| 1522 // new space with the mask will result in the start address. | 1523 // new space with the mask will result in the start address. |
| 1523 Address start() { return start_; } | 1524 Address start() { return start_; } |
| 1524 uintptr_t mask() { return address_mask_; } | 1525 uintptr_t mask() { return address_mask_; } |
| 1525 | 1526 |
| 1526 // The allocation top and limit addresses. | 1527 // The allocation top and limit addresses. |
| 1527 Address* allocation_top_address() { return &allocation_info_.top; } | 1528 Address* allocation_top_address() { return &allocation_info_.top; } |
| 1528 Address* allocation_limit_address() { return &allocation_info_.limit; } | 1529 Address* allocation_limit_address() { return &allocation_info_.limit; } |
| 1529 | 1530 |
| 1530 Object* AllocateRaw(int size_in_bytes) { | 1531 MUST_USE_RESULT MaybeObject* AllocateRaw(int size_in_bytes) { |
| 1531 return AllocateRawInternal(size_in_bytes, &allocation_info_); | 1532 return AllocateRawInternal(size_in_bytes, &allocation_info_); |
| 1532 } | 1533 } |
| 1533 | 1534 |
| 1534 // Allocate the requested number of bytes for relocation during mark-compact | 1535 // Allocate the requested number of bytes for relocation during mark-compact |
| 1535 // collection. | 1536 // collection. |
| 1536 Object* MCAllocateRaw(int size_in_bytes) { | 1537 MUST_USE_RESULT MaybeObject* MCAllocateRaw(int size_in_bytes) { |
| 1537 return AllocateRawInternal(size_in_bytes, &mc_forwarding_info_); | 1538 return AllocateRawInternal(size_in_bytes, &mc_forwarding_info_); |
| 1538 } | 1539 } |
| 1539 | 1540 |
| 1540 // Reset the allocation pointer to the beginning of the active semispace. | 1541 // Reset the allocation pointer to the beginning of the active semispace. |
| 1541 void ResetAllocationInfo(); | 1542 void ResetAllocationInfo(); |
| 1542 // Reset the relocation pointer to the bottom of the inactive semispace in | 1543 // Reset the relocation pointer to the bottom of the inactive semispace in |
| 1543 // preparation for mark-compact collection. | 1544 // preparation for mark-compact collection. |
| 1544 void MCResetRelocationInfo(); | 1545 void MCResetRelocationInfo(); |
| 1545 // Update the allocation pointer in the active semispace after a | 1546 // Update the allocation pointer in the active semispace after a |
| 1546 // mark-compact collection. | 1547 // mark-compact collection. |
| (...skipping 81 matching lines...) |
| 1628 // mark-compact collection. | 1629 // mark-compact collection. |
| 1629 AllocationInfo allocation_info_; | 1630 AllocationInfo allocation_info_; |
| 1630 AllocationInfo mc_forwarding_info_; | 1631 AllocationInfo mc_forwarding_info_; |
| 1631 | 1632 |
| 1632 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING) | 1633 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING) |
| 1633 HistogramInfo* allocated_histogram_; | 1634 HistogramInfo* allocated_histogram_; |
| 1634 HistogramInfo* promoted_histogram_; | 1635 HistogramInfo* promoted_histogram_; |
| 1635 #endif | 1636 #endif |
| 1636 | 1637 |
| 1637 // Implementation of AllocateRaw and MCAllocateRaw. | 1638 // Implementation of AllocateRaw and MCAllocateRaw. |
| 1638 inline Object* AllocateRawInternal(int size_in_bytes, | 1639 MUST_USE_RESULT inline MaybeObject* AllocateRawInternal( |
| 1639 AllocationInfo* alloc_info); | 1640 int size_in_bytes, |
| 1641 AllocationInfo* alloc_info); |
| 1640 | 1642 |
| 1641 friend class SemiSpaceIterator; | 1643 friend class SemiSpaceIterator; |
| 1642 | 1644 |
| 1643 public: | 1645 public: |
| 1644 TRACK_MEMORY("NewSpace") | 1646 TRACK_MEMORY("NewSpace") |
| 1645 }; | 1647 }; |
| 1646 | 1648 |
| 1647 | 1649 |
| 1648 // ----------------------------------------------------------------------------- | 1650 // ----------------------------------------------------------------------------- |
| 1649 // Free lists for old object spaces | 1651 // Free lists for old object spaces |
| (...skipping 46 matching lines...) |
| 1696 // number of bytes that have been lost due to internal fragmentation by | 1698 // number of bytes that have been lost due to internal fragmentation by |
| 1697 // freeing the block. Bookkeeping information will be written to the block, | 1699 // freeing the block. Bookkeeping information will be written to the block, |
| 1698 // ie, its contents will be destroyed. The start address should be word | 1700 // ie, its contents will be destroyed. The start address should be word |
| 1699 // aligned, and the size should be a non-zero multiple of the word size. | 1701 // aligned, and the size should be a non-zero multiple of the word size. |
| 1700 int Free(Address start, int size_in_bytes); | 1702 int Free(Address start, int size_in_bytes); |
| 1701 | 1703 |
| 1702 // Allocate a block of size 'size_in_bytes' from the free list. The block | 1704 // Allocate a block of size 'size_in_bytes' from the free list. The block |
| 1703 // is uninitialized. A failure is returned if no block is available. The | 1705 // is uninitialized. A failure is returned if no block is available. The |
| 1704 // number of bytes lost to fragmentation is returned in the output parameter | 1706 // number of bytes lost to fragmentation is returned in the output parameter |
| 1705 // 'wasted_bytes'. The size should be a non-zero multiple of the word size. | 1707 // 'wasted_bytes'. The size should be a non-zero multiple of the word size. |
| 1706 Object* Allocate(int size_in_bytes, int* wasted_bytes); | 1708 MUST_USE_RESULT MaybeObject* Allocate(int size_in_bytes, int* wasted_bytes); |
| 1707 | 1709 |
| 1708 private: | 1710 private: |
| 1709 // The size range of blocks, in bytes. (Smaller allocations are allowed, but | 1711 // The size range of blocks, in bytes. (Smaller allocations are allowed, but |
| 1710 // will always result in waste.) | 1712 // will always result in waste.) |
| 1711 static const int kMinBlockSize = 2 * kPointerSize; | 1713 static const int kMinBlockSize = 2 * kPointerSize; |
| 1712 static const int kMaxBlockSize = Page::kMaxHeapObjectSize; | 1714 static const int kMaxBlockSize = Page::kMaxHeapObjectSize; |
| 1713 | 1715 |
| 1714 // The identity of the owning space, for building allocation Failure | 1716 // The identity of the owning space, for building allocation Failure |
| 1715 // objects. | 1717 // objects. |
| 1716 AllocationSpace owner_; | 1718 AllocationSpace owner_; |
| (...skipping 77 matching lines...) |
| 1794 intptr_t available() { return available_; } | 1796 intptr_t available() { return available_; } |
| 1795 | 1797 |
| 1796 // Place a node on the free list. The block starting at 'start' (assumed to | 1798 // Place a node on the free list. The block starting at 'start' (assumed to |
| 1797 // have size object_size_) is placed on the free list. Bookkeeping | 1799 // have size object_size_) is placed on the free list. Bookkeeping |
| 1798 // information will be written to the block, ie, its contents will be | 1800 // information will be written to the block, ie, its contents will be |
| 1799 // destroyed. The start address should be word aligned. | 1801 // destroyed. The start address should be word aligned. |
| 1800 void Free(Address start); | 1802 void Free(Address start); |
| 1801 | 1803 |
| 1802 // Allocate a fixed sized block from the free list. The block is uninitialized. | 1804 // Allocate a fixed sized block from the free list. The block is uninitialized. |
| 1803 // A failure is returned if no block is available. | 1805 // A failure is returned if no block is available. |
| 1804 Object* Allocate(); | 1806 MUST_USE_RESULT MaybeObject* Allocate(); |
| 1805 | 1807 |
| 1806 private: | 1808 private: |
| 1807 // Available bytes on the free list. | 1809 // Available bytes on the free list. |
| 1808 intptr_t available_; | 1810 intptr_t available_; |
| 1809 | 1811 |
| 1810 // The head of the free list. | 1812 // The head of the free list. |
| 1811 Address head_; | 1813 Address head_; |
| 1812 | 1814 |
| 1813 // The tail of the free list. | 1815 // The tail of the free list. |
| 1814 Address tail_; | 1816 Address tail_; |
| (...skipping 59 matching lines...) |
| 1874 | 1876 |
| 1875 virtual void PutRestOfCurrentPageOnFreeList(Page* current_page); | 1877 virtual void PutRestOfCurrentPageOnFreeList(Page* current_page); |
| 1876 | 1878 |
| 1877 #ifdef DEBUG | 1879 #ifdef DEBUG |
| 1878 // Reports statistics for the space | 1880 // Reports statistics for the space |
| 1879 void ReportStatistics(); | 1881 void ReportStatistics(); |
| 1880 #endif | 1882 #endif |
| 1881 | 1883 |
| 1882 protected: | 1884 protected: |
| 1883 // Virtual function in the superclass. Slow path of AllocateRaw. | 1885 // Virtual function in the superclass. Slow path of AllocateRaw. |
| 1884 HeapObject* SlowAllocateRaw(int size_in_bytes); | 1886 MUST_USE_RESULT HeapObject* SlowAllocateRaw(int size_in_bytes); |
| 1885 | 1887 |
| 1886 // Virtual function in the superclass. Allocate linearly at the start of | 1888 // Virtual function in the superclass. Allocate linearly at the start of |
| 1887 // the page after current_page (there is assumed to be one). | 1889 // the page after current_page (there is assumed to be one). |
| 1888 HeapObject* AllocateInNextPage(Page* current_page, int size_in_bytes); | 1890 HeapObject* AllocateInNextPage(Page* current_page, int size_in_bytes); |
| 1889 | 1891 |
| 1890 private: | 1892 private: |
| 1891 // The space's free list. | 1893 // The space's free list. |
| 1892 OldSpaceFreeList free_list_; | 1894 OldSpaceFreeList free_list_; |
| 1893 | 1895 |
| 1894 public: | 1896 public: |
| (...skipping 46 matching lines...) |
| 1941 virtual void DeallocateBlock(Address start, | 1943 virtual void DeallocateBlock(Address start, |
| 1942 int size_in_bytes, | 1944 int size_in_bytes, |
| 1943 bool add_to_freelist); | 1945 bool add_to_freelist); |
| 1944 #ifdef DEBUG | 1946 #ifdef DEBUG |
| 1945 // Reports statistic info of the space | 1947 // Reports statistic info of the space |
| 1946 void ReportStatistics(); | 1948 void ReportStatistics(); |
| 1947 #endif | 1949 #endif |
| 1948 | 1950 |
| 1949 protected: | 1951 protected: |
| 1950 // Virtual function in the superclass. Slow path of AllocateRaw. | 1952 // Virtual function in the superclass. Slow path of AllocateRaw. |
| 1951 HeapObject* SlowAllocateRaw(int size_in_bytes); | 1953 MUST_USE_RESULT HeapObject* SlowAllocateRaw(int size_in_bytes); |
| 1952 | 1954 |
| 1953 // Virtual function in the superclass. Allocate linearly at the start of | 1955 // Virtual function in the superclass. Allocate linearly at the start of |
| 1954 // the page after current_page (there is assumed to be one). | 1956 // the page after current_page (there is assumed to be one). |
| 1955 HeapObject* AllocateInNextPage(Page* current_page, int size_in_bytes); | 1957 HeapObject* AllocateInNextPage(Page* current_page, int size_in_bytes); |
| 1956 | 1958 |
| 1957 void ResetFreeList() { | 1959 void ResetFreeList() { |
| 1958 free_list_.Reset(); | 1960 free_list_.Reset(); |
| 1959 } | 1961 } |
| 1960 | 1962 |
| 1961 private: | 1963 private: |
| (...skipping 197 matching lines...) |
| 2159 explicit LargeObjectSpace(AllocationSpace id); | 2161 explicit LargeObjectSpace(AllocationSpace id); |
| 2160 virtual ~LargeObjectSpace() {} | 2162 virtual ~LargeObjectSpace() {} |
| 2161 | 2163 |
| 2162 // Initializes internal data structures. | 2164 // Initializes internal data structures. |
| 2163 bool Setup(); | 2165 bool Setup(); |
| 2164 | 2166 |
| 2165 // Releases internal resources, frees objects in this space. | 2167 // Releases internal resources, frees objects in this space. |
| 2166 void TearDown(); | 2168 void TearDown(); |
| 2167 | 2169 |
| 2168 // Allocates a (non-FixedArray, non-Code) large object. | 2170 // Allocates a (non-FixedArray, non-Code) large object. |
| 2169 Object* AllocateRaw(int size_in_bytes); | 2171 MUST_USE_RESULT MaybeObject* AllocateRaw(int size_in_bytes); |
| 2170 // Allocates a large Code object. | 2172 // Allocates a large Code object. |
| 2171 Object* AllocateRawCode(int size_in_bytes); | 2173 MUST_USE_RESULT MaybeObject* AllocateRawCode(int size_in_bytes); |
| 2172 // Allocates a large FixedArray. | 2174 // Allocates a large FixedArray. |
| 2173 Object* AllocateRawFixedArray(int size_in_bytes); | 2175 MUST_USE_RESULT MaybeObject* AllocateRawFixedArray(int size_in_bytes); |
| 2174 | 2176 |
| 2175 // Available bytes for objects in this space. | 2177 // Available bytes for objects in this space. |
| 2176 intptr_t Available() { | 2178 intptr_t Available() { |
| 2177 return LargeObjectChunk::ObjectSizeFor(MemoryAllocator::Available()); | 2179 return LargeObjectChunk::ObjectSizeFor(MemoryAllocator::Available()); |
| 2178 } | 2180 } |
| 2179 | 2181 |
| 2180 virtual intptr_t Size() { | 2182 virtual intptr_t Size() { |
| 2181 return size_; | 2183 return size_; |
| 2182 } | 2184 } |
| 2183 | 2185 |
| 2184 int PageCount() { | 2186 int PageCount() { |
| 2185 return page_count_; | 2187 return page_count_; |
| 2186 } | 2188 } |
| 2187 | 2189 |
| 2188 // Finds an object for a given address, returns Failure::Exception() | 2190 // Finds an object for a given address, returns Failure::Exception() |
| 2189 // if it is not found. The function iterates through all objects in this | 2191 // if it is not found. The function iterates through all objects in this |
| 2190 // space, may be slow. | 2192 // space, may be slow. |
| 2191 Object* FindObject(Address a); | 2193 MaybeObject* FindObject(Address a); |
| 2192 | 2194 |
| 2193 // Finds a large object page containing the given pc, returns NULL | 2195 // Finds a large object page containing the given pc, returns NULL |
| 2194 // if such a page doesn't exist. | 2196 // if such a page doesn't exist. |
| 2195 LargeObjectChunk* FindChunkContainingPc(Address pc); | 2197 LargeObjectChunk* FindChunkContainingPc(Address pc); |
| 2196 | 2198 |
| 2197 // Iterates objects covered by dirty regions. | 2199 // Iterates objects covered by dirty regions. |
| 2198 void IterateDirtyRegions(ObjectSlotCallback func); | 2200 void IterateDirtyRegions(ObjectSlotCallback func); |
| 2199 | 2201 |
| 2200 // Frees unmarked objects. | 2202 // Frees unmarked objects. |
| 2201 void FreeUnmarkedObjects(); | 2203 void FreeUnmarkedObjects(); |
| (...skipping 27 matching lines...) |
| 2229 | 2231 |
| 2230 private: | 2232 private: |
| 2231 // The head of the linked list of large object chunks. | 2233 // The head of the linked list of large object chunks. |
| 2232 LargeObjectChunk* first_chunk_; | 2234 LargeObjectChunk* first_chunk_; |
| 2233 intptr_t size_; // allocated bytes | 2235 intptr_t size_; // allocated bytes |
| 2234 int page_count_; // number of chunks | 2236 int page_count_; // number of chunks |
| 2235 | 2237 |
| 2236 | 2238 |
| 2237 // Shared implementation of AllocateRaw, AllocateRawCode and | 2239 // Shared implementation of AllocateRaw, AllocateRawCode and |
| 2238 // AllocateRawFixedArray. | 2240 // AllocateRawFixedArray. |
| 2239 Object* AllocateRawInternal(int requested_size, | 2241 MUST_USE_RESULT MaybeObject* AllocateRawInternal(int requested_size, |
| 2240 int object_size, | 2242 int object_size, |
| 2241 Executability executable); | 2243 Executability executable); |
| 2242 | 2244 |
| 2243 friend class LargeObjectIterator; | 2245 friend class LargeObjectIterator; |
| 2244 | 2246 |
| 2245 public: | 2247 public: |
| 2246 TRACK_MEMORY("LargeObjectSpace") | 2248 TRACK_MEMORY("LargeObjectSpace") |
| 2247 }; | 2249 }; |
| 2248 | 2250 |
| 2249 | 2251 |
| 2250 class LargeObjectIterator: public ObjectIterator { | 2252 class LargeObjectIterator: public ObjectIterator { |
| 2251 public: | 2253 public: |
| 2252 explicit LargeObjectIterator(LargeObjectSpace* space); | 2254 explicit LargeObjectIterator(LargeObjectSpace* space); |
| 2253 LargeObjectIterator(LargeObjectSpace* space, HeapObjectCallback size_func); | 2255 LargeObjectIterator(LargeObjectSpace* space, HeapObjectCallback size_func); |
| 2254 | 2256 |
| 2255 HeapObject* next(); | 2257 HeapObject* next(); |
| 2256 | 2258 |
| 2257 // implementation of ObjectIterator. | 2259 // implementation of ObjectIterator. |
| 2258 virtual HeapObject* next_object() { return next(); } | 2260 virtual HeapObject* next_object() { return next(); } |
| 2259 | 2261 |
| 2260 private: | 2262 private: |
| 2261 LargeObjectChunk* current_; | 2263 LargeObjectChunk* current_; |
| 2262 HeapObjectCallback size_func_; | 2264 HeapObjectCallback size_func_; |
| 2263 }; | 2265 }; |
| 2264 | 2266 |
| 2265 | 2267 |
| 2266 } } // namespace v8::internal | 2268 } } // namespace v8::internal |
| 2267 | 2269 |
| 2268 #endif // V8_SPACES_H_ | 2270 #endif // V8_SPACES_H_ |
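The substance of this diff is mechanical: allocation entry points in spaces.h that used to return a raw `Object*` now return `MaybeObject*` and are annotated with `MUST_USE_RESULT`, so callers can no longer silently drop an allocation failure. The sketch below is a minimal, self-contained illustration of that caller-side pattern, not V8 source: the `Object`, `MaybeObject`, and `AllocateRaw` definitions, and the expansion of `MUST_USE_RESULT` to `__attribute__((warn_unused_result))`, are simplified stand-ins chosen for illustration.

```cpp
// Standalone sketch (not V8 code) of the MaybeObject* / MUST_USE_RESULT
// pattern that this change applies to the allocation entry points.
#include <cstdio>

#if defined(__GNUC__)
#define MUST_USE_RESULT __attribute__((warn_unused_result))
#else
#define MUST_USE_RESULT
#endif

struct Object {
  int size;
};

// Wraps either a successfully allocated Object or a failure marker
// (a stand-in for V8's retry-after-GC failures).
class MaybeObject {
 public:
  explicit MaybeObject(Object* obj) : obj_(obj) {}
  bool IsFailure() const { return obj_ == NULL; }
  // Unwrap: returns false (and leaves *out untouched) on failure.
  bool ToObject(Object** out) const {
    if (IsFailure()) return false;
    *out = obj_;
    return true;
  }
 private:
  Object* obj_;  // NULL encodes failure in this sketch only
};

// With MUST_USE_RESULT the compiler warns if a caller discards the
// returned MaybeObject*, which is the point of the annotation.
MUST_USE_RESULT MaybeObject* AllocateRaw(int size_in_bytes) {
  static Object small_object = { 0 };
  static MaybeObject success(&small_object);
  static MaybeObject failure(NULL);
  if (size_in_bytes > 64) return &failure;  // pretend the space is full
  small_object.size = size_in_bytes;
  return &success;
}

// Typical caller: unwrap the result and propagate failure explicitly
// instead of treating the return value as an always-valid object.
Object* UseAllocation(int size_in_bytes) {
  MaybeObject* maybe = AllocateRaw(size_in_bytes);
  Object* result;
  if (!maybe->ToObject(&result)) return NULL;
  return result;
}

int main() {
  std::printf("32-byte request ok: %d\n", UseAllocation(32) != NULL);
  std::printf("1024-byte request ok: %d\n", UseAllocation(1024) != NULL);
  // AllocateRaw(8);  // would trigger -Wunused-result under GCC/Clang
  return 0;
}
```

The same unwrap-and-propagate idiom applies to every annotated entry point in the diff: AllocateRaw, MCAllocateRaw, FindObject, the free-list Allocate methods, and the large-object allocators, so failure handling is enforced at compile time along all allocation paths.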