OLD | NEW |
---|---|
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 640 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
651 // ----------------------------------------------------------------------------- | 651 // ----------------------------------------------------------------------------- |
652 // PagedSpace implementation | 652 // PagedSpace implementation |
653 | 653 |
654 PagedSpace::PagedSpace(Heap* heap, | 654 PagedSpace::PagedSpace(Heap* heap, |
655 intptr_t max_capacity, | 655 intptr_t max_capacity, |
656 AllocationSpace id, | 656 AllocationSpace id, |
657 Executability executable) | 657 Executability executable) |
658 : Space(heap, id, executable), | 658 : Space(heap, id, executable), |
659 free_list_(this), | 659 free_list_(this), |
660 was_swept_conservatively_(false), | 660 was_swept_conservatively_(false), |
661 first_unswept_page_(Page::FromAddress(NULL)), | 661 first_unswept_page_(Page::FromAddress(NULL)) { |
662 last_unswept_page_(Page::FromAddress(NULL)) { | |
663 max_capacity_ = (RoundDown(max_capacity, Page::kPageSize) / Page::kPageSize) | 662 max_capacity_ = (RoundDown(max_capacity, Page::kPageSize) / Page::kPageSize) |
664 * Page::kObjectAreaSize; | 663 * Page::kObjectAreaSize; |
665 accounting_stats_.Clear(); | 664 accounting_stats_.Clear(); |
666 | 665 |
667 allocation_info_.top = NULL; | 666 allocation_info_.top = NULL; |
668 allocation_info_.limit = NULL; | 667 allocation_info_.limit = NULL; |
669 | 668 |
670 anchor_.InitializeAsAnchor(this); | 669 anchor_.InitializeAsAnchor(this); |
671 } | 670 } |
672 | 671 |
(...skipping 74 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
747 it.next(); | 746 it.next(); |
748 count++; | 747 count++; |
749 } | 748 } |
750 return count; | 749 return count; |
751 } | 750 } |
752 #endif | 751 #endif |
753 | 752 |
754 | 753 |
755 void PagedSpace::ReleasePage(Page* page) { | 754 void PagedSpace::ReleasePage(Page* page) { |
756 ASSERT(page->LiveBytes() == 0); | 755 ASSERT(page->LiveBytes() == 0); |
756 | |
 757 // Adjust list of unswept pages if the page is its head. | 
758 if (first_unswept_page_ == page) { | |
759 first_unswept_page_ = page->next_page(); | |
760 if (first_unswept_page_ == anchor()) { | |
761 first_unswept_page_ = Page::FromAddress(NULL); | |
762 } | |
763 } | |
764 | |
765 if (page->WasSwept()) { | |
766 intptr_t size = free_list_.EvictFreeListItems(page); | |
767 accounting_stats_.AllocateBytes(size); | |
768 ASSERT_EQ(Page::kObjectAreaSize, static_cast<int>(size)); | |
769 } | |
770 | |
757 page->Unlink(); | 771 page->Unlink(); |
758 if (page->IsFlagSet(MemoryChunk::CONTAINS_ONLY_DATA)) { | 772 if (page->IsFlagSet(MemoryChunk::CONTAINS_ONLY_DATA)) { |
759 heap()->isolate()->memory_allocator()->Free(page); | 773 heap()->isolate()->memory_allocator()->Free(page); |
760 } else { | 774 } else { |
761 heap()->QueueMemoryChunkForFree(page); | 775 heap()->QueueMemoryChunkForFree(page); |
762 } | 776 } |
763 | 777 |
764 ASSERT(Capacity() > 0); | 778 ASSERT(Capacity() > 0); |
765 ASSERT(Capacity() % Page::kObjectAreaSize == 0); | 779 ASSERT(Capacity() % Page::kObjectAreaSize == 0); |
766 accounting_stats_.ShrinkSpace(Page::kObjectAreaSize); | 780 accounting_stats_.ShrinkSpace(Page::kObjectAreaSize); |
767 } | 781 } |
768 | 782 |
769 | 783 |
770 void PagedSpace::ReleaseAllUnusedPages() { | 784 void PagedSpace::ReleaseAllUnusedPages() { |
771 PageIterator it(this); | 785 PageIterator it(this); |
772 while (it.has_next()) { | 786 while (it.has_next()) { |
773 Page* page = it.next(); | 787 Page* page = it.next(); |
774 if (page->LiveBytes() == 0) { | 788 if (!page->WasSwept()) { |
775 ReleasePage(page); | 789 if (page->LiveBytes() == 0) ReleasePage(page); |
790 } else { | |
791 HeapObject* obj = HeapObject::FromAddress(page->body()); | |
792 if (obj->IsFreeSpace() && | |
793 FreeSpace::cast(obj)->size() == Page::kObjectAreaSize) { | |
794 // Sometimes we allocate memory from free list but don't | |
795 // immediately initialize it (e.g. see PagedSpace::ReserveSpace | |
796 // called from Heap::ReserveSpace that can cause GC before | |
797 // reserved space is actually initialized). | |
798 // Thus we can't simply assume that obj represents a valid | |
 799 // node still owned by a free list. | 
800 // Instead we should verify that the page is fully covered | |
801 // by free list items. | |
Michael Starzinger
2011/11/10 12:52:45
Wow, that's tricky. Nice catch!
| |
802 FreeList::SizeStats sizes; | |
803 free_list_.CountFreeListItems(page, &sizes); | |
804 if (sizes.Total() == Page::kObjectAreaSize) { | |
805 ReleasePage(page); | |
806 } | |
807 } | |
776 } | 808 } |
777 } | 809 } |
778 heap()->FreeQueuedChunks(); | 810 heap()->FreeQueuedChunks(); |
779 } | 811 } |
780 | 812 |
781 | 813 |
782 #ifdef DEBUG | 814 #ifdef DEBUG |
783 void PagedSpace::Print() { } | 815 void PagedSpace::Print() { } |
784 #endif | 816 #endif |
785 | 817 |
(...skipping 1077 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1863 if (Page::FromAddress(n->address()) == p) { | 1895 if (Page::FromAddress(n->address()) == p) { |
1864 FreeSpace* free_space = reinterpret_cast<FreeSpace*>(n); | 1896 FreeSpace* free_space = reinterpret_cast<FreeSpace*>(n); |
1865 sum += free_space->Size(); | 1897 sum += free_space->Size(); |
1866 } | 1898 } |
1867 n = n->next(); | 1899 n = n->next(); |
1868 } | 1900 } |
1869 return sum; | 1901 return sum; |
1870 } | 1902 } |
1871 | 1903 |
1872 | 1904 |
1873 void FreeList::CountFreeListItems(Page* p, intptr_t* sizes) { | 1905 void FreeList::CountFreeListItems(Page* p, SizeStats* sizes) { |
1874 sizes[0] = CountFreeListItemsInList(small_list_, p); | 1906 sizes->huge_size_ = CountFreeListItemsInList(huge_list_, p); |
1875 sizes[1] = CountFreeListItemsInList(medium_list_, p); | 1907 if (sizes->huge_size_ < Page::kObjectAreaSize) { |
1876 sizes[2] = CountFreeListItemsInList(large_list_, p); | 1908 sizes->small_size_ = CountFreeListItemsInList(small_list_, p); |
1877 sizes[3] = CountFreeListItemsInList(huge_list_, p); | 1909 sizes->medium_size_ = CountFreeListItemsInList(medium_list_, p); |
1910 sizes->large_size_ = CountFreeListItemsInList(large_list_, p); | |
1911 } else { | |
1912 sizes->small_size_ = 0; | |
1913 sizes->medium_size_ = 0; | |
1914 sizes->large_size_ = 0; | |
1915 } | |
1878 } | 1916 } |
1879 | 1917 |
1918 | |
1919 static intptr_t EvictFreeListItemsInList(FreeListNode** n, Page* p) { | |
1920 intptr_t sum = 0; | |
1921 while (*n != NULL) { | |
1922 if (Page::FromAddress((*n)->address()) == p) { | |
1923 FreeSpace* free_space = reinterpret_cast<FreeSpace*>(*n); | |
1924 sum += free_space->Size(); | |
1925 *n = (*n)->next(); | |
1926 } else { | |
1927 n = (*n)->next_address(); | |
1928 } | |
1929 } | |
1930 return sum; | |
1931 } | |
1932 | |
1933 | |
1934 intptr_t FreeList::EvictFreeListItems(Page* p) { | |
1935 intptr_t sum = EvictFreeListItemsInList(&huge_list_, p); | |
1936 | |
1937 if (sum < Page::kObjectAreaSize) { | |
1938 sum += EvictFreeListItemsInList(&small_list_, p) + | |
1939 EvictFreeListItemsInList(&medium_list_, p) + | |
1940 EvictFreeListItemsInList(&large_list_, p); | |
1941 } | |
1942 | |
1943 available_ -= sum; | |
1944 | |
1945 return sum; | |
1946 } | |
1947 | |
1948 | |
1880 #ifdef DEBUG | 1949 #ifdef DEBUG |
1881 intptr_t FreeList::SumFreeList(FreeListNode* cur) { | 1950 intptr_t FreeList::SumFreeList(FreeListNode* cur) { |
1882 intptr_t sum = 0; | 1951 intptr_t sum = 0; |
1883 while (cur != NULL) { | 1952 while (cur != NULL) { |
1884 ASSERT(cur->map() == HEAP->raw_unchecked_free_space_map()); | 1953 ASSERT(cur->map() == HEAP->raw_unchecked_free_space_map()); |
1885 FreeSpace* cur_as_free_space = reinterpret_cast<FreeSpace*>(cur); | 1954 FreeSpace* cur_as_free_space = reinterpret_cast<FreeSpace*>(cur); |
1886 sum += cur_as_free_space->Size(); | 1955 sum += cur_as_free_space->Size(); |
1887 cur = cur->next(); | 1956 cur = cur->next(); |
1888 } | 1957 } |
1889 return sum; | 1958 return sum; |
(...skipping 66 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1956 // We don't have a linear allocation area while sweeping. It will be restored | 2025 // We don't have a linear allocation area while sweeping. It will be restored |
1957 // on the first allocation after the sweep. | 2026 // on the first allocation after the sweep. |
1958 // Mark the old linear allocation area with a free space map so it can be | 2027 // Mark the old linear allocation area with a free space map so it can be |
1959 // skipped when scanning the heap. | 2028 // skipped when scanning the heap. |
1960 int old_linear_size = static_cast<int>(limit() - top()); | 2029 int old_linear_size = static_cast<int>(limit() - top()); |
1961 Free(top(), old_linear_size); | 2030 Free(top(), old_linear_size); |
1962 SetTop(NULL, NULL); | 2031 SetTop(NULL, NULL); |
1963 | 2032 |
1964 // Stop lazy sweeping and clear marking bits for unswept pages. | 2033 // Stop lazy sweeping and clear marking bits for unswept pages. |
1965 if (first_unswept_page_ != NULL) { | 2034 if (first_unswept_page_ != NULL) { |
1966 Page* last = last_unswept_page_; | |
1967 Page* p = first_unswept_page_; | 2035 Page* p = first_unswept_page_; |
1968 do { | 2036 do { |
1969 // Do not use ShouldBeSweptLazily predicate here. | 2037 // Do not use ShouldBeSweptLazily predicate here. |
1970 // New evacuation candidates were selected but they still have | 2038 // New evacuation candidates were selected but they still have |
1971 // to be swept before collection starts. | 2039 // to be swept before collection starts. |
1972 if (!p->WasSwept()) { | 2040 if (!p->WasSwept()) { |
1973 Bitmap::Clear(p); | 2041 Bitmap::Clear(p); |
1974 if (FLAG_gc_verbose) { | 2042 if (FLAG_gc_verbose) { |
1975 PrintF("Sweeping 0x%" V8PRIxPTR " lazily abandoned.\n", | 2043 PrintF("Sweeping 0x%" V8PRIxPTR " lazily abandoned.\n", |
1976 reinterpret_cast<intptr_t>(p)); | 2044 reinterpret_cast<intptr_t>(p)); |
1977 } | 2045 } |
1978 } | 2046 } |
1979 p = p->next_page(); | 2047 p = p->next_page(); |
1980 } while (p != last); | 2048 } while (p != anchor()); |
1981 } | 2049 } |
1982 first_unswept_page_ = last_unswept_page_ = Page::FromAddress(NULL); | 2050 first_unswept_page_ = Page::FromAddress(NULL); |
1983 | 2051 |
1984 // Clear the free list before a full GC---it will be rebuilt afterward. | 2052 // Clear the free list before a full GC---it will be rebuilt afterward. |
1985 free_list_.Reset(); | 2053 free_list_.Reset(); |
1986 } | 2054 } |
1987 | 2055 |
1988 | 2056 |
1989 bool PagedSpace::ReserveSpace(int size_in_bytes) { | 2057 bool PagedSpace::ReserveSpace(int size_in_bytes) { |
1990 ASSERT(size_in_bytes <= Page::kMaxHeapObjectSize); | 2058 ASSERT(size_in_bytes <= Page::kMaxHeapObjectSize); |
1991 ASSERT(size_in_bytes == RoundSizeDownToObjectAlignment(size_in_bytes)); | 2059 ASSERT(size_in_bytes == RoundSizeDownToObjectAlignment(size_in_bytes)); |
1992 Address current_top = allocation_info_.top; | 2060 Address current_top = allocation_info_.top; |
(...skipping 20 matching lines...) Expand all Loading... | |
2013 // doesn't know that memory was 'promised' to large object space. | 2081 // doesn't know that memory was 'promised' to large object space. |
2014 bool LargeObjectSpace::ReserveSpace(int bytes) { | 2082 bool LargeObjectSpace::ReserveSpace(int bytes) { |
2015 return heap()->OldGenerationSpaceAvailable() >= bytes; | 2083 return heap()->OldGenerationSpaceAvailable() >= bytes; |
2016 } | 2084 } |
2017 | 2085 |
2018 | 2086 |
2019 bool PagedSpace::AdvanceSweeper(intptr_t bytes_to_sweep) { | 2087 bool PagedSpace::AdvanceSweeper(intptr_t bytes_to_sweep) { |
2020 if (IsSweepingComplete()) return true; | 2088 if (IsSweepingComplete()) return true; |
2021 | 2089 |
2022 intptr_t freed_bytes = 0; | 2090 intptr_t freed_bytes = 0; |
2023 Page* last = last_unswept_page_; | |
2024 Page* p = first_unswept_page_; | 2091 Page* p = first_unswept_page_; |
2025 do { | 2092 do { |
2026 Page* next_page = p->next_page(); | 2093 Page* next_page = p->next_page(); |
2027 if (ShouldBeSweptLazily(p)) { | 2094 if (ShouldBeSweptLazily(p)) { |
2028 if (FLAG_gc_verbose) { | 2095 if (FLAG_gc_verbose) { |
2029 PrintF("Sweeping 0x%" V8PRIxPTR " lazily advanced.\n", | 2096 PrintF("Sweeping 0x%" V8PRIxPTR " lazily advanced.\n", |
2030 reinterpret_cast<intptr_t>(p)); | 2097 reinterpret_cast<intptr_t>(p)); |
2031 } | 2098 } |
2032 freed_bytes += MarkCompactCollector::SweepConservatively(this, p); | 2099 freed_bytes += MarkCompactCollector::SweepConservatively(this, p); |
2033 } | 2100 } |
2034 p = next_page; | 2101 p = next_page; |
2035 } while (p != last && freed_bytes < bytes_to_sweep); | 2102 } while (p != anchor() && freed_bytes < bytes_to_sweep); |
2036 | 2103 |
2037 if (p == last) { | 2104 if (p == anchor()) { |
2038 last_unswept_page_ = first_unswept_page_ = Page::FromAddress(NULL); | 2105 first_unswept_page_ = Page::FromAddress(NULL); |
2039 } else { | 2106 } else { |
2040 first_unswept_page_ = p; | 2107 first_unswept_page_ = p; |
2041 } | 2108 } |
2042 | 2109 |
2043 heap()->LowerOldGenLimits(freed_bytes); | 2110 heap()->LowerOldGenLimits(freed_bytes); |
2044 | 2111 |
2045 heap()->FreeQueuedChunks(); | 2112 heap()->FreeQueuedChunks(); |
2046 | 2113 |
2047 return IsSweepingComplete(); | 2114 return IsSweepingComplete(); |
2048 } | 2115 } |
(...skipping 516 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
2565 object->ShortPrint(); | 2632 object->ShortPrint(); |
2566 PrintF("\n"); | 2633 PrintF("\n"); |
2567 } | 2634 } |
2568 printf(" --------------------------------------\n"); | 2635 printf(" --------------------------------------\n"); |
2569 printf(" Marked: %x, LiveCount: %x\n", mark_size, LiveBytes()); | 2636 printf(" Marked: %x, LiveCount: %x\n", mark_size, LiveBytes()); |
2570 } | 2637 } |
2571 | 2638 |
2572 #endif // DEBUG | 2639 #endif // DEBUG |
2573 | 2640 |
2574 } } // namespace v8::internal | 2641 } } // namespace v8::internal |
OLD | NEW |