OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 50 matching lines...)
61 NULL, | 61 NULL, |
62 NULL, | 62 NULL, |
63 kAllPagesInSpace, | 63 kAllPagesInSpace, |
64 size_func); | 64 size_func); |
65 } | 65 } |
66 | 66 |
67 | 67 |
68 HeapObjectIterator::HeapObjectIterator(Page* page, | 68 HeapObjectIterator::HeapObjectIterator(Page* page, |
69 HeapObjectCallback size_func) { | 69 HeapObjectCallback size_func) { |
70 Space* owner = page->owner(); | 70 Space* owner = page->owner(); |
71 ASSERT(owner == HEAP->old_pointer_space() || | 71 ASSERT(owner == page->heap()->old_pointer_space() || |
72 owner == HEAP->old_data_space() || | 72 owner == page->heap()->old_data_space() || |
73 owner == HEAP->map_space() || | 73 owner == page->heap()->map_space() || |
74 owner == HEAP->cell_space() || | 74 owner == page->heap()->cell_space() || |
75 owner == HEAP->code_space()); | 75 owner == page->heap()->code_space()); |
76 Initialize(reinterpret_cast<PagedSpace*>(owner), | 76 Initialize(reinterpret_cast<PagedSpace*>(owner), |
77 page->area_start(), | 77 page->area_start(), |
78 page->area_end(), | 78 page->area_end(), |
79 kOnePageOnly, | 79 kOnePageOnly, |
80 size_func); | 80 size_func); |
81 ASSERT(page->WasSweptPrecisely()); | 81 ASSERT(page->WasSweptPrecisely()); |
82 } | 82 } |
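The hunk above swaps the process-global HEAP macro for the heap reachable from the page itself. Below is a minimal standalone sketch of that pattern, using made-up ToyHeap/ToySpace/ToyPage names rather than V8's real classes: each page records its owning space and each space records its owning heap, so a check like the ASSERT here can compare against the heap that actually owns the page even when several heaps live in the same process.

```cpp
// Illustrative sketch only: ToyHeap/ToySpace/ToyPage are made-up names, not
// V8's real classes. It shows why the assert asks the page for its heap
// instead of consulting a process-global accessor: with several heaps alive
// at once, the right heap is the one reachable from the page itself.
#include <cassert>

class ToyHeap;

class ToySpace {
 public:
  explicit ToySpace(ToyHeap* heap) : heap_(heap) {}
  ToyHeap* heap() const { return heap_; }
 private:
  ToyHeap* heap_;
};

class ToyPage {
 public:
  explicit ToyPage(ToySpace* owner) : owner_(owner) {}
  ToySpace* owner() const { return owner_; }
  // The page's heap is derived from its owning space, never from a global.
  ToyHeap* heap() const { return owner_->heap(); }
 private:
  ToySpace* owner_;
};

class ToyHeap {
 public:
  ToyHeap() : old_pointer_space_(this) {}
  ToySpace* old_pointer_space() { return &old_pointer_space_; }
 private:
  ToySpace old_pointer_space_;
};

int main() {
  ToyHeap heap_a;
  ToyHeap heap_b;
  ToyPage page(heap_a.old_pointer_space());
  // The check must compare against the heap that owns this page; a global
  // "current heap" could just as well name heap_b.
  assert(page.heap() == &heap_a);
  (void)heap_b;
  return 0;
}
```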
83 | 83 |
84 | 84 |
85 void HeapObjectIterator::Initialize(PagedSpace* space, | 85 void HeapObjectIterator::Initialize(PagedSpace* space, |
(...skipping 1911 matching lines...)
1997 } else { | 1997 } else { |
1998 UNREACHABLE(); | 1998 UNREACHABLE(); |
1999 } | 1999 } |
2000 // We would like to ASSERT(Size() == size_in_bytes) but this would fail during | 2000 // We would like to ASSERT(Size() == size_in_bytes) but this would fail during |
2001 // deserialization because the free space map is not done yet. | 2001 // deserialization because the free space map is not done yet. |
2002 } | 2002 } |
2003 | 2003 |
2004 | 2004 |
2005 FreeListNode* FreeListNode::next() { | 2005 FreeListNode* FreeListNode::next() { |
2006 ASSERT(IsFreeListNode(this)); | 2006 ASSERT(IsFreeListNode(this)); |
2007 if (map() == HEAP->raw_unchecked_free_space_map()) { | 2007 if (map() == GetHeap()->raw_unchecked_free_space_map()) { |
2008 ASSERT(map() == NULL || Size() >= kNextOffset + kPointerSize); | 2008 ASSERT(map() == NULL || Size() >= kNextOffset + kPointerSize); |
2009 return reinterpret_cast<FreeListNode*>( | 2009 return reinterpret_cast<FreeListNode*>( |
2010 Memory::Address_at(address() + kNextOffset)); | 2010 Memory::Address_at(address() + kNextOffset)); |
2011 } else { | 2011 } else { |
2012 return reinterpret_cast<FreeListNode*>( | 2012 return reinterpret_cast<FreeListNode*>( |
2013 Memory::Address_at(address() + kPointerSize)); | 2013 Memory::Address_at(address() + kPointerSize)); |
2014 } | 2014 } |
2015 } | 2015 } |
2016 | 2016 |
2017 | 2017 |
2018 FreeListNode** FreeListNode::next_address() { | 2018 FreeListNode** FreeListNode::next_address() { |
2019 ASSERT(IsFreeListNode(this)); | 2019 ASSERT(IsFreeListNode(this)); |
2020 if (map() == HEAP->raw_unchecked_free_space_map()) { | 2020 if (map() == GetHeap()->raw_unchecked_free_space_map()) { |
2021 ASSERT(Size() >= kNextOffset + kPointerSize); | 2021 ASSERT(Size() >= kNextOffset + kPointerSize); |
2022 return reinterpret_cast<FreeListNode**>(address() + kNextOffset); | 2022 return reinterpret_cast<FreeListNode**>(address() + kNextOffset); |
2023 } else { | 2023 } else { |
2024 return reinterpret_cast<FreeListNode**>(address() + kPointerSize); | 2024 return reinterpret_cast<FreeListNode**>(address() + kPointerSize); |
2025 } | 2025 } |
2026 } | 2026 } |
2027 | 2027 |
2028 | 2028 |
2029 void FreeListNode::set_next(FreeListNode* next) { | 2029 void FreeListNode::set_next(FreeListNode* next) { |
2030 ASSERT(IsFreeListNode(this)); | 2030 ASSERT(IsFreeListNode(this)); |
2031 // While we are booting the VM the free space map will actually be null. So | 2031 // While we are booting the VM the free space map will actually be null. So |
2032 // we have to make sure that we don't try to use it for anything at that | 2032 // we have to make sure that we don't try to use it for anything at that |
2033 // stage. | 2033 // stage. |
2034 if (map() == HEAP->raw_unchecked_free_space_map()) { | 2034 if (map() == GetHeap()->raw_unchecked_free_space_map()) { |
2035 ASSERT(map() == NULL || Size() >= kNextOffset + kPointerSize); | 2035 ASSERT(map() == NULL || Size() >= kNextOffset + kPointerSize); |
2036 Memory::Address_at(address() + kNextOffset) = | 2036 Memory::Address_at(address() + kNextOffset) = |
2037 reinterpret_cast<Address>(next); | 2037 reinterpret_cast<Address>(next); |
2038 } else { | 2038 } else { |
2039 Memory::Address_at(address() + kPointerSize) = | 2039 Memory::Address_at(address() + kPointerSize) = |
2040 reinterpret_cast<Address>(next); | 2040 reinterpret_cast<Address>(next); |
2041 } | 2041 } |
2042 } | 2042 } |
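The three accessors above all branch on the same layout question: where does the "next" link live inside a free-list node? Below is a rough standalone model of that layout, with made-up names and offsets rather than V8's actual constants: every node starts with a map word; a node whose map is the free-space map carries a size field, so its link is stored after that field, while a small filler node has no size field and keeps the link directly after the map word.

```cpp
// Toy model of a free-list node, not V8's real layout or constants. Slot 0
// plays the role of the map word. Nodes tagged with the free-space map keep
// their size in slot 1 and the next link in slot 2; tiny filler nodes have no
// size field, so the next link sits right after the map in slot 1.
#include <cassert>
#include <cstdint>

const uintptr_t kFreeSpaceMap = 0x1;  // stand-in for the free-space map
const uintptr_t kFillerMap = 0x2;     // stand-in for a one/two-word filler map

struct ToyFreeNode {
  uintptr_t words[3];

  uintptr_t map() const { return words[0]; }

  // Mirrors the idea behind FreeListNode::next_address(): the link's offset
  // depends on whether the node is large enough to carry a size field.
  uintptr_t* next_address() {
    return (map() == kFreeSpaceMap) ? &words[2] : &words[1];
  }

  void set_next(ToyFreeNode* next) {
    *next_address() = reinterpret_cast<uintptr_t>(next);
  }
  ToyFreeNode* next() {
    return reinterpret_cast<ToyFreeNode*>(*next_address());
  }
};

int main() {
  ToyFreeNode big = {{kFreeSpaceMap, 3 * sizeof(uintptr_t), 0}};
  ToyFreeNode tiny = {{kFillerMap, 0, 0}};
  big.set_next(&tiny);
  tiny.set_next(NULL);
  assert(big.next() == &tiny);
  assert(tiny.next() == NULL);
  return 0;
}
```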
2043 | 2043 |
2044 | 2044 |
(...skipping 190 matching lines...)
2235 huge_list_available -= reinterpret_cast<FreeSpace*>(cur_node)->Size(); | 2235 huge_list_available -= reinterpret_cast<FreeSpace*>(cur_node)->Size(); |
2236 cur_node = cur_node->next(); | 2236 cur_node = cur_node->next(); |
2237 } | 2237 } |
2238 | 2238 |
2239 *cur = cur_node; | 2239 *cur = cur_node; |
2240 if (cur_node == NULL) { | 2240 if (cur_node == NULL) { |
2241 huge_list_.set_end(NULL); | 2241 huge_list_.set_end(NULL); |
2242 break; | 2242 break; |
2243 } | 2243 } |
2244 | 2244 |
2245 ASSERT((*cur)->map() == HEAP->raw_unchecked_free_space_map()); | 2245 ASSERT((*cur)->map() == heap_->raw_unchecked_free_space_map()); |
2246 FreeSpace* cur_as_free_space = reinterpret_cast<FreeSpace*>(*cur); | 2246 FreeSpace* cur_as_free_space = reinterpret_cast<FreeSpace*>(*cur); |
2247 int size = cur_as_free_space->Size(); | 2247 int size = cur_as_free_space->Size(); |
2248 if (size >= size_in_bytes) { | 2248 if (size >= size_in_bytes) { |
2249 // Large enough node found. Unlink it from the list. | 2249 // Large enough node found. Unlink it from the list. |
2250 node = *cur; | 2250 node = *cur; |
2251 *cur = node->next(); | 2251 *cur = node->next(); |
2252 *node_size = size; | 2252 *node_size = size; |
2253 huge_list_available -= size; | 2253 huge_list_available -= size; |
2254 break; | 2254 break; |
2255 } | 2255 } |
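This hunk is part of the scan over the huge free list: a first-fit walk that advances a FreeListNode** cursor, unlinks nodes through it, and deducts removed bytes from huge_list_available. Below is a simplified standalone sketch of that cursor pattern, with illustrative names and none of the evacuation-candidate filtering or end-pointer bookkeeping: because the cursor always points at the link referencing the current node, unlinking the first sufficiently large node is a single store through the cursor.

```cpp
// Illustrative first-fit scan over a singly linked free list, not V8's actual
// FreeList code: the pointer-to-pointer cursor (*cur) always names the link
// that points at the current node, so unlinking is just "*cur = node->next".
#include <cstddef>

struct Node {
  int size;
  Node* next;
};

// Removes and returns the first node with size >= wanted, or NULL if none.
// "available" tracks the total free bytes remaining in the list.
static Node* FindNodeFor(Node** list_top, int wanted, int* available) {
  for (Node** cur = list_top; *cur != NULL; cur = &(*cur)->next) {
    Node* node = *cur;
    if (node->size >= wanted) {
      *cur = node->next;         // Unlink through the cursor.
      *available -= node->size;  // The node's bytes leave the free list.
      return node;
    }
  }
  return NULL;
}

int main() {
  Node c = {4096, NULL};
  Node b = {256, &c};
  Node a = {128, &b};
  Node* top = &a;
  int available = 128 + 256 + 4096;

  Node* found = FindNodeFor(&top, 1000, &available);
  // 'c' is the first node large enough; it is unlinked and accounted for.
  return (found == &c && available == 128 + 256) ? 0 : 1;
}
```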
(...skipping 115 matching lines...)
2371 large_list_.RepairFreeList(heap); | 2371 large_list_.RepairFreeList(heap); |
2372 huge_list_.RepairFreeList(heap); | 2372 huge_list_.RepairFreeList(heap); |
2373 } | 2373 } |
2374 | 2374 |
2375 | 2375 |
2376 #ifdef DEBUG | 2376 #ifdef DEBUG |
2377 intptr_t FreeListCategory::SumFreeList() { | 2377 intptr_t FreeListCategory::SumFreeList() { |
2378 intptr_t sum = 0; | 2378 intptr_t sum = 0; |
2379 FreeListNode* cur = top_; | 2379 FreeListNode* cur = top_; |
2380 while (cur != NULL) { | 2380 while (cur != NULL) { |
2381 ASSERT(cur->map() == HEAP->raw_unchecked_free_space_map()); | 2381 ASSERT(cur->map() == cur->GetHeap()->raw_unchecked_free_space_map()); |
2382 FreeSpace* cur_as_free_space = reinterpret_cast<FreeSpace*>(cur); | 2382 FreeSpace* cur_as_free_space = reinterpret_cast<FreeSpace*>(cur); |
2383 sum += cur_as_free_space->Size(); | 2383 sum += cur_as_free_space->Size(); |
2384 cur = cur->next(); | 2384 cur = cur->next(); |
2385 } | 2385 } |
2386 return sum; | 2386 return sum; |
2387 } | 2387 } |
2388 | 2388 |
2389 | 2389 |
2390 static const int kVeryLongFreeList = 500; | 2390 static const int kVeryLongFreeList = 500; |
2391 | 2391 |
(...skipping 760 matching lines...)
3152 object->ShortPrint(); | 3152 object->ShortPrint(); |
3153 PrintF("\n"); | 3153 PrintF("\n"); |
3154 } | 3154 } |
3155 printf(" --------------------------------------\n"); | 3155 printf(" --------------------------------------\n"); |
3156 printf(" Marked: %x, LiveCount: %x\n", mark_size, LiveBytes()); | 3156 printf(" Marked: %x, LiveCount: %x\n", mark_size, LiveBytes()); |
3157 } | 3157 } |
3158 | 3158 |
3159 #endif // DEBUG | 3159 #endif // DEBUG |
3160 | 3160 |
3161 } } // namespace v8::internal | 3161 } } // namespace v8::internal |