| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 1718 matching lines...) |
| 1729 | 1729 |
| 1730 const int kThreshold = IncrementalMarking::kAllocatedThreshold; | 1730 const int kThreshold = IncrementalMarking::kAllocatedThreshold; |
| 1731 | 1731 |
| 1732 // Memory in the linear allocation area is counted as allocated. We may free | 1732 // Memory in the linear allocation area is counted as allocated. We may free |
| 1733 // a little of this again immediately - see below. | 1733 // a little of this again immediately - see below. |
| 1734 owner_->Allocate(new_node_size); | 1734 owner_->Allocate(new_node_size); |
| 1735 | 1735 |
| 1736 if (new_node_size - size_in_bytes > kThreshold && | 1736 if (new_node_size - size_in_bytes > kThreshold && |
| 1737 HEAP->incremental_marking()->IsMarkingIncomplete() && | 1737 HEAP->incremental_marking()->IsMarkingIncomplete() && |
| 1738 FLAG_incremental_marking_steps) { | 1738 FLAG_incremental_marking_steps) { |
| 1739 int linear_size = owner_->RoundSizeDownToObjectAlignment(kThreshold); |
| 1739 // We don't want to give too large linear areas to the allocator while | 1740 // We don't want to give too large linear areas to the allocator while |
| 1740 // incremental marking is going on, because we won't check again whether | 1741 // incremental marking is going on, because we won't check again whether |
| 1741 // we want to do another increment until the linear area is used up. | 1742 // we want to do another increment until the linear area is used up. |
| 1742 owner_->Free(new_node->address() + size_in_bytes + kThreshold, | 1743 owner_->Free(new_node->address() + size_in_bytes + linear_size, |
| 1743 new_node_size - size_in_bytes - kThreshold); | 1744 new_node_size - size_in_bytes - linear_size); |
| 1744 owner_->SetTop(new_node->address() + size_in_bytes, | 1745 owner_->SetTop(new_node->address() + size_in_bytes, |
| 1745 new_node->address() + size_in_bytes + kThreshold); | 1746 new_node->address() + size_in_bytes + linear_size); |
| 1746 } else { | 1747 } else { |
| 1747 // Normally we give the rest of the node to the allocator as its new | 1748 // Normally we give the rest of the node to the allocator as its new |
| 1748 // linear allocation area. | 1749 // linear allocation area. |
| 1749 owner_->SetTop(new_node->address() + size_in_bytes, | 1750 owner_->SetTop(new_node->address() + size_in_bytes, |
| 1750 new_node->address() + new_node_size); | 1751 new_node->address() + new_node_size); |
| 1751 } | 1752 } |
| 1752 | 1753 |
| 1753 return new_node; | 1754 return new_node; |
| 1754 } | 1755 } |
| 1755 | 1756 |
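The hunk above replaces the raw kThreshold cutoff with linear_size, i.e. kThreshold rounded down to the owning space's object alignment, so that the linear allocation area handed to SetTop() and the tail returned to Free() both end on properly aligned addresses. A minimal sketch of that rounding step, outside the V8 sources and with a hypothetical helper name, assuming the alignment is a power of two:

#include <cassert>
#include <cstddef>

// Hypothetical stand-in for a RoundSizeDownToObjectAlignment-style helper:
// round `size` down to a multiple of `alignment` (assumed a power of two).
static std::size_t RoundSizeDown(std::size_t size, std::size_t alignment) {
  assert((alignment & (alignment - 1)) == 0);  // power-of-two check
  return size & ~(alignment - 1);
}

int main() {
  // E.g. with an assumed 32-byte code alignment, a 1000-byte threshold
  // rounds down to 992, so the carved-off linear area ends on an aligned
  // address instead of splitting the free-list node at an odd offset.
  assert(RoundSizeDown(1000, 32) == 992);
  return 0;
}

In the patch itself the granularity comes from the space (RoundSizeDownToObjectAlignment), presumably so that spaces with stricter alignment requirements, such as the code space, can round with a different step than regular pointer-aligned spaces.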
| (...skipping 80 matching lines...) |
| 1836 // Mark the old linear allocation area with a free space map so it can be | 1837 // Mark the old linear allocation area with a free space map so it can be |
| 1837 // skipped when scanning the heap. | 1838 // skipped when scanning the heap. |
| 1838 int old_linear_size = limit() - top(); | 1839 int old_linear_size = limit() - top(); |
| 1839 Free(top(), old_linear_size); | 1840 Free(top(), old_linear_size); |
| 1840 SetTop(NULL, NULL); | 1841 SetTop(NULL, NULL); |
| 1841 } | 1842 } |
| 1842 | 1843 |
| 1843 | 1844 |
| 1844 bool PagedSpace::ReserveSpace(int size_in_bytes) { | 1845 bool PagedSpace::ReserveSpace(int size_in_bytes) { |
| 1845 ASSERT(size_in_bytes <= Page::kMaxHeapObjectSize); | 1846 ASSERT(size_in_bytes <= Page::kMaxHeapObjectSize); |
| 1846 ASSERT(size_in_bytes == RoundUp(size_in_bytes, kPointerSize)); | 1847 ASSERT(size_in_bytes == RoundSizeDownToObjectAlignment(size_in_bytes)); |
| 1847 Address current_top = allocation_info_.top; | 1848 Address current_top = allocation_info_.top; |
| 1848 Address new_top = current_top + size_in_bytes; | 1849 Address new_top = current_top + size_in_bytes; |
| 1849 if (new_top <= allocation_info_.limit) return true; | 1850 if (new_top <= allocation_info_.limit) return true; |
| 1850 | 1851 |
| 1851 HeapObject* new_area = free_list_.Allocate(size_in_bytes); | 1852 HeapObject* new_area = free_list_.Allocate(size_in_bytes); |
| 1852 if (new_area == NULL) new_area = SlowAllocateRaw(size_in_bytes); | 1853 if (new_area == NULL) new_area = SlowAllocateRaw(size_in_bytes); |
| 1853 if (new_area == NULL) return false; | 1854 if (new_area == NULL) return false; |
| 1854 | 1855 |
| 1855 int old_linear_size = limit() - top(); | 1856 int old_linear_size = limit() - top(); |
| 1856 // Mark the old linear allocation area with a free space so it can be | 1857 // Mark the old linear allocation area with a free space so it can be |
| (...skipping 570 matching lines...) |
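Two related details sit in the hunk above. First, before a linear allocation area is abandoned (SetTop(NULL, NULL)), the unused gap between top() and limit() is handed to Free(), which marks it with a free-space map so heap scans can step over it. Second, PagedSpace::ReserveSpace now asserts that the requested size is already rounded to the space's object alignment rather than merely pointer-aligned, and keeps its fast path: if bumping allocation_info_.top by the request stays within allocation_info_.limit, the space is already reserved; only otherwise does it fall back to the free list and SlowAllocateRaw. A small sketch of that bump-pointer check, with hypothetical types and names rather than the V8 ones:

#include <cstddef>
#include <cstdint>

// Sketch only, not the V8 types: a linear allocation area delimited by
// top (next free byte) and limit (one past the end of the area).
struct AllocationInfo {
  std::uint8_t* top;
  std::uint8_t* limit;
};

// True if an already-aligned request of `size_in_bytes` fits in the current
// linear area without touching the free list.
static bool FitsInLinearArea(const AllocationInfo& info,
                             std::size_t size_in_bytes) {
  return info.top + size_in_bytes <= info.limit;
}

int main() {
  std::uint8_t area[256];
  AllocationInfo info = {area, area + sizeof(area)};
  return FitsInLinearArea(info, 128) ? 0 : 1;  // fits: exit code 0
}

The design point is that the fast path costs a single pointer comparison; all alignment work has to happen before the call, which is exactly what the tightened assert enforces.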
| 2427 for (HeapObject* obj = obj_it.Next(); obj != NULL; obj = obj_it.Next()) { | 2428 for (HeapObject* obj = obj_it.Next(); obj != NULL; obj = obj_it.Next()) { |
| 2428 if (obj->IsCode()) { | 2429 if (obj->IsCode()) { |
| 2429 Code* code = Code::cast(obj); | 2430 Code* code = Code::cast(obj); |
| 2430 isolate->code_kind_statistics()[code->kind()] += code->Size(); | 2431 isolate->code_kind_statistics()[code->kind()] += code->Size(); |
| 2431 } | 2432 } |
| 2432 } | 2433 } |
| 2433 } | 2434 } |
| 2434 #endif // DEBUG | 2435 #endif // DEBUG |
| 2435 | 2436 |
| 2436 } } // namespace v8::internal | 2437 } } // namespace v8::internal |
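The final hunk is unchanged debug-only bookkeeping: it walks every heap object, filters for Code objects, and adds each object's size to a per-kind counter on the isolate. A toy sketch of the same aggregation pattern, with made-up kinds and sizes rather than the V8 heap iterator:

#include <cstddef>
#include <map>
#include <vector>

// Hypothetical stand-ins for the heap walk in the debug-only loop: every
// "code object" carries a kind and a size, and sizes are summed per kind.
enum class CodeKind { kFunction, kStub, kBuiltin };

struct CodeObject {
  CodeKind kind;
  std::size_t size;
};

int main() {
  std::vector<CodeObject> heap = {{CodeKind::kFunction, 128},
                                  {CodeKind::kStub, 64},
                                  {CodeKind::kFunction, 256}};
  std::map<CodeKind, std::size_t> stats;
  for (const CodeObject& code : heap) stats[code.kind] += code.size;
  return stats[CodeKind::kFunction] == 384 ? 0 : 1;  // 128 + 256
}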