| OLD | NEW |
| 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 1810 matching lines...) |
| 1821 ASSERT(!FLAG_always_compact); // We only use the freelists with mark-sweep. | 1821 ASSERT(!FLAG_always_compact); // We only use the freelists with mark-sweep. |
| 1822 return node; | 1822 return node; |
| 1823 } | 1823 } |
| 1824 // Search the size list for the best fit. | 1824 // Search the size list for the best fit. |
| 1825 int prev = finger_ < index ? finger_ : kHead; | 1825 int prev = finger_ < index ? finger_ : kHead; |
| 1826 int cur = FindSize(index, &prev); | 1826 int cur = FindSize(index, &prev); |
| 1827 ASSERT(index < cur); | 1827 ASSERT(index < cur); |
| 1828 if (cur == kEnd) { | 1828 if (cur == kEnd) { |
| 1829 // No large enough size in list. | 1829 // No large enough size in list. |
| 1830 *wasted_bytes = 0; | 1830 *wasted_bytes = 0; |
| 1831 return Failure::RetryAfterGC(size_in_bytes, owner_); | 1831 return Failure::RetryAfterGC(owner_); |
| 1832 } | 1832 } |
| 1833 ASSERT(!FLAG_always_compact); // We only use the freelists with mark-sweep. | 1833 ASSERT(!FLAG_always_compact); // We only use the freelists with mark-sweep. |
| 1834 int rem = cur - index; | 1834 int rem = cur - index; |
| 1835 int rem_bytes = rem << kPointerSizeLog2; | 1835 int rem_bytes = rem << kPointerSizeLog2; |
| 1836 FreeListNode* cur_node = FreeListNode::FromAddress(free_[cur].head_node_); | 1836 FreeListNode* cur_node = FreeListNode::FromAddress(free_[cur].head_node_); |
| 1837 ASSERT(cur_node->Size() == (cur << kPointerSizeLog2)); | 1837 ASSERT(cur_node->Size() == (cur << kPointerSizeLog2)); |
| 1838 FreeListNode* rem_node = FreeListNode::FromAddress(free_[cur].head_node_ + | 1838 FreeListNode* rem_node = FreeListNode::FromAddress(free_[cur].head_node_ + |
| 1839 size_in_bytes); | 1839 size_in_bytes); |
| 1840 // Distinguish the cases prev < rem < cur and rem <= prev < cur | 1840 // Distinguish the cases prev < rem < cur and rem <= prev < cur |
| 1841 // to avoid many redundant tests and calls to Insert/RemoveSize. | 1841 // to avoid many redundant tests and calls to Insert/RemoveSize. |
| (...skipping 77 matching lines...) |
| 1919 } else { | 1919 } else { |
| 1920 FreeListNode::FromAddress(tail_)->set_next(node->address()); | 1920 FreeListNode::FromAddress(tail_)->set_next(node->address()); |
| 1921 tail_ = node->address(); | 1921 tail_ = node->address(); |
| 1922 } | 1922 } |
| 1923 available_ += object_size_; | 1923 available_ += object_size_; |
| 1924 } | 1924 } |
| 1925 | 1925 |
| 1926 | 1926 |
| 1927 Object* FixedSizeFreeList::Allocate() { | 1927 Object* FixedSizeFreeList::Allocate() { |
| 1928 if (head_ == NULL) { | 1928 if (head_ == NULL) { |
| 1929 return Failure::RetryAfterGC(object_size_, owner_); | 1929 return Failure::RetryAfterGC(owner_); |
| 1930 } | 1930 } |
| 1931 | 1931 |
| 1932 ASSERT(!FLAG_always_compact); // We only use the freelists with mark-sweep. | 1932 ASSERT(!FLAG_always_compact); // We only use the freelists with mark-sweep. |
| 1933 FreeListNode* node = FreeListNode::FromAddress(head_); | 1933 FreeListNode* node = FreeListNode::FromAddress(head_); |
| 1934 head_ = node->next(); | 1934 head_ = node->next(); |
| 1935 available_ -= object_size_; | 1935 available_ -= object_size_; |
| 1936 return node; | 1936 return node; |
| 1937 } | 1937 } |
| 1938 | 1938 |
| 1939 | 1939 |
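FixedSizeFreeList::Free and FixedSizeFreeList::Allocate above maintain a singly-linked list threaded through the free blocks themselves: Free appends at the tail, Allocate pops the head, and a NULL head means the caller must retry after a GC. A compact, self-contained model of that structure is sketched below; the node layout and names are illustrative assumptions, not V8's FreeListNode:

    #include <cstddef>

    // Toy fixed-size free list. Free blocks double as list nodes, so the
    // list itself costs no extra memory. Illustrative only.
    struct ToyFixedSizeFreeList {
      struct Node { Node* next; };

      Node* head = nullptr;
      Node* tail = nullptr;
      size_t available = 0;
      const size_t object_size;

      explicit ToyFixedSizeFreeList(size_t size) : object_size(size) {}

      void Free(void* block) {
        Node* node = static_cast<Node*>(block);
        node->next = nullptr;
        if (head == nullptr) {
          head = tail = node;        // list was empty
        } else {
          tail->next = node;         // append at the tail, as in the diff
          tail = node;
        }
        available += object_size;
      }

      void* Allocate() {
        if (head == nullptr) return nullptr;  // out of blocks: GC and retry
        Node* node = head;
        head = node->next;
        available -= object_size;
        return node;
      }
    };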
| (...skipping 806 matching lines...) |
| 2746 | 2746 |
| 2747 | 2747 |
| 2748 Object* LargeObjectSpace::AllocateRawInternal(int requested_size, | 2748 Object* LargeObjectSpace::AllocateRawInternal(int requested_size, |
| 2749 int object_size, | 2749 int object_size, |
| 2750 Executability executable) { | 2750 Executability executable) { |
| 2751 ASSERT(0 < object_size && object_size <= requested_size); | 2751 ASSERT(0 < object_size && object_size <= requested_size); |
| 2752 | 2752 |
| 2753 // Check if we want to force a GC before growing the old space further. | 2753 // Check if we want to force a GC before growing the old space further. |
| 2754 // If so, fail the allocation. | 2754 // If so, fail the allocation. |
| 2755 if (!Heap::always_allocate() && Heap::OldGenerationAllocationLimitReached()) { | 2755 if (!Heap::always_allocate() && Heap::OldGenerationAllocationLimitReached()) { |
| 2756 return Failure::RetryAfterGC(requested_size, identity()); | 2756 return Failure::RetryAfterGC(identity()); |
| 2757 } | 2757 } |
| 2758 | 2758 |
| 2759 size_t chunk_size; | 2759 size_t chunk_size; |
| 2760 LargeObjectChunk* chunk = | 2760 LargeObjectChunk* chunk = |
| 2761 LargeObjectChunk::New(requested_size, &chunk_size, executable); | 2761 LargeObjectChunk::New(requested_size, &chunk_size, executable); |
| 2762 if (chunk == NULL) { | 2762 if (chunk == NULL) { |
| 2763 return Failure::RetryAfterGC(requested_size, identity()); | 2763 return Failure::RetryAfterGC(identity()); |
| 2764 } | 2764 } |
| 2765 | 2765 |
| 2766 size_ += static_cast<int>(chunk_size); | 2766 size_ += static_cast<int>(chunk_size); |
| 2767 page_count_++; | 2767 page_count_++; |
| 2768 chunk->set_next(first_chunk_); | 2768 chunk->set_next(first_chunk_); |
| 2769 chunk->set_size(chunk_size); | 2769 chunk->set_size(chunk_size); |
| 2770 first_chunk_ = chunk; | 2770 first_chunk_ = chunk; |
| 2771 | 2771 |
| 2772 // Initialize page header. | 2772 // Initialize page header. |
| 2773 Page* page = Page::FromAddress(RoundUp(chunk->address(), Page::kPageSize)); | 2773 Page* page = Page::FromAddress(RoundUp(chunk->address(), Page::kPageSize)); |
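AllocateRawInternal shows the large-object policy: before growing the old generation it prefers to fail with a retry-after-GC, and otherwise each large object receives its own chunk, which is prepended to a singly-linked chunk list and counted into size_ and page_count_. A schematic of that control flow follows; malloc and the struct names stand in for LargeObjectChunk::New and friends (assumptions, not the real API):

    #include <cstddef>
    #include <cstdlib>

    // Schematic large-object space: one chunk per object, newest first.
    struct Chunk {
      Chunk* next;
      size_t size;
      // ... object payload follows the header ...
    };

    struct ToyLargeObjectSpace {
      Chunk* first_chunk = nullptr;
      size_t size = 0;
      int page_count = 0;

      // limit_reached models Heap::OldGenerationAllocationLimitReached().
      void* AllocateRaw(size_t requested, bool limit_reached) {
        // Prefer a GC over growing the old generation further.
        if (limit_reached) return nullptr;     // caller retries after GC

        size_t chunk_size = sizeof(Chunk) + requested;
        Chunk* chunk = static_cast<Chunk*>(std::malloc(chunk_size));
        if (chunk == nullptr) return nullptr;  // also a retry-after-GC case

        chunk->size = chunk_size;
        chunk->next = first_chunk;             // prepend, as in the diff
        first_chunk = chunk;
        size += chunk_size;
        page_count++;
        return chunk + 1;                      // payload starts past the header
      }
    };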
| (...skipping 266 matching lines...) |
| 3040 for (HeapObject* obj = obj_it.next(); obj != NULL; obj = obj_it.next()) { | 3040 for (HeapObject* obj = obj_it.next(); obj != NULL; obj = obj_it.next()) { |
| 3041 if (obj->IsCode()) { | 3041 if (obj->IsCode()) { |
| 3042 Code* code = Code::cast(obj); | 3042 Code* code = Code::cast(obj); |
| 3043 code_kind_statistics[code->kind()] += code->Size(); | 3043 code_kind_statistics[code->kind()] += code->Size(); |
| 3044 } | 3044 } |
| 3045 } | 3045 } |
| 3046 } | 3046 } |
| 3047 #endif // DEBUG | 3047 #endif // DEBUG |
| 3048 | 3048 |
| 3049 } } // namespace v8::internal | 3049 } } // namespace v8::internal |
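The DEBUG-only loop above walks every heap object and accumulates Code sizes into a per-kind histogram. The same pattern in miniature; CodeKind and the vector standing in for the heap object iterator are illustrative assumptions:

    #include <cstdio>
    #include <vector>

    enum CodeKind { FUNCTION, STUB, BUILTIN, NUMBER_OF_KINDS };
    struct CodeObj { CodeKind kind; int size; };

    int main() {
      // Stand-in for iterating heap objects and filtering obj->IsCode().
      std::vector<CodeObj> code_objects = {
          {FUNCTION, 512}, {STUB, 128}, {FUNCTION, 256}};
      int code_kind_statistics[NUMBER_OF_KINDS] = {0};
      for (const CodeObj& code : code_objects) {
        code_kind_statistics[code.kind] += code.size;  // bucket by kind
      }
      std::printf("FUNCTION: %d bytes, STUB: %d bytes\n",
                  code_kind_statistics[FUNCTION], code_kind_statistics[STUB]);
      return 0;
    }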