| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 2168 matching lines...) |
| 2179 Address top = *top_addr; | 2179 Address top = *top_addr; |
| 2180 // Now force the remaining allocation onto the free list. | 2180 // Now force the remaining allocation onto the free list. |
| 2181 CcTest::heap()->old_space()->EmptyAllocationInfo(); | 2181 CcTest::heap()->old_space()->EmptyAllocationInfo(); |
| 2182 return top; | 2182 return top; |
| 2183 } | 2183 } |
| 2184 | 2184 |
| 2185 | 2185 |
| 2186 // Test the case where allocation must be done from the free list, so filler | 2186 // Test the case where allocation must be done from the free list, so filler |
| 2187 // may precede or follow the object. | 2187 // may precede or follow the object. |
| 2188 TEST(TestAlignedOverAllocation) { | 2188 TEST(TestAlignedOverAllocation) { |
| 2189 Heap* heap = CcTest::heap(); |
| 2190 // Test checks for fillers before and behind objects and requires a fresh |
| 2191 // page and empty free list. |
| 2192 heap::AbandonCurrentlyFreeMemory(heap->old_space()); |
| 2193 // Allocate a dummy object to properly set up the linear allocation info. |
| 2194 AllocationResult dummy = |
| 2195 heap->old_space()->AllocateRawUnaligned(kPointerSize); |
| 2196 CHECK(!dummy.IsRetry()); |
| 2197 heap->CreateFillerObjectAt( |
| 2198 HeapObject::cast(dummy.ToObjectChecked())->address(), kPointerSize, |
| 2199 ClearRecordedSlots::kNo); |
| 2200 |
| 2189 // Double misalignment is 4 on 32-bit platforms, 0 on 64-bit ones. | 2201 // Double misalignment is 4 on 32-bit platforms, 0 on 64-bit ones. |
| 2190 const intptr_t double_misalignment = kDoubleSize - kPointerSize; | 2202 const intptr_t double_misalignment = kDoubleSize - kPointerSize; |
| 2191 Address start; | 2203 Address start; |
| 2192 HeapObject* obj; | 2204 HeapObject* obj; |
| 2193 HeapObject* filler1; | 2205 HeapObject* filler1; |
| 2194 HeapObject* filler2; | 2206 HeapObject* filler2; |
| 2195 if (double_misalignment) { | 2207 if (double_misalignment) { |
| 2196 start = AlignOldSpace(kDoubleAligned, 0); | 2208 start = AlignOldSpace(kDoubleAligned, 0); |
| 2197 obj = OldSpaceAllocateAligned(kPointerSize, kDoubleAligned); | 2209 obj = OldSpaceAllocateAligned(kPointerSize, kDoubleAligned); |
| 2198 // The object is aligned, and a filler object is created after. | 2210 // The object is aligned, and a filler object is created after. |
| (...skipping 1365 matching lines...) |
| 3564 i::FLAG_parallel_compaction = false; | 3576 i::FLAG_parallel_compaction = false; |
| 3565 // Concurrent sweeping adds non determinism, depending on when memory is | 3577 // Concurrent sweeping adds non determinism, depending on when memory is |
| 3566 // available for further reuse. | 3578 // available for further reuse. |
| 3567 i::FLAG_concurrent_sweeping = false; | 3579 i::FLAG_concurrent_sweeping = false; |
| 3568 // Fast evacuation of pages may result in a different page count in old space. | 3580 // Fast evacuation of pages may result in a different page count in old space. |
| 3569 i::FLAG_page_promotion = false; | 3581 i::FLAG_page_promotion = false; |
| 3570 CcTest::InitializeVM(); | 3582 CcTest::InitializeVM(); |
| 3571 Isolate* isolate = CcTest::i_isolate(); | 3583 Isolate* isolate = CcTest::i_isolate(); |
| 3572 Factory* factory = isolate->factory(); | 3584 Factory* factory = isolate->factory(); |
| 3573 Heap* heap = isolate->heap(); | 3585 Heap* heap = isolate->heap(); |
| 3586 |
| 3587 // The initial GC makes sure that objects that are allocated in new space |
| 3588 // don't play a part in the test. |
| 3589 heap->CollectAllAvailableGarbage("initial GC"); |
| 3590 |
| 3574 v8::HandleScope scope(CcTest::isolate()); | 3591 v8::HandleScope scope(CcTest::isolate()); |
| 3575 static const int number_of_test_pages = 20; | 3592 static const int number_of_test_pages = 20; |
| 3576 | 3593 |
| 3577 // Prepare many pages with low live-bytes count. | 3594 // Prepare many pages with low live-bytes count. |
| 3578 PagedSpace* old_space = heap->old_space(); | 3595 PagedSpace* old_space = heap->old_space(); |
| 3579 const int initial_page_count = old_space->CountTotalPages(); | 3596 const int initial_page_count = old_space->CountTotalPages(); |
| 3580 const int overall_page_count = number_of_test_pages + initial_page_count; | 3597 const int overall_page_count = number_of_test_pages + initial_page_count; |
| 3581 for (int i = 0; i < number_of_test_pages; i++) { | 3598 for (int i = 0; i < number_of_test_pages; i++) { |
| 3582 AlwaysAllocateScope always_allocate(isolate); | 3599 AlwaysAllocateScope always_allocate(isolate); |
| 3583 heap::SimulateFullSpace(old_space); | 3600 heap::SimulateFullSpace(old_space); |
| (...skipping 3459 matching lines...) |
| 7043 chunk, chunk->area_end() - kPointerSize, chunk->area_end()); | 7060 chunk, chunk->area_end() - kPointerSize, chunk->area_end()); |
| 7044 slots[chunk->area_end() - kPointerSize] = false; | 7061 slots[chunk->area_end() - kPointerSize] = false; |
| 7045 RememberedSet<OLD_TO_NEW>::Iterate(chunk, [&slots](Address addr) { | 7062 RememberedSet<OLD_TO_NEW>::Iterate(chunk, [&slots](Address addr) { |
| 7046 CHECK(slots[addr]); | 7063 CHECK(slots[addr]); |
| 7047 return KEEP_SLOT; | 7064 return KEEP_SLOT; |
| 7048 }); | 7065 }); |
| 7049 } | 7066 } |
| 7050 | 7067 |
| 7051 } // namespace internal | 7068 } // namespace internal |
| 7052 } // namespace v8 | 7069 } // namespace v8 |
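Note on the alignment arithmetic behind the TestAlignedOverAllocation hunk above: the "double misalignment" of kDoubleSize - kPointerSize is 4 bytes on 32-bit targets and 0 on 64-bit targets, so a pointer-size-aligned allocation top may need a one-word filler before the object to reach a double-aligned address. Below is a minimal, self-contained sketch of that filler computation (plain standalone C++, not V8's internal heap API; the constant and function names only mirror the test's kDoubleSize/kPointerSize by assumption):

  #include <cstdint>
  #include <cstdio>

  // Sketch: bytes of filler needed before an object so that it starts on an
  // 8-byte (double) boundary, given the current allocation top address.
  static uintptr_t DoubleAlignmentFiller(uintptr_t top) {
    const uintptr_t kDoubleSize = 8;  // sizeof(double) on the targets in question
    return (kDoubleSize - (top & (kDoubleSize - 1))) & (kDoubleSize - 1);
  }

  int main() {
    // Pointer-aligned but double-misaligned top (possible only on 32-bit):
    std::printf("%u\n", static_cast<unsigned>(DoubleAlignmentFiller(0x1004)));  // prints 4
    // Already double-aligned top (always the case on 64-bit):
    std::printf("%u\n", static_cast<unsigned>(DoubleAlignmentFiller(0x1008)));  // prints 0
    return 0;
  }

With a 4-byte pointer size, a top such as 0x1004 needs a 4-byte filler; with an 8-byte pointer size every pointer-aligned top is already double-aligned, which is why the test only exercises the misaligned cases inside if (double_misalignment).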