| OLD | NEW |
| 1 // Copyright 2015 the V8 project authors. All rights reserved. | 1 // Copyright 2015 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "test/cctest/cctest.h" | 5 #include "test/cctest/cctest.h" |
| 6 #include "test/cctest/heap/heap-tester.h" | 6 #include "test/cctest/heap/heap-tester.h" |
| 7 #include "test/cctest/heap/utils-inl.h" | 7 #include "test/cctest/heap/utils-inl.h" |
| 8 | 8 |
| 9 namespace v8 { | 9 namespace v8 { |
| 10 namespace internal { | 10 namespace internal { |
| (...skipping 17 matching lines...) |
| 28 // we can reach the state of a half aborted page. | 28 // we can reach the state of a half aborted page. |
| 29 FLAG_concurrent_sweeping = false; | 29 FLAG_concurrent_sweeping = false; |
| 30 FLAG_manual_evacuation_candidates_selection = true; | 30 FLAG_manual_evacuation_candidates_selection = true; |
| 31 CcTest::InitializeVM(); | 31 CcTest::InitializeVM(); |
| 32 Isolate* isolate = CcTest::i_isolate(); | 32 Isolate* isolate = CcTest::i_isolate(); |
| 33 Heap* heap = isolate->heap(); | 33 Heap* heap = isolate->heap(); |
| 34 { | 34 { |
| 35 HandleScope scope1(isolate); | 35 HandleScope scope1(isolate); |
| 36 PageIterator it(heap->old_space()); | 36 PageIterator it(heap->old_space()); |
| 37 while (it.has_next()) { | 37 while (it.has_next()) { |
| 38 it.next()->SetFlag(Page::NEVER_ALLOCATE_ON_PAGE); | 38 it.next()->MarkNeverAllocateForTesting(); |
| 39 } | 39 } |
| 40 | 40 |
| 41 { | 41 { |
| 42 HandleScope scope2(isolate); | 42 HandleScope scope2(isolate); |
| 43 CHECK(heap->old_space()->Expand()); | 43 CHECK(heap->old_space()->Expand()); |
| 44 auto compaction_page_handles = | 44 auto compaction_page_handles = |
| 45 CreatePadding(heap, Page::kAllocatableMemory, TENURED); | 45 CreatePadding(heap, Page::kAllocatableMemory, TENURED); |
| 46 Page* to_be_aborted_page = | 46 Page* to_be_aborted_page = |
| 47 Page::FromAddress(compaction_page_handles.front()->address()); | 47 Page::FromAddress(compaction_page_handles.front()->address()); |
| 48 to_be_aborted_page->SetFlag( | 48 to_be_aborted_page->SetFlag( |
| (...skipping 24 matching lines...) |
| 73 | 73 |
| 74 const int object_size = 128 * KB; | 74 const int object_size = 128 * KB; |
| 75 | 75 |
| 76 CcTest::InitializeVM(); | 76 CcTest::InitializeVM(); |
| 77 Isolate* isolate = CcTest::i_isolate(); | 77 Isolate* isolate = CcTest::i_isolate(); |
| 78 Heap* heap = isolate->heap(); | 78 Heap* heap = isolate->heap(); |
| 79 { | 79 { |
| 80 HandleScope scope1(isolate); | 80 HandleScope scope1(isolate); |
| 81 PageIterator it(heap->old_space()); | 81 PageIterator it(heap->old_space()); |
| 82 while (it.has_next()) { | 82 while (it.has_next()) { |
| 83 it.next()->SetFlag(Page::NEVER_ALLOCATE_ON_PAGE); | 83 it.next()->MarkNeverAllocateForTesting(); |
| 84 } | 84 } |
| 85 | 85 |
| 86 { | 86 { |
| 87 HandleScope scope2(isolate); | 87 HandleScope scope2(isolate); |
| 88 // Fill another page with objects of size {object_size} (last one is | 88 // Fill another page with objects of size {object_size} (last one is |
| 89 // properly adjusted). | 89 // properly adjusted). |
| 90 CHECK(heap->old_space()->Expand()); | 90 CHECK(heap->old_space()->Expand()); |
| 91 auto compaction_page_handles = | 91 auto compaction_page_handles = |
| 92 CreatePadding(heap, Page::kAllocatableMemory, TENURED, object_size); | 92 CreatePadding(heap, Page::kAllocatableMemory, TENURED, object_size); |
| 93 Page* to_be_aborted_page = | 93 Page* to_be_aborted_page = |
| (...skipping 54 matching lines...) |
| 148 CcTest::InitializeVM(); | 148 CcTest::InitializeVM(); |
| 149 Isolate* isolate = CcTest::i_isolate(); | 149 Isolate* isolate = CcTest::i_isolate(); |
| 150 Heap* heap = isolate->heap(); | 150 Heap* heap = isolate->heap(); |
| 151 { | 151 { |
| 152 HandleScope scope1(isolate); | 152 HandleScope scope1(isolate); |
| 153 Handle<FixedArray> root_array = | 153 Handle<FixedArray> root_array = |
| 154 isolate->factory()->NewFixedArray(10, TENURED); | 154 isolate->factory()->NewFixedArray(10, TENURED); |
| 155 | 155 |
| 156 PageIterator it(heap->old_space()); | 156 PageIterator it(heap->old_space()); |
| 157 while (it.has_next()) { | 157 while (it.has_next()) { |
| 158 it.next()->SetFlag(Page::NEVER_ALLOCATE_ON_PAGE); | 158 it.next()->MarkNeverAllocateForTesting(); |
| 159 } | 159 } |
| 160 | 160 |
| 161 Page* to_be_aborted_page = nullptr; | 161 Page* to_be_aborted_page = nullptr; |
| 162 { | 162 { |
| 163 HandleScope temporary_scope(isolate); | 163 HandleScope temporary_scope(isolate); |
| 164 // Fill a fresh page with objects of size {object_size} (last one is | 164 // Fill a fresh page with objects of size {object_size} (last one is |
| 165 // properly adjusted). | 165 // properly adjusted). |
| 166 CHECK(heap->old_space()->Expand()); | 166 CHECK(heap->old_space()->Expand()); |
| 167 std::vector<Handle<FixedArray>> compaction_page_handles = | 167 std::vector<Handle<FixedArray>> compaction_page_handles = |
| 168 CreatePadding(heap, Page::kAllocatableMemory, TENURED, object_size); | 168 CreatePadding(heap, Page::kAllocatableMemory, TENURED, object_size); |
| (...skipping 65 matching lines...) |
| 234 | 234 |
| 235 CcTest::InitializeVM(); | 235 CcTest::InitializeVM(); |
| 236 Isolate* isolate = CcTest::i_isolate(); | 236 Isolate* isolate = CcTest::i_isolate(); |
| 237 Heap* heap = isolate->heap(); | 237 Heap* heap = isolate->heap(); |
| 238 { | 238 { |
| 239 HandleScope scope1(isolate); | 239 HandleScope scope1(isolate); |
| 240 Handle<FixedArray> root_array = | 240 Handle<FixedArray> root_array = |
| 241 isolate->factory()->NewFixedArray(10, TENURED); | 241 isolate->factory()->NewFixedArray(10, TENURED); |
| 242 PageIterator it(heap->old_space()); | 242 PageIterator it(heap->old_space()); |
| 243 while (it.has_next()) { | 243 while (it.has_next()) { |
| 244 it.next()->SetFlag(Page::NEVER_ALLOCATE_ON_PAGE); | 244 it.next()->MarkNeverAllocateForTesting(); |
| 245 } | 245 } |
| 246 | 246 |
| 247 Page* to_be_aborted_page = nullptr; | 247 Page* to_be_aborted_page = nullptr; |
| 248 { | 248 { |
| 249 HandleScope temporary_scope(isolate); | 249 HandleScope temporary_scope(isolate); |
| 250 // Fill another page with objects of size {object_size} (last one is | 250 // Fill another page with objects of size {object_size} (last one is |
| 251 // properly adjusted). | 251 // properly adjusted). |
| 252 CHECK(heap->old_space()->Expand()); | 252 CHECK(heap->old_space()->Expand()); |
| 253 auto compaction_page_handles = | 253 auto compaction_page_handles = |
| 254 CreatePadding(heap, Page::kAllocatableMemory, TENURED, object_size); | 254 CreatePadding(heap, Page::kAllocatableMemory, TENURED, object_size); |
| (...skipping 76 matching lines...) |
| 331 // If store buffer entries are not properly filtered/reset for aborted | 331 // If store buffer entries are not properly filtered/reset for aborted |
| 332 // pages we have now a broken address at an object slot in old space and | 332 // pages we have now a broken address at an object slot in old space and |
| 333 // the following scavenge will crash. | 333 // the following scavenge will crash. |
| 334 heap->CollectGarbage(NEW_SPACE); | 334 heap->CollectGarbage(NEW_SPACE); |
| 335 } | 335 } |
| 336 } | 336 } |
| 337 } | 337 } |
| 338 | 338 |
| 339 } // namespace internal | 339 } // namespace internal |
| 340 } // namespace v8 | 340 } // namespace v8 |
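
The only functional change in this patch is mechanical: every test call site that previously set the raw NEVER_ALLOCATE_ON_PAGE flag now goes through Page::MarkNeverAllocateForTesting(). Below is a minimal, self-contained sketch of that refactoring pattern. The Page and Flag types and the helper's body here are illustrative stand-ins, not V8's actual declarations; any extra bookkeeping the real helper performs (for example, free-list eviction) is an assumption and is not shown by this diff.

```cpp
#include <cassert>
#include <cstdint>

// Toy stand-in for V8's Page; only enough to illustrate routing test-only
// flag manipulation through a named helper instead of raw SetFlag() calls.
class Page {
 public:
  enum Flag : uint32_t { NEVER_ALLOCATE_ON_PAGE = 1u << 0 };

  void SetFlag(Flag flag) { flags_ |= flag; }
  bool IsFlagSet(Flag flag) const { return (flags_ & flag) != 0; }

  // Assumed shape of the helper: guard against double-marking, then set the
  // same flag the old call sites set directly. The real V8 helper may also
  // do space-level bookkeeping, which this sketch does not try to reproduce.
  void MarkNeverAllocateForTesting() {
    assert(!IsFlagSet(NEVER_ALLOCATE_ON_PAGE));
    SetFlag(NEVER_ALLOCATE_ON_PAGE);
  }

 private:
  uint32_t flags_ = 0;
};

int main() {
  Page page;
  page.MarkNeverAllocateForTesting();
  assert(page.IsFlagSet(Page::NEVER_ALLOCATE_ON_PAGE));
  return 0;
}
```

The benefit of the helper is that the intent ("this page must never be allocated on again; test-only") is visible at each call site, and any invariants around marking live in one place instead of being repeated across the four tests touched above.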