Index: test/cctest/heap/test-page-promotion.cc
diff --git a/test/cctest/heap/test-page-promotion.cc b/test/cctest/heap/test-page-promotion.cc
new file mode 100644
index 0000000000000000000000000000000000000000..4ec2e2a416460c7cc55de7bebc087fb0af8ee23b
--- /dev/null
+++ b/test/cctest/heap/test-page-promotion.cc
@@ -0,0 +1,135 @@
+// Copyright 2016 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/heap/array-buffer-tracker.h"
+#include "test/cctest/cctest.h"
+#include "test/cctest/heap/heap-utils.h"
+
+namespace {
+
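+// Returns a new isolate with page promotion enabled, a 0% promotion
+// threshold, and a semi-space large enough to hold more than one page.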
+v8::Isolate* NewIsolateForPagePromotion() {
+  i::FLAG_page_promotion = true;
+  i::FLAG_page_promotion_threshold = 0;  // % of the page that must be live.
+  i::FLAG_min_semi_space_size = 8 * (i::Page::kPageSize / i::MB);
+  // We cannot optimize for size as we require a new space with more than one
+  // page.
+  i::FLAG_optimize_for_size = false;
+  // Set max_semi_space_size because it could have been initialized by an
+  // implication of optimize_for_size.
+  i::FLAG_max_semi_space_size = i::FLAG_min_semi_space_size;
+  v8::Isolate::CreateParams create_params;
+  create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
+  v8::Isolate* isolate = v8::Isolate::New(create_params);
+  return isolate;
+}
+
+}  // namespace
+
+namespace v8 {
+namespace internal {
+
+UNINITIALIZED_TEST(PagePromotion_NewToOld) {
+  v8::Isolate* isolate = NewIsolateForPagePromotion();
+  i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
+  {
+    v8::Isolate::Scope isolate_scope(isolate);
+    v8::HandleScope handle_scope(isolate);
+    v8::Context::New(isolate)->Enter();
+    Heap* heap = i_isolate->heap();
+
+    std::vector<Handle<FixedArray>> handles;
+    heap::SimulateFullSpace(heap->new_space(), &handles);
+    heap->CollectGarbage(NEW_SPACE);
+    CHECK_GT(handles.size(), 0u);
+    // The first object in handles should be on the first page.
+    Handle<FixedArray> first_object = handles.front();
+    Page* first_page = Page::FromAddress(first_object->address());
+    // To perform a sanity check on live bytes we need to mark the heap.
+    heap::SimulateIncrementalMarking(heap, true);
+    // Sanity check that the page meets the requirements for promotion.
+    const int threshold_bytes =
+        FLAG_page_promotion_threshold * Page::kAllocatableMemory / 100;
+    CHECK_GE(first_page->LiveBytes(), threshold_bytes);
+
+    // Actual checks: The page is in new space first, but is moved to old space
+    // during a full GC.
+    CHECK(heap->new_space()->ContainsSlow(first_page->address()));
+    CHECK(!heap->old_space()->ContainsSlow(first_page->address()));
+    heap::GcAndSweep(heap, OLD_SPACE);
+    CHECK(!heap->new_space()->ContainsSlow(first_page->address()));
+    CHECK(heap->old_space()->ContainsSlow(first_page->address()));
+  }
+}
+
+UNINITIALIZED_TEST(PagePromotion_NewToNew) {
+  v8::Isolate* isolate = NewIsolateForPagePromotion();
+  Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate);
+  {
+    v8::Isolate::Scope isolate_scope(isolate);
+    v8::HandleScope handle_scope(isolate);
+    v8::Context::New(isolate)->Enter();
+    Heap* heap = i_isolate->heap();
+
+    std::vector<Handle<FixedArray>> handles;
+    heap::SimulateFullSpace(heap->new_space(), &handles);
+    CHECK_GT(handles.size(), 0u);
+    // The last object in handles should be on the last page, which does not
+    // contain the age mark.
+    Handle<FixedArray> last_object = handles.back();
+    Page* to_be_promoted_page = Page::FromAddress(last_object->address());
+    CHECK(to_be_promoted_page->Contains(last_object->address()));
+    CHECK(heap->new_space()->ToSpaceContainsSlow(last_object->address()));
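+    // A full GC should move the whole page within new space instead of
+    // copying the object, so the object stays on the same page.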
+    heap::GcAndSweep(heap, OLD_SPACE);
+    CHECK(heap->new_space()->ToSpaceContainsSlow(last_object->address()));
+    CHECK(to_be_promoted_page->Contains(last_object->address()));
+  }
+}
+
+UNINITIALIZED_TEST(PagePromotion_NewToNewJSArrayBuffer) {
+  // This test makes sure that JSArrayBuffer backing stores are still tracked
+  // after new-to-new promotion.
+  v8::Isolate* isolate = NewIsolateForPagePromotion();
+  Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate);
+  {
+    v8::Isolate::Scope isolate_scope(isolate);
+    v8::HandleScope handle_scope(isolate);
+    v8::Context::New(isolate)->Enter();
+    Heap* heap = i_isolate->heap();
+
+    // Fill the current page which potentially contains the age mark.
+    heap::FillCurrentPage(heap->new_space());
+
+    // Allocate a buffer we would like to check against.
+    Handle<JSArrayBuffer> buffer =
+        i_isolate->factory()->NewJSArrayBuffer(SharedFlag::kNotShared);
+    JSArrayBuffer::SetupAllocatingData(buffer, i_isolate, 100);
+    std::vector<Handle<FixedArray>> handles;
+    // Simulate a full space, filling the interesting page with live objects.
+    heap::SimulateFullSpace(heap->new_space(), &handles);
+    CHECK_GT(handles.size(), 0u);
+    // The first object in handles should be on the same page as the buffer,
+    // i.e., a fresh page that does not contain the age mark.
+    Handle<FixedArray> first_object = handles.front();
+    Page* to_be_promoted_page = Page::FromAddress(first_object->address());
+    CHECK(to_be_promoted_page->Contains(first_object->address()));
+    CHECK(to_be_promoted_page->Contains(buffer->address()));
+    CHECK(heap->new_space()->ToSpaceContainsSlow(first_object->address()));
+    CHECK(heap->new_space()->ToSpaceContainsSlow(buffer->address()));
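+    // After a full GC the page should still be in new space and the array
+    // buffer's backing store should still be tracked.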
+    heap::GcAndSweep(heap, OLD_SPACE);
+    CHECK(heap->new_space()->ToSpaceContainsSlow(first_object->address()));
+    CHECK(heap->new_space()->ToSpaceContainsSlow(buffer->address()));
+    CHECK(to_be_promoted_page->Contains(first_object->address()));
+    CHECK(to_be_promoted_page->Contains(buffer->address()));
+    CHECK(ArrayBufferTracker::IsTracked(*buffer));
+  }
+}
+
+}  // namespace internal
+}  // namespace v8