Index: test/cctest/test-heap.cc
===================================================================
--- test/cctest/test-heap.cc (revision 9327)
+++ test/cctest/test-heap.cc (working copy)
@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2011 the V8 project authors. All rights reserved.
 
 #include <stdlib.h>
 
@@ -838,49 +838,6 @@
 }
 
 
-TEST(LargeObjectSpaceContains) {
-  InitializeVM();
-
-  HEAP->CollectGarbage(NEW_SPACE);
-
-  Address current_top = HEAP->new_space()->top();
-  Page* page = Page::FromAddress(current_top);
-  Address current_page = page->address();
-  Address next_page = current_page + Page::kPageSize;
-  int bytes_to_page = static_cast<int>(next_page - current_top);
-  if (bytes_to_page <= FixedArray::kHeaderSize) {
-    // Alas, need to cross another page to be able to
-    // put desired value.
-    next_page += Page::kPageSize;
-    bytes_to_page = static_cast<int>(next_page - current_top);
-  }
-  CHECK(bytes_to_page > FixedArray::kHeaderSize);
-
-  intptr_t* flags_ptr = &Page::FromAddress(next_page)->flags_;
-  Address flags_addr = reinterpret_cast<Address>(flags_ptr);
-
-  int bytes_to_allocate =
-      static_cast<int>(flags_addr - current_top) + kPointerSize;
-
-  int n_elements = (bytes_to_allocate - FixedArray::kHeaderSize) /
-      kPointerSize;
-  CHECK_EQ(bytes_to_allocate, FixedArray::SizeFor(n_elements));
-  FixedArray* array = FixedArray::cast(
-      HEAP->AllocateFixedArray(n_elements)->ToObjectChecked());
-
-  int index = n_elements - 1;
-  CHECK_EQ(flags_ptr,
-           HeapObject::RawField(array, FixedArray::OffsetOfElementAt(index)));
-  array->set(index, Smi::FromInt(0));
-  // This chould have turned next page into LargeObjectPage:
-  // CHECK(Page::FromAddress(next_page)->IsLargeObjectPage());
-
-  HeapObject* addr = HeapObject::FromAddress(next_page + 2 * kPointerSize);
-  CHECK(HEAP->new_space()->Contains(addr));
-  CHECK(!HEAP->lo_space()->Contains(addr));
-}
-
-
 TEST(EmptyHandleEscapeFrom) {
   InitializeVM();
 
@@ -907,8 +864,7 @@
   InitializeVM();
 
   // Increase the chance of 'bump-the-pointer' allocation in old space.
-  bool force_compaction = true;
-  HEAP->CollectAllGarbage(force_compaction);
+  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
 
   v8::HandleScope scope;
 
@@ -975,12 +931,6 @@
     return;
   }
   CHECK(HEAP->old_pointer_space()->Contains(clone->address()));
-
-  // Step 5: verify validity of region dirty marks.
-  Address clone_addr = clone->address();
-  Page* page = Page::FromAddress(clone_addr);
-  // Check that region covering inobject property 1 is marked dirty.
-  CHECK(page->IsRegionDirty(clone_addr + (object_size - kPointerSize)));
 }
 
 
@@ -1010,17 +960,17 @@
   Handle<JSFunction> function(JSFunction::cast(func_value));
   CHECK(function->shared()->is_compiled());
 
-  HEAP->CollectAllGarbage(true);
-  HEAP->CollectAllGarbage(true);
+  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
 
   CHECK(function->shared()->is_compiled());
 
-  HEAP->CollectAllGarbage(true);
-  HEAP->CollectAllGarbage(true);
-  HEAP->CollectAllGarbage(true);
-  HEAP->CollectAllGarbage(true);
-  HEAP->CollectAllGarbage(true);
-  HEAP->CollectAllGarbage(true);
+  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
 
   // foo should no longer be in the compilation cache
   CHECK(!function->shared()->is_compiled() || function->IsOptimized());
@@ -1109,7 +1059,7 @@
     }
 
     // Mark compact handles the weak references.
-    HEAP->CollectAllGarbage(true);
+    HEAP->CollectAllGarbage(Heap::kNoGCFlags);
     CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));
 
     // Get rid of f3 and f5 in the same way.
@@ -1118,21 +1068,21 @@
       HEAP->PerformScavenge();
       CHECK_EQ(opt ? 4 : 0, CountOptimizedUserFunctions(ctx[i]));
     }
-    HEAP->CollectAllGarbage(true);
+    HEAP->CollectAllGarbage(Heap::kNoGCFlags);
     CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
     CompileRun("f5=null");
     for (int j = 0; j < 10; j++) {
       HEAP->PerformScavenge();
       CHECK_EQ(opt ? 3 : 0, CountOptimizedUserFunctions(ctx[i]));
     }
-    HEAP->CollectAllGarbage(true);
+    HEAP->CollectAllGarbage(Heap::kNoGCFlags);
     CHECK_EQ(opt ? 2 : 0, CountOptimizedUserFunctions(ctx[i]));
 
     ctx[i]->Exit();
   }
 
   // Force compilation cache cleanup.
-  HEAP->CollectAllGarbage(true);
+  HEAP->CollectAllGarbage(Heap::kNoGCFlags);
 
   // Dispose the global contexts one by one.
   for (int i = 0; i < kNumTestContexts; i++) {
@@ -1146,7 +1096,7 @@
     }
 
     // Mark compact handles the weak references.
-    HEAP->CollectAllGarbage(true);
+    HEAP->CollectAllGarbage(Heap::kNoGCFlags);
     CHECK_EQ(kNumTestContexts - i - 1, CountGlobalContexts());
   }
 
@@ -1161,7 +1111,7 @@
   Handle<Object> object(HEAP->global_contexts_list());
   while (!object->IsUndefined()) {
     count++;
-    if (count == n) HEAP->CollectAllGarbage(true);
+    if (count == n) HEAP->CollectAllGarbage(Heap::kNoGCFlags);
     object =
         Handle<Object>(Context::cast(*object)->get(Context::NEXT_CONTEXT_LINK));
   }
@@ -1180,7 +1130,7 @@
   while (object->IsJSFunction() &&
          !Handle<JSFunction>::cast(object)->IsBuiltin()) {
     count++;
-    if (count == n) HEAP->CollectAllGarbage(true);
+    if (count == n) HEAP->CollectAllGarbage(Heap::kNoGCFlags);
     object = Handle<Object>(
         Object::cast(JSFunction::cast(*object)->next_function_link()));
   }
@@ -1240,29 +1190,33 @@
 
 TEST(TestSizeOfObjectsVsHeapIteratorPrecision) {
   InitializeVM();
+  HEAP->EnsureHeapIsIterable();
   intptr_t size_of_objects_1 = HEAP->SizeOfObjects();
-  HeapIterator iterator(HeapIterator::kFilterFreeListNodes);
+  HeapIterator iterator;
   intptr_t size_of_objects_2 = 0;
   for (HeapObject* obj = iterator.next();
        obj != NULL;
        obj = iterator.next()) {
     size_of_objects_2 += obj->Size();
   }
-  // Delta must be within 1% of the larger result.
+  // Delta must be within 5% of the larger result.
+  // TODO(gc): Tighten this up by distinguishing between byte
+  // arrays that are real and those that merely mark free space
+  // on the heap.
   if (size_of_objects_1 > size_of_objects_2) {
     intptr_t delta = size_of_objects_1 - size_of_objects_2;
     PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
            "Iterator: %" V8_PTR_PREFIX "d, "
            "delta: %" V8_PTR_PREFIX "d\n",
            size_of_objects_1, size_of_objects_2, delta);
-    CHECK_GT(size_of_objects_1 / 100, delta);
+    CHECK_GT(size_of_objects_1 / 20, delta);
   } else {
     intptr_t delta = size_of_objects_2 - size_of_objects_1;
     PrintF("Heap::SizeOfObjects: %" V8_PTR_PREFIX "d, "
            "Iterator: %" V8_PTR_PREFIX "d, "
            "delta: %" V8_PTR_PREFIX "d\n",
            size_of_objects_1, size_of_objects_2, delta);
-    CHECK_GT(size_of_objects_2 / 100, delta);
+    CHECK_GT(size_of_objects_2 / 20, delta);
   }
 }
 
@@ -1273,8 +1227,8 @@
       : a_(a), b_(b), a_found_(false), b_found_(false) {}
   bool a_found() { return a_found_; }
   bool b_found() { return b_found_; }
-  void IterateHeap(HeapIterator::HeapObjectsFiltering mode) {
-    HeapIterator iterator(mode);
+  void IterateHeap() {
+    HeapIterator iterator;
     for (HeapObject* obj = iterator.next();
          obj != NULL;
          obj = iterator.next()) {
@@ -1290,40 +1244,3 @@
   bool a_found_;
   bool b_found_;
 };
-
-TEST(HeapIteratorFilterUnreachable) {
-  InitializeVM();
-  v8::HandleScope scope;
-  CompileRun("a = {}; b = {};");
-  v8::Handle<Object> a(ISOLATE->context()->global()->GetProperty(
-      *FACTORY->LookupAsciiSymbol("a"))->ToObjectChecked());
-  v8::Handle<Object> b(ISOLATE->context()->global()->GetProperty(
-      *FACTORY->LookupAsciiSymbol("b"))->ToObjectChecked());
-  CHECK_NE(*a, *b);
-  {
-    HeapIteratorTestHelper helper(*a, *b);
-    helper.IterateHeap(HeapIterator::kFilterUnreachable);
-    CHECK(helper.a_found());
-    CHECK(helper.b_found());
-  }
-  CHECK(ISOLATE->context()->global()->DeleteProperty(
-      *FACTORY->LookupAsciiSymbol("a"), JSObject::FORCE_DELETION));
-  // We ensure that GC will not happen, so our raw pointer stays valid.
-  AssertNoAllocation no_alloc;
-  Object* a_saved = *a;
-  a.Clear();
-  // Verify that "a" object still resides in the heap...
-  {
-    HeapIteratorTestHelper helper(a_saved, *b);
-    helper.IterateHeap(HeapIterator::kNoFiltering);
-    CHECK(helper.a_found());
-    CHECK(helper.b_found());
-  }
-  // ...but is now unreachable.
-  {
-    HeapIteratorTestHelper helper(a_saved, *b);
-    helper.IterateHeap(HeapIterator::kFilterUnreachable);
-    CHECK(!helper.a_found());
-    CHECK(helper.b_found());
-  }
-}