Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(285)

Side by Side Diff: test/cctest/test-heap.cc

Issue 1314863003: [heap] More flag cleanup. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Remove unnecessary parameter. Created 5 years, 3 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 419 matching lines...) Expand 10 before | Expand all | Expand 10 after
430 430
431 431
432 TEST(GarbageCollection) { 432 TEST(GarbageCollection) {
433 CcTest::InitializeVM(); 433 CcTest::InitializeVM();
434 Isolate* isolate = CcTest::i_isolate(); 434 Isolate* isolate = CcTest::i_isolate();
435 Heap* heap = isolate->heap(); 435 Heap* heap = isolate->heap();
436 Factory* factory = isolate->factory(); 436 Factory* factory = isolate->factory();
437 437
438 HandleScope sc(isolate); 438 HandleScope sc(isolate);
439 // Check GC. 439 // Check GC.
440 heap->CollectGarbage(NEW_SPACE); 440 heap->CollectGarbageNewSpace();
441 441
442 Handle<GlobalObject> global(CcTest::i_isolate()->context()->global_object()); 442 Handle<GlobalObject> global(CcTest::i_isolate()->context()->global_object());
443 Handle<String> name = factory->InternalizeUtf8String("theFunction"); 443 Handle<String> name = factory->InternalizeUtf8String("theFunction");
444 Handle<String> prop_name = factory->InternalizeUtf8String("theSlot"); 444 Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
445 Handle<String> prop_namex = factory->InternalizeUtf8String("theSlotx"); 445 Handle<String> prop_namex = factory->InternalizeUtf8String("theSlotx");
446 Handle<String> obj_name = factory->InternalizeUtf8String("theObject"); 446 Handle<String> obj_name = factory->InternalizeUtf8String("theObject");
447 Handle<Smi> twenty_three(Smi::FromInt(23), isolate); 447 Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
448 Handle<Smi> twenty_four(Smi::FromInt(24), isolate); 448 Handle<Smi> twenty_four(Smi::FromInt(24), isolate);
449 449
450 { 450 {
451 HandleScope inner_scope(isolate); 451 HandleScope inner_scope(isolate);
452 // Allocate a function and keep it in global object's property. 452 // Allocate a function and keep it in global object's property.
453 Handle<JSFunction> function = factory->NewFunction(name); 453 Handle<JSFunction> function = factory->NewFunction(name);
454 JSReceiver::SetProperty(global, name, function, SLOPPY).Check(); 454 JSReceiver::SetProperty(global, name, function, SLOPPY).Check();
455 // Allocate an object. Unrooted after leaving the scope. 455 // Allocate an object. Unrooted after leaving the scope.
456 Handle<JSObject> obj = factory->NewJSObject(function); 456 Handle<JSObject> obj = factory->NewJSObject(function);
457 JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check(); 457 JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
458 JSReceiver::SetProperty(obj, prop_namex, twenty_four, SLOPPY).Check(); 458 JSReceiver::SetProperty(obj, prop_namex, twenty_four, SLOPPY).Check();
459 459
460 CHECK_EQ(Smi::FromInt(23), 460 CHECK_EQ(Smi::FromInt(23),
461 *Object::GetProperty(obj, prop_name).ToHandleChecked()); 461 *Object::GetProperty(obj, prop_name).ToHandleChecked());
462 CHECK_EQ(Smi::FromInt(24), 462 CHECK_EQ(Smi::FromInt(24),
463 *Object::GetProperty(obj, prop_namex).ToHandleChecked()); 463 *Object::GetProperty(obj, prop_namex).ToHandleChecked());
464 } 464 }
465 465
466 heap->CollectGarbage(NEW_SPACE); 466 heap->CollectGarbageNewSpace();
467 467
468 // Function should be alive. 468 // Function should be alive.
469 CHECK(Just(true) == JSReceiver::HasOwnProperty(global, name)); 469 CHECK(Just(true) == JSReceiver::HasOwnProperty(global, name));
470 // Check function is retained. 470 // Check function is retained.
471 Handle<Object> func_value = 471 Handle<Object> func_value =
472 Object::GetProperty(global, name).ToHandleChecked(); 472 Object::GetProperty(global, name).ToHandleChecked();
473 CHECK(func_value->IsJSFunction()); 473 CHECK(func_value->IsJSFunction());
474 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value); 474 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
475 475
476 { 476 {
(...skipping 69 matching lines...) Expand 10 before | Expand all | Expand 10 after
546 Handle<Object> i = factory->NewStringFromStaticChars("fisk"); 546 Handle<Object> i = factory->NewStringFromStaticChars("fisk");
547 Handle<Object> u = factory->NewNumber(1.12344); 547 Handle<Object> u = factory->NewNumber(1.12344);
548 548
549 h1 = global_handles->Create(*i); 549 h1 = global_handles->Create(*i);
550 h2 = global_handles->Create(*u); 550 h2 = global_handles->Create(*u);
551 h3 = global_handles->Create(*i); 551 h3 = global_handles->Create(*i);
552 h4 = global_handles->Create(*u); 552 h4 = global_handles->Create(*u);
553 } 553 }
554 554
555 // after gc, it should survive 555 // after gc, it should survive
556 heap->CollectGarbage(NEW_SPACE); 556 heap->CollectGarbageNewSpace();
557 557
558 CHECK((*h1)->IsString()); 558 CHECK((*h1)->IsString());
559 CHECK((*h2)->IsHeapNumber()); 559 CHECK((*h2)->IsHeapNumber());
560 CHECK((*h3)->IsString()); 560 CHECK((*h3)->IsString());
561 CHECK((*h4)->IsHeapNumber()); 561 CHECK((*h4)->IsHeapNumber());
562 562
563 CHECK_EQ(*h3, *h1); 563 CHECK_EQ(*h3, *h1);
564 GlobalHandles::Destroy(h1.location()); 564 GlobalHandles::Destroy(h1.location());
565 GlobalHandles::Destroy(h3.location()); 565 GlobalHandles::Destroy(h3.location());
566 566
(...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after
604 h1 = global_handles->Create(*i); 604 h1 = global_handles->Create(*i);
605 h2 = global_handles->Create(*u); 605 h2 = global_handles->Create(*u);
606 } 606 }
607 607
608 std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234); 608 std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
609 GlobalHandles::MakeWeak(h2.location(), 609 GlobalHandles::MakeWeak(h2.location(),
610 reinterpret_cast<void*>(&handle_and_id), 610 reinterpret_cast<void*>(&handle_and_id),
611 &TestWeakGlobalHandleCallback); 611 &TestWeakGlobalHandleCallback);
612 612
613 // Scavenge treats weak pointers as normal roots. 613 // Scavenge treats weak pointers as normal roots.
614 heap->CollectGarbage(NEW_SPACE); 614 heap->CollectGarbageNewSpace();
615 615
616 CHECK((*h1)->IsString()); 616 CHECK((*h1)->IsString());
617 CHECK((*h2)->IsHeapNumber()); 617 CHECK((*h2)->IsHeapNumber());
618 618
619 CHECK(!WeakPointerCleared); 619 CHECK(!WeakPointerCleared);
620 CHECK(!global_handles->IsNearDeath(h2.location())); 620 CHECK(!global_handles->IsNearDeath(h2.location()));
621 CHECK(!global_handles->IsNearDeath(h1.location())); 621 CHECK(!global_handles->IsNearDeath(h1.location()));
622 622
623 GlobalHandles::Destroy(h1.location()); 623 GlobalHandles::Destroy(h1.location());
624 GlobalHandles::Destroy(h2.location()); 624 GlobalHandles::Destroy(h2.location());
(...skipping 17 matching lines...) Expand all
642 642
643 Handle<Object> i = factory->NewStringFromStaticChars("fisk"); 643 Handle<Object> i = factory->NewStringFromStaticChars("fisk");
644 Handle<Object> u = factory->NewNumber(1.12344); 644 Handle<Object> u = factory->NewNumber(1.12344);
645 645
646 h1 = global_handles->Create(*i); 646 h1 = global_handles->Create(*i);
647 h2 = global_handles->Create(*u); 647 h2 = global_handles->Create(*u);
648 } 648 }
649 649
650 // Make sure the objects are promoted. 650 // Make sure the objects are promoted.
651 heap->CollectGarbage(OLD_SPACE); 651 heap->CollectGarbage(OLD_SPACE);
652 heap->CollectGarbage(NEW_SPACE); 652 heap->CollectGarbageNewSpace();
653 CHECK(!heap->InNewSpace(*h1) && !heap->InNewSpace(*h2)); 653 CHECK(!heap->InNewSpace(*h1) && !heap->InNewSpace(*h2));
654 654
655 std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234); 655 std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
656 GlobalHandles::MakeWeak(h2.location(), 656 GlobalHandles::MakeWeak(h2.location(),
657 reinterpret_cast<void*>(&handle_and_id), 657 reinterpret_cast<void*>(&handle_and_id),
658 &TestWeakGlobalHandleCallback); 658 &TestWeakGlobalHandleCallback);
659 CHECK(!GlobalHandles::IsNearDeath(h1.location())); 659 CHECK(!GlobalHandles::IsNearDeath(h1.location()));
660 CHECK(!GlobalHandles::IsNearDeath(h2.location())); 660 CHECK(!GlobalHandles::IsNearDeath(h2.location()));
661 661
662 // Incremental marking potentially marked handles before they turned weak. 662 // Incremental marking potentially marked handles before they turned weak.
(...skipping 26 matching lines...) Expand all
689 Handle<Object> i = factory->NewStringFromStaticChars("fisk"); 689 Handle<Object> i = factory->NewStringFromStaticChars("fisk");
690 h = global_handles->Create(*i); 690 h = global_handles->Create(*i);
691 } 691 }
692 692
693 std::pair<Handle<Object>*, int> handle_and_id(&h, 1234); 693 std::pair<Handle<Object>*, int> handle_and_id(&h, 1234);
694 GlobalHandles::MakeWeak(h.location(), 694 GlobalHandles::MakeWeak(h.location(),
695 reinterpret_cast<void*>(&handle_and_id), 695 reinterpret_cast<void*>(&handle_and_id),
696 &TestWeakGlobalHandleCallback); 696 &TestWeakGlobalHandleCallback);
697 697
698 // Scavenge does not recognize weak reference. 698 // Scavenge does not recognize weak reference.
699 heap->CollectGarbage(NEW_SPACE); 699 heap->CollectGarbageNewSpace();
700 700
701 CHECK(!WeakPointerCleared); 701 CHECK(!WeakPointerCleared);
702 702
703 // Mark-compact treats weak reference properly. 703 // Mark-compact treats weak reference properly.
704 heap->CollectGarbage(OLD_SPACE); 704 heap->CollectGarbage(OLD_SPACE);
705 705
706 CHECK(WeakPointerCleared); 706 CHECK(WeakPointerCleared);
707 } 707 }
708 708
709 709
(...skipping 779 matching lines...) Expand 10 before | Expand all | Expand 10 after
1489 for (int i = 0; i < kAgingThreshold; i++) { 1489 for (int i = 0; i < kAgingThreshold; i++) {
1490 function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2)); 1490 function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
1491 function2->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2)); 1491 function2->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
1492 } 1492 }
1493 1493
1494 // Simulate incremental marking so that the functions are enqueued as 1494 // Simulate incremental marking so that the functions are enqueued as
1495 // code flushing candidates. Then kill one of the functions. Finally 1495 // code flushing candidates. Then kill one of the functions. Finally
1496 // perform a scavenge while incremental marking is still running. 1496 // perform a scavenge while incremental marking is still running.
1497 SimulateIncrementalMarking(CcTest::heap()); 1497 SimulateIncrementalMarking(CcTest::heap());
1498 *function2.location() = NULL; 1498 *function2.location() = NULL;
1499 CcTest::heap()->CollectGarbage(NEW_SPACE, "test scavenge while marking"); 1499 CcTest::heap()->CollectGarbageNewSpace("test scavenge while marking");
1500 1500
1501 // Simulate one final GC to make sure the candidate queue is sane. 1501 // Simulate one final GC to make sure the candidate queue is sane.
1502 CcTest::heap()->CollectAllGarbage(); 1502 CcTest::heap()->CollectAllGarbage();
1503 CHECK(!function->shared()->is_compiled() || function->IsOptimized()); 1503 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1504 CHECK(!function->is_compiled() || function->IsOptimized()); 1504 CHECK(!function->is_compiled() || function->IsOptimized());
1505 } 1505 }
1506 1506
1507 1507
1508 TEST(TestCodeFlushingIncrementalAbort) { 1508 TEST(TestCodeFlushingIncrementalAbort) {
1509 // If we do not flush code this test is invalid. 1509 // If we do not flush code this test is invalid.
(...skipping 253 matching lines...) Expand 10 before | Expand all | Expand 10 after
1763 OptimizeEmptyFunction("f4"); 1763 OptimizeEmptyFunction("f4");
1764 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i])); 1764 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
1765 OptimizeEmptyFunction("f5"); 1765 OptimizeEmptyFunction("f5");
1766 CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i])); 1766 CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i]));
1767 1767
1768 // Remove function f1, and 1768 // Remove function f1, and
1769 CompileRun("f1=null"); 1769 CompileRun("f1=null");
1770 1770
1771 // Scavenge treats these references as strong. 1771 // Scavenge treats these references as strong.
1772 for (int j = 0; j < 10; j++) { 1772 for (int j = 0; j < 10; j++) {
1773 CcTest::heap()->CollectGarbage(NEW_SPACE); 1773 CcTest::heap()->CollectGarbageNewSpace();
1774 CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i])); 1774 CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i]));
1775 } 1775 }
1776 1776
1777 // Mark compact handles the weak references. 1777 // Mark compact handles the weak references.
1778 isolate->compilation_cache()->Clear(); 1778 isolate->compilation_cache()->Clear();
1779 heap->CollectAllGarbage(); 1779 heap->CollectAllGarbage();
1780 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i])); 1780 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
1781 1781
1782 // Get rid of f3 and f5 in the same way. 1782 // Get rid of f3 and f5 in the same way.
1783 CompileRun("f3=null"); 1783 CompileRun("f3=null");
1784 for (int j = 0; j < 10; j++) { 1784 for (int j = 0; j < 10; j++) {
1785 CcTest::heap()->CollectGarbage(NEW_SPACE); 1785 CcTest::heap()->CollectGarbageNewSpace();
1786 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i])); 1786 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
1787 } 1787 }
1788 CcTest::heap()->CollectAllGarbage(); 1788 CcTest::heap()->CollectAllGarbage();
1789 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i])); 1789 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
1790 CompileRun("f5=null"); 1790 CompileRun("f5=null");
1791 for (int j = 0; j < 10; j++) { 1791 for (int j = 0; j < 10; j++) {
1792 CcTest::heap()->CollectGarbage(NEW_SPACE); 1792 CcTest::heap()->CollectGarbage(NEW_SPACE);
1793 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i])); 1793 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
1794 } 1794 }
1795 CcTest::heap()->CollectAllGarbage(); 1795 CcTest::heap()->CollectAllGarbage();
(...skipping 562 matching lines...) Expand 10 before | Expand all | Expand 10 after
2358 new_capacity = new_space->TotalCapacity(); 2358 new_capacity = new_space->TotalCapacity();
2359 CHECK(old_capacity == new_capacity); 2359 CHECK(old_capacity == new_capacity);
2360 2360
2361 // Explicitly shrinking should not affect space capacity. 2361 // Explicitly shrinking should not affect space capacity.
2362 old_capacity = new_space->TotalCapacity(); 2362 old_capacity = new_space->TotalCapacity();
2363 new_space->Shrink(); 2363 new_space->Shrink();
2364 new_capacity = new_space->TotalCapacity(); 2364 new_capacity = new_space->TotalCapacity();
2365 CHECK(old_capacity == new_capacity); 2365 CHECK(old_capacity == new_capacity);
2366 2366
2367 // Let the scavenger empty the new space. 2367 // Let the scavenger empty the new space.
2368 heap->CollectGarbage(NEW_SPACE); 2368 heap->CollectGarbageNewSpace();
2369 CHECK_LE(new_space->Size(), old_capacity); 2369 CHECK_LE(new_space->Size(), old_capacity);
2370 2370
2371 // Explicitly shrinking should halve the space capacity. 2371 // Explicitly shrinking should halve the space capacity.
2372 old_capacity = new_space->TotalCapacity(); 2372 old_capacity = new_space->TotalCapacity();
2373 new_space->Shrink(); 2373 new_space->Shrink();
2374 new_capacity = new_space->TotalCapacity(); 2374 new_capacity = new_space->TotalCapacity();
2375 CHECK(old_capacity == 2 * new_capacity); 2375 CHECK(old_capacity == 2 * new_capacity);
2376 2376
2377 // Consecutive shrinking should not affect space capacity. 2377 // Consecutive shrinking should not affect space capacity.
2378 old_capacity = new_space->TotalCapacity(); 2378 old_capacity = new_space->TotalCapacity();
(...skipping 434 matching lines...) Expand 10 before | Expand all | Expand 10 after
2813 2813
2814 HEAP_TEST(GCFlags) { 2814 HEAP_TEST(GCFlags) {
2815 CcTest::InitializeVM(); 2815 CcTest::InitializeVM();
2816 Heap* heap = CcTest::heap(); 2816 Heap* heap = CcTest::heap();
2817 2817
2818 heap->set_current_gc_flags(Heap::kNoGCFlags); 2818 heap->set_current_gc_flags(Heap::kNoGCFlags);
2819 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_); 2819 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
2820 2820
2821 // Set the flags to check whether we appropriately reset them after the GC. 2821 // Set the flags to check whether we appropriately reset them after the GC.
2822 heap->set_current_gc_flags(Heap::kAbortIncrementalMarkingMask); 2822 heap->set_current_gc_flags(Heap::kAbortIncrementalMarkingMask);
2823 heap->CollectAllGarbage(Heap::kReduceMemoryFootprintMask); 2823 heap->CollectAllGarbage("GCFlags", Heap::kReduceMemoryFootprintMask);
2824 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_); 2824 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
2825 2825
2826 MarkCompactCollector* collector = heap->mark_compact_collector(); 2826 MarkCompactCollector* collector = heap->mark_compact_collector();
2827 if (collector->sweeping_in_progress()) { 2827 if (collector->sweeping_in_progress()) {
2828 collector->EnsureSweepingCompleted(); 2828 collector->EnsureSweepingCompleted();
2829 } 2829 }
2830 2830
2831 IncrementalMarking* marking = heap->incremental_marking(); 2831 IncrementalMarking* marking = heap->incremental_marking();
2832 marking->Stop(); 2832 marking->Stop();
2833 heap->StartIncrementalMarking(Heap::kReduceMemoryFootprintMask); 2833 heap->StartIncrementalMarking(Heap::kReduceMemoryFootprintMask);
2834 CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask); 2834 CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);
2835 2835
2836 heap->CollectGarbage(NEW_SPACE); 2836 heap->CollectGarbageNewSpace();
2837 // NewSpace scavenges should not overwrite the flags. 2837 // NewSpace scavenges should not overwrite the flags.
2838 CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask); 2838 CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);
2839 2839
2840 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask); 2840 heap->CollectAllGarbage("GCFlags", Heap::kAbortIncrementalMarkingMask);
2841 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_); 2841 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
2842 } 2842 }
2843 2843
2844 2844
2845 TEST(IdleNotificationFinishMarking) { 2845 TEST(IdleNotificationFinishMarking) {
2846 i::FLAG_allow_natives_syntax = true; 2846 i::FLAG_allow_natives_syntax = true;
2847 CcTest::InitializeVM(); 2847 CcTest::InitializeVM();
2848 SimulateFullSpace(CcTest::heap()->old_space()); 2848 SimulateFullSpace(CcTest::heap()->old_space());
2849 IncrementalMarking* marking = CcTest::heap()->incremental_marking(); 2849 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
2850 marking->Stop(); 2850 marking->Stop();
(...skipping 865 matching lines...) Expand 10 before | Expand all | Expand 10 after
3716 CHECK_EQ(1, old_space->CountTotalPages()); 3716 CHECK_EQ(1, old_space->CountTotalPages());
3717 for (int i = 0; i < number_of_test_pages; i++) { 3717 for (int i = 0; i < number_of_test_pages; i++) {
3718 AlwaysAllocateScope always_allocate(isolate); 3718 AlwaysAllocateScope always_allocate(isolate);
3719 SimulateFullSpace(old_space); 3719 SimulateFullSpace(old_space);
3720 factory->NewFixedArray(1, TENURED); 3720 factory->NewFixedArray(1, TENURED);
3721 } 3721 }
3722 CHECK_EQ(number_of_test_pages + 1, old_space->CountTotalPages()); 3722 CHECK_EQ(number_of_test_pages + 1, old_space->CountTotalPages());
3723 3723
3724 // Triggering one GC will cause a lot of garbage to be discovered but 3724 // Triggering one GC will cause a lot of garbage to be discovered but
3725 // evenly spread across all allocated pages. 3725 // evenly spread across all allocated pages.
3726 heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask, 3726 heap->CollectAllGarbage("triggered for preparation",
3727 "triggered for preparation"); 3727 Heap::kFinalizeIncrementalMarkingMask);
3728 CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages()); 3728 CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages());
3729 3729
3730 // Triggering subsequent GCs should cause at least half of the pages 3730 // Triggering subsequent GCs should cause at least half of the pages
3731 // to be released to the OS after at most two cycles. 3731 // to be released to the OS after at most two cycles.
3732 heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask, 3732 heap->CollectAllGarbage("triggered by test 1",
3733 "triggered by test 1"); 3733 Heap::kFinalizeIncrementalMarkingMask);
3734 ;
3734 CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages()); 3735 CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages());
3735 heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask, 3736 heap->CollectAllGarbage("triggered by test 2",
3736 "triggered by test 2"); 3737 Heap::kFinalizeIncrementalMarkingMask);
3737 CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages() * 2); 3738 CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages() * 2);
3738 3739
3739 // Triggering a last-resort GC should cause all pages to be released to the 3740 // Triggering a last-resort GC should cause all pages to be released to the
3740 // OS so that other processes can seize the memory. If we get a failure here 3741 // OS so that other processes can seize the memory. If we get a failure here
3741 // where there are 2 pages left instead of 1, then we should increase the 3742 // where there are 2 pages left instead of 1, then we should increase the
3742 // size of the first page a little in SizeOfFirstPage in spaces.cc. The 3743 // size of the first page a little in SizeOfFirstPage in spaces.cc. The
3743 // first page should be small in order to reduce memory used when the VM 3744 // first page should be small in order to reduce memory used when the VM
3744 // boots, but if the 20 small arrays don't fit on the first page then that's 3745 // boots, but if the 20 small arrays don't fit on the first page then that's
3745 // an indication that it is too small. 3746 // an indication that it is too small.
3746 heap->CollectAllAvailableGarbage("triggered really hard"); 3747 heap->CollectAllAvailableGarbage("triggered really hard");
(...skipping 677 matching lines...) Expand 10 before | Expand all | Expand 10 after
4424 "obj = fastliteralcase(get_standard_literal(), 2);"); 4425 "obj = fastliteralcase(get_standard_literal(), 2);");
4425 4426
4426 // prepare the heap 4427 // prepare the heap
4427 v8::Local<v8::String> mote_code_string = 4428 v8::Local<v8::String> mote_code_string =
4428 v8_str("fastliteralcase(mote, 2.5);"); 4429 v8_str("fastliteralcase(mote, 2.5);");
4429 4430
4430 v8::Local<v8::String> array_name = v8_str("mote"); 4431 v8::Local<v8::String> array_name = v8_str("mote");
4431 CcTest::global()->Set(array_name, v8::Int32::New(CcTest::isolate(), 0)); 4432 CcTest::global()->Set(array_name, v8::Int32::New(CcTest::isolate(), 0));
4432 4433
4433 // First make sure we flip spaces 4434 // First make sure we flip spaces
4434 CcTest::heap()->CollectGarbage(NEW_SPACE); 4435 CcTest::heap()->CollectGarbageNewSpace();
4435 4436
4436 // Allocate the object. 4437 // Allocate the object.
4437 Handle<FixedArray> array_data = factory->NewFixedArray(2, NOT_TENURED); 4438 Handle<FixedArray> array_data = factory->NewFixedArray(2, NOT_TENURED);
4438 array_data->set(0, Smi::FromInt(1)); 4439 array_data->set(0, Smi::FromInt(1));
4439 array_data->set(1, Smi::FromInt(2)); 4440 array_data->set(1, Smi::FromInt(2));
4440 4441
4441 AllocateAllButNBytes(CcTest::heap()->new_space(), 4442 AllocateAllButNBytes(CcTest::heap()->new_space(),
4442 JSArray::kSize + AllocationMemento::kSize + 4443 JSArray::kSize + AllocationMemento::kSize +
4443 kPointerSize); 4444 kPointerSize);
4444 4445
(...skipping 1049 matching lines...) Expand 10 before | Expand all | Expand 10 after
5494 } 5495 }
5495 5496
5496 Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED); 5497 Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED);
5497 Handle<WeakCell> weak_cell2; 5498 Handle<WeakCell> weak_cell2;
5498 { 5499 {
5499 HandleScope inner_scope(isolate); 5500 HandleScope inner_scope(isolate);
5500 weak_cell2 = inner_scope.CloseAndEscape(factory->NewWeakCell(survivor)); 5501 weak_cell2 = inner_scope.CloseAndEscape(factory->NewWeakCell(survivor));
5501 } 5502 }
5502 CHECK(weak_cell1->value()->IsFixedArray()); 5503 CHECK(weak_cell1->value()->IsFixedArray());
5503 CHECK_EQ(*survivor, weak_cell2->value()); 5504 CHECK_EQ(*survivor, weak_cell2->value());
5504 heap->CollectGarbage(NEW_SPACE); 5505 heap->CollectGarbageNewSpace();
5505 CHECK(weak_cell1->value()->IsFixedArray()); 5506 CHECK(weak_cell1->value()->IsFixedArray());
5506 CHECK_EQ(*survivor, weak_cell2->value()); 5507 CHECK_EQ(*survivor, weak_cell2->value());
5507 heap->CollectGarbage(NEW_SPACE); 5508 heap->CollectGarbageNewSpace();
5508 CHECK(weak_cell1->value()->IsFixedArray()); 5509 CHECK(weak_cell1->value()->IsFixedArray());
5509 CHECK_EQ(*survivor, weak_cell2->value()); 5510 CHECK_EQ(*survivor, weak_cell2->value());
5510 heap->CollectAllAvailableGarbage(); 5511 heap->CollectAllAvailableGarbage();
5511 CHECK(weak_cell1->cleared()); 5512 CHECK(weak_cell1->cleared());
5512 CHECK_EQ(*survivor, weak_cell2->value()); 5513 CHECK_EQ(*survivor, weak_cell2->value());
5513 } 5514 }
5514 5515
5515 5516
5516 TEST(WeakCellsWithIncrementalMarking) { 5517 TEST(WeakCellsWithIncrementalMarking) {
5517 CcTest::InitializeVM(); 5518 CcTest::InitializeVM();
(...skipping 10 matching lines...) Expand all
5528 HandleScope inner_scope(isolate); 5529 HandleScope inner_scope(isolate);
5529 Handle<HeapObject> value = 5530 Handle<HeapObject> value =
5530 i == 0 ? survivor : factory->NewFixedArray(1, NOT_TENURED); 5531 i == 0 ? survivor : factory->NewFixedArray(1, NOT_TENURED);
5531 Handle<WeakCell> weak_cell = factory->NewWeakCell(value); 5532 Handle<WeakCell> weak_cell = factory->NewWeakCell(value);
5532 CHECK(weak_cell->value()->IsFixedArray()); 5533 CHECK(weak_cell->value()->IsFixedArray());
5533 IncrementalMarking* marking = heap->incremental_marking(); 5534 IncrementalMarking* marking = heap->incremental_marking();
5534 if (marking->IsStopped()) { 5535 if (marking->IsStopped()) {
5535 heap->StartIncrementalMarking(); 5536 heap->StartIncrementalMarking();
5536 } 5537 }
5537 marking->Step(128, IncrementalMarking::NO_GC_VIA_STACK_GUARD); 5538 marking->Step(128, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
5538 heap->CollectGarbage(NEW_SPACE); 5539 heap->CollectGarbageNewSpace();
5539 CHECK(weak_cell->value()->IsFixedArray()); 5540 CHECK(weak_cell->value()->IsFixedArray());
5540 weak_cells[i] = inner_scope.CloseAndEscape(weak_cell); 5541 weak_cells[i] = inner_scope.CloseAndEscape(weak_cell);
5541 } 5542 }
5542 heap->CollectAllGarbage(); 5543 heap->CollectAllGarbage();
5543 CHECK_EQ(*survivor, weak_cells[0]->value()); 5544 CHECK_EQ(*survivor, weak_cells[0]->value());
5544 for (int i = 1; i < N; i++) { 5545 for (int i = 1; i < N; i++) {
5545 CHECK(weak_cells[i]->cleared()); 5546 CHECK(weak_cells[i]->cleared());
5546 } 5547 }
5547 } 5548 }
5548 5549
(...skipping 185 matching lines...) Expand 10 before | Expand all | Expand 10 after
5734 // If we are in a low memory config, we can't grow to two pages and we can't 5735 // If we are in a low memory config, we can't grow to two pages and we can't
5735 // run this test. This also means the issue we are testing cannot arise, as 5736 // run this test. This also means the issue we are testing cannot arise, as
5736 // there is no fragmentation. 5737 // there is no fragmentation.
5737 if (new_space->IsAtMaximumCapacity()) return; 5738 if (new_space->IsAtMaximumCapacity()) return;
5738 5739
5739 new_space->Grow(); 5740 new_space->Grow();
5740 CHECK(new_space->IsAtMaximumCapacity()); 5741 CHECK(new_space->IsAtMaximumCapacity());
5741 CHECK(2 * old_capacity == new_space->TotalCapacity()); 5742 CHECK(2 * old_capacity == new_space->TotalCapacity());
5742 5743
5743 // Call the scavenger two times to get an empty new space 5744 // Call the scavenger two times to get an empty new space
5744 heap->CollectGarbage(NEW_SPACE); 5745 heap->CollectGarbageNewSpace();
5745 heap->CollectGarbage(NEW_SPACE); 5746 heap->CollectGarbageNewSpace();
5746 5747
5747 // First create a few objects which will survive a scavenge, and will get 5748 // First create a few objects which will survive a scavenge, and will get
5748 // promoted to the old generation later on. These objects will create 5749 // promoted to the old generation later on. These objects will create
5749 // promotion queue entries at the end of the second semi-space page. 5750 // promotion queue entries at the end of the second semi-space page.
5750 const int number_handles = 12; 5751 const int number_handles = 12;
5751 Handle<FixedArray> handles[number_handles]; 5752 Handle<FixedArray> handles[number_handles];
5752 for (int i = 0; i < number_handles; i++) { 5753 for (int i = 0; i < number_handles; i++) {
5753 handles[i] = i_isolate->factory()->NewFixedArray(1, NOT_TENURED); 5754 handles[i] = i_isolate->factory()->NewFixedArray(1, NOT_TENURED);
5754 } 5755 }
5755 heap->CollectGarbage(NEW_SPACE); 5756 heap->CollectGarbageNewSpace();
5756 5757
5757 // Create the first huge object which will exactly fit the first semi-space 5758 // Create the first huge object which will exactly fit the first semi-space
5758 // page. 5759 // page.
5759 int new_linear_size = 5760 int new_linear_size =
5760 static_cast<int>(*heap->new_space()->allocation_limit_address() - 5761 static_cast<int>(*heap->new_space()->allocation_limit_address() -
5761 *heap->new_space()->allocation_top_address()); 5762 *heap->new_space()->allocation_top_address());
5762 int length = new_linear_size / kPointerSize - FixedArray::kHeaderSize; 5763 int length = new_linear_size / kPointerSize - FixedArray::kHeaderSize;
5763 Handle<FixedArray> first = 5764 Handle<FixedArray> first =
5764 i_isolate->factory()->NewFixedArray(length, NOT_TENURED); 5765 i_isolate->factory()->NewFixedArray(length, NOT_TENURED);
5765 CHECK(heap->InNewSpace(*first)); 5766 CHECK(heap->InNewSpace(*first));
5766 5767
5767 // Create the second huge object of maximum allocatable second semi-space 5768 // Create the second huge object of maximum allocatable second semi-space
5768 // page size. 5769 // page size.
5769 new_linear_size = 5770 new_linear_size =
5770 static_cast<int>(*heap->new_space()->allocation_limit_address() - 5771 static_cast<int>(*heap->new_space()->allocation_limit_address() -
5771 *heap->new_space()->allocation_top_address()); 5772 *heap->new_space()->allocation_top_address());
5772 length = Page::kMaxRegularHeapObjectSize / kPointerSize - 5773 length = Page::kMaxRegularHeapObjectSize / kPointerSize -
5773 FixedArray::kHeaderSize; 5774 FixedArray::kHeaderSize;
5774 Handle<FixedArray> second = 5775 Handle<FixedArray> second =
5775 i_isolate->factory()->NewFixedArray(length, NOT_TENURED); 5776 i_isolate->factory()->NewFixedArray(length, NOT_TENURED);
5776 CHECK(heap->InNewSpace(*second)); 5777 CHECK(heap->InNewSpace(*second));
5777 5778
5778 // This scavenge will corrupt memory if the promotion queue is not 5779 // This scavenge will corrupt memory if the promotion queue is not
5779 // evacuated. 5780 // evacuated.
5780 heap->CollectGarbage(NEW_SPACE); 5781 heap->CollectGarbageNewSpace();
5781 } 5782 }
5782 isolate->Dispose(); 5783 isolate->Dispose();
5783 } 5784 }
5784 5785
5785 5786
5786 TEST(Regress388880) { 5787 TEST(Regress388880) {
5787 i::FLAG_expose_gc = true; 5788 i::FLAG_expose_gc = true;
5788 CcTest::InitializeVM(); 5789 CcTest::InitializeVM();
5789 v8::HandleScope scope(CcTest::isolate()); 5790 v8::HandleScope scope(CcTest::isolate());
5790 Isolate* isolate = CcTest::i_isolate(); 5791 Isolate* isolate = CcTest::i_isolate();
(...skipping 404 matching lines...) Expand 10 before | Expand all | Expand 10 after
6195 CHECK_EQ(bytes, static_cast<size_t>(array->Size())); 6196 CHECK_EQ(bytes, static_cast<size_t>(array->Size()));
6196 } 6197 }
6197 6198
6198 6199
6199 TEST(NewSpaceAllocationCounter) { 6200 TEST(NewSpaceAllocationCounter) {
6200 CcTest::InitializeVM(); 6201 CcTest::InitializeVM();
6201 v8::HandleScope scope(CcTest::isolate()); 6202 v8::HandleScope scope(CcTest::isolate());
6202 Isolate* isolate = CcTest::i_isolate(); 6203 Isolate* isolate = CcTest::i_isolate();
6203 Heap* heap = isolate->heap(); 6204 Heap* heap = isolate->heap();
6204 size_t counter1 = heap->NewSpaceAllocationCounter(); 6205 size_t counter1 = heap->NewSpaceAllocationCounter();
6205 heap->CollectGarbage(NEW_SPACE); 6206 heap->CollectGarbageNewSpace();
6206 const size_t kSize = 1024; 6207 const size_t kSize = 1024;
6207 AllocateInSpace(isolate, kSize, NEW_SPACE); 6208 AllocateInSpace(isolate, kSize, NEW_SPACE);
6208 size_t counter2 = heap->NewSpaceAllocationCounter(); 6209 size_t counter2 = heap->NewSpaceAllocationCounter();
6209 CHECK_EQ(kSize, counter2 - counter1); 6210 CHECK_EQ(kSize, counter2 - counter1);
6210 heap->CollectGarbage(NEW_SPACE); 6211 heap->CollectGarbageNewSpace();
6211 size_t counter3 = heap->NewSpaceAllocationCounter(); 6212 size_t counter3 = heap->NewSpaceAllocationCounter();
6212 CHECK_EQ(0U, counter3 - counter2); 6213 CHECK_EQ(0U, counter3 - counter2);
6213 // Test counter overflow. 6214 // Test counter overflow.
6214 size_t max_counter = -1; 6215 size_t max_counter = -1;
6215 heap->set_new_space_allocation_counter(max_counter - 10 * kSize); 6216 heap->set_new_space_allocation_counter(max_counter - 10 * kSize);
6216 size_t start = heap->NewSpaceAllocationCounter(); 6217 size_t start = heap->NewSpaceAllocationCounter();
6217 for (int i = 0; i < 20; i++) { 6218 for (int i = 0; i < 20; i++) {
6218 AllocateInSpace(isolate, kSize, NEW_SPACE); 6219 AllocateInSpace(isolate, kSize, NEW_SPACE);
6219 size_t counter = heap->NewSpaceAllocationCounter(); 6220 size_t counter = heap->NewSpaceAllocationCounter();
6220 CHECK_EQ(kSize, counter - start); 6221 CHECK_EQ(kSize, counter - start);
6221 start = counter; 6222 start = counter;
6222 } 6223 }
6223 } 6224 }
6224 6225
6225 6226
6226 TEST(OldSpaceAllocationCounter) { 6227 TEST(OldSpaceAllocationCounter) {
6227 CcTest::InitializeVM(); 6228 CcTest::InitializeVM();
6228 v8::HandleScope scope(CcTest::isolate()); 6229 v8::HandleScope scope(CcTest::isolate());
6229 Isolate* isolate = CcTest::i_isolate(); 6230 Isolate* isolate = CcTest::i_isolate();
6230 Heap* heap = isolate->heap(); 6231 Heap* heap = isolate->heap();
6231 size_t counter1 = heap->OldGenerationAllocationCounter(); 6232 size_t counter1 = heap->OldGenerationAllocationCounter();
6232 heap->CollectGarbage(NEW_SPACE); 6233 heap->CollectGarbageNewSpace();
6233 heap->CollectGarbage(NEW_SPACE); 6234 heap->CollectGarbageNewSpace();
6234 const size_t kSize = 1024; 6235 const size_t kSize = 1024;
6235 AllocateInSpace(isolate, kSize, OLD_SPACE); 6236 AllocateInSpace(isolate, kSize, OLD_SPACE);
6236 size_t counter2 = heap->OldGenerationAllocationCounter(); 6237 size_t counter2 = heap->OldGenerationAllocationCounter();
6237 // TODO(ulan): replace all CHECK_LE with CHECK_EQ after v8:4148 is fixed. 6238 // TODO(ulan): replace all CHECK_LE with CHECK_EQ after v8:4148 is fixed.
6238 CHECK_LE(kSize, counter2 - counter1); 6239 CHECK_LE(kSize, counter2 - counter1);
6239 heap->CollectGarbage(NEW_SPACE); 6240 heap->CollectGarbageNewSpace();
6240 size_t counter3 = heap->OldGenerationAllocationCounter(); 6241 size_t counter3 = heap->OldGenerationAllocationCounter();
6241 CHECK_EQ(0u, counter3 - counter2); 6242 CHECK_EQ(0u, counter3 - counter2);
6242 AllocateInSpace(isolate, kSize, OLD_SPACE); 6243 AllocateInSpace(isolate, kSize, OLD_SPACE);
6243 heap->CollectGarbage(OLD_SPACE); 6244 heap->CollectGarbage(OLD_SPACE);
6244 size_t counter4 = heap->OldGenerationAllocationCounter(); 6245 size_t counter4 = heap->OldGenerationAllocationCounter();
6245 CHECK_LE(kSize, counter4 - counter3); 6246 CHECK_LE(kSize, counter4 - counter3);
6246 // Test counter overflow. 6247 // Test counter overflow.
6247 size_t max_counter = -1; 6248 size_t max_counter = -1;
6248 heap->set_old_generation_allocation_counter(max_counter - 10 * kSize); 6249 heap->set_old_generation_allocation_counter(max_counter - 10 * kSize);
6249 size_t start = heap->OldGenerationAllocationCounter(); 6250 size_t start = heap->OldGenerationAllocationCounter();
(...skipping 316 matching lines...) Expand 10 before | Expand all | Expand 10 after
6566 { 6567 {
6567 SharedFunctionInfo::Iterator iterator(isolate); 6568 SharedFunctionInfo::Iterator iterator(isolate);
6568 while (iterator.Next()) sfi_count--; 6569 while (iterator.Next()) sfi_count--;
6569 } 6570 }
6570 6571
6571 CHECK_EQ(0, sfi_count); 6572 CHECK_EQ(0, sfi_count);
6572 } 6573 }
6573 6574
6574 } // namespace internal 6575 } // namespace internal
6575 } // namespace v8 6576 } // namespace v8
OLDNEW
« src/heap/heap.h ('K') | « test/cctest/test-debug.cc ('k') | test/cctest/test-log.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698