Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(220)

Side by Side Diff: test/cctest/test-heap.cc

Issue 1303393004: Revert of [heap] More flag cleanup. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 3 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « test/cctest/test-debug.cc ('k') | test/cctest/test-log.cc » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 417 matching lines...) Expand 10 before | Expand all | Expand 10 after
428 428
429 429
430 TEST(GarbageCollection) { 430 TEST(GarbageCollection) {
431 CcTest::InitializeVM(); 431 CcTest::InitializeVM();
432 Isolate* isolate = CcTest::i_isolate(); 432 Isolate* isolate = CcTest::i_isolate();
433 Heap* heap = isolate->heap(); 433 Heap* heap = isolate->heap();
434 Factory* factory = isolate->factory(); 434 Factory* factory = isolate->factory();
435 435
436 HandleScope sc(isolate); 436 HandleScope sc(isolate);
437 // Check GC. 437 // Check GC.
438 heap->CollectGarbageNewSpace(); 438 heap->CollectGarbage(NEW_SPACE);
439 439
440 Handle<GlobalObject> global(CcTest::i_isolate()->context()->global_object()); 440 Handle<GlobalObject> global(CcTest::i_isolate()->context()->global_object());
441 Handle<String> name = factory->InternalizeUtf8String("theFunction"); 441 Handle<String> name = factory->InternalizeUtf8String("theFunction");
442 Handle<String> prop_name = factory->InternalizeUtf8String("theSlot"); 442 Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
443 Handle<String> prop_namex = factory->InternalizeUtf8String("theSlotx"); 443 Handle<String> prop_namex = factory->InternalizeUtf8String("theSlotx");
444 Handle<String> obj_name = factory->InternalizeUtf8String("theObject"); 444 Handle<String> obj_name = factory->InternalizeUtf8String("theObject");
445 Handle<Smi> twenty_three(Smi::FromInt(23), isolate); 445 Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
446 Handle<Smi> twenty_four(Smi::FromInt(24), isolate); 446 Handle<Smi> twenty_four(Smi::FromInt(24), isolate);
447 447
448 { 448 {
449 HandleScope inner_scope(isolate); 449 HandleScope inner_scope(isolate);
450 // Allocate a function and keep it in global object's property. 450 // Allocate a function and keep it in global object's property.
451 Handle<JSFunction> function = factory->NewFunction(name); 451 Handle<JSFunction> function = factory->NewFunction(name);
452 JSReceiver::SetProperty(global, name, function, SLOPPY).Check(); 452 JSReceiver::SetProperty(global, name, function, SLOPPY).Check();
453 // Allocate an object. Unrooted after leaving the scope. 453 // Allocate an object. Unrooted after leaving the scope.
454 Handle<JSObject> obj = factory->NewJSObject(function); 454 Handle<JSObject> obj = factory->NewJSObject(function);
455 JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check(); 455 JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
456 JSReceiver::SetProperty(obj, prop_namex, twenty_four, SLOPPY).Check(); 456 JSReceiver::SetProperty(obj, prop_namex, twenty_four, SLOPPY).Check();
457 457
458 CHECK_EQ(Smi::FromInt(23), 458 CHECK_EQ(Smi::FromInt(23),
459 *Object::GetProperty(obj, prop_name).ToHandleChecked()); 459 *Object::GetProperty(obj, prop_name).ToHandleChecked());
460 CHECK_EQ(Smi::FromInt(24), 460 CHECK_EQ(Smi::FromInt(24),
461 *Object::GetProperty(obj, prop_namex).ToHandleChecked()); 461 *Object::GetProperty(obj, prop_namex).ToHandleChecked());
462 } 462 }
463 463
464 heap->CollectGarbageNewSpace(); 464 heap->CollectGarbage(NEW_SPACE);
465 465
466 // Function should be alive. 466 // Function should be alive.
467 CHECK(Just(true) == JSReceiver::HasOwnProperty(global, name)); 467 CHECK(Just(true) == JSReceiver::HasOwnProperty(global, name));
468 // Check function is retained. 468 // Check function is retained.
469 Handle<Object> func_value = 469 Handle<Object> func_value =
470 Object::GetProperty(global, name).ToHandleChecked(); 470 Object::GetProperty(global, name).ToHandleChecked();
471 CHECK(func_value->IsJSFunction()); 471 CHECK(func_value->IsJSFunction());
472 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value); 472 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
473 473
474 { 474 {
(...skipping 69 matching lines...) Expand 10 before | Expand all | Expand 10 after
544 Handle<Object> i = factory->NewStringFromStaticChars("fisk"); 544 Handle<Object> i = factory->NewStringFromStaticChars("fisk");
545 Handle<Object> u = factory->NewNumber(1.12344); 545 Handle<Object> u = factory->NewNumber(1.12344);
546 546
547 h1 = global_handles->Create(*i); 547 h1 = global_handles->Create(*i);
548 h2 = global_handles->Create(*u); 548 h2 = global_handles->Create(*u);
549 h3 = global_handles->Create(*i); 549 h3 = global_handles->Create(*i);
550 h4 = global_handles->Create(*u); 550 h4 = global_handles->Create(*u);
551 } 551 }
552 552
553 // after gc, it should survive 553 // after gc, it should survive
554 heap->CollectGarbageNewSpace(); 554 heap->CollectGarbage(NEW_SPACE);
555 555
556 CHECK((*h1)->IsString()); 556 CHECK((*h1)->IsString());
557 CHECK((*h2)->IsHeapNumber()); 557 CHECK((*h2)->IsHeapNumber());
558 CHECK((*h3)->IsString()); 558 CHECK((*h3)->IsString());
559 CHECK((*h4)->IsHeapNumber()); 559 CHECK((*h4)->IsHeapNumber());
560 560
561 CHECK_EQ(*h3, *h1); 561 CHECK_EQ(*h3, *h1);
562 GlobalHandles::Destroy(h1.location()); 562 GlobalHandles::Destroy(h1.location());
563 GlobalHandles::Destroy(h3.location()); 563 GlobalHandles::Destroy(h3.location());
564 564
(...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after
602 h1 = global_handles->Create(*i); 602 h1 = global_handles->Create(*i);
603 h2 = global_handles->Create(*u); 603 h2 = global_handles->Create(*u);
604 } 604 }
605 605
606 std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234); 606 std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
607 GlobalHandles::MakeWeak(h2.location(), 607 GlobalHandles::MakeWeak(h2.location(),
608 reinterpret_cast<void*>(&handle_and_id), 608 reinterpret_cast<void*>(&handle_and_id),
609 &TestWeakGlobalHandleCallback); 609 &TestWeakGlobalHandleCallback);
610 610
611 // Scavenge treats weak pointers as normal roots. 611 // Scavenge treats weak pointers as normal roots.
612 heap->CollectGarbageNewSpace(); 612 heap->CollectGarbage(NEW_SPACE);
613 613
614 CHECK((*h1)->IsString()); 614 CHECK((*h1)->IsString());
615 CHECK((*h2)->IsHeapNumber()); 615 CHECK((*h2)->IsHeapNumber());
616 616
617 CHECK(!WeakPointerCleared); 617 CHECK(!WeakPointerCleared);
618 CHECK(!global_handles->IsNearDeath(h2.location())); 618 CHECK(!global_handles->IsNearDeath(h2.location()));
619 CHECK(!global_handles->IsNearDeath(h1.location())); 619 CHECK(!global_handles->IsNearDeath(h1.location()));
620 620
621 GlobalHandles::Destroy(h1.location()); 621 GlobalHandles::Destroy(h1.location());
622 GlobalHandles::Destroy(h2.location()); 622 GlobalHandles::Destroy(h2.location());
(...skipping 17 matching lines...) Expand all
640 640
641 Handle<Object> i = factory->NewStringFromStaticChars("fisk"); 641 Handle<Object> i = factory->NewStringFromStaticChars("fisk");
642 Handle<Object> u = factory->NewNumber(1.12344); 642 Handle<Object> u = factory->NewNumber(1.12344);
643 643
644 h1 = global_handles->Create(*i); 644 h1 = global_handles->Create(*i);
645 h2 = global_handles->Create(*u); 645 h2 = global_handles->Create(*u);
646 } 646 }
647 647
648 // Make sure the objects are promoted. 648 // Make sure the objects are promoted.
649 heap->CollectGarbage(OLD_SPACE); 649 heap->CollectGarbage(OLD_SPACE);
650 heap->CollectGarbageNewSpace(); 650 heap->CollectGarbage(NEW_SPACE);
651 CHECK(!heap->InNewSpace(*h1) && !heap->InNewSpace(*h2)); 651 CHECK(!heap->InNewSpace(*h1) && !heap->InNewSpace(*h2));
652 652
653 std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234); 653 std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
654 GlobalHandles::MakeWeak(h2.location(), 654 GlobalHandles::MakeWeak(h2.location(),
655 reinterpret_cast<void*>(&handle_and_id), 655 reinterpret_cast<void*>(&handle_and_id),
656 &TestWeakGlobalHandleCallback); 656 &TestWeakGlobalHandleCallback);
657 CHECK(!GlobalHandles::IsNearDeath(h1.location())); 657 CHECK(!GlobalHandles::IsNearDeath(h1.location()));
658 CHECK(!GlobalHandles::IsNearDeath(h2.location())); 658 CHECK(!GlobalHandles::IsNearDeath(h2.location()));
659 659
660 // Incremental marking potentially marked handles before they turned weak. 660 // Incremental marking potentially marked handles before they turned weak.
(...skipping 26 matching lines...) Expand all
687 Handle<Object> i = factory->NewStringFromStaticChars("fisk"); 687 Handle<Object> i = factory->NewStringFromStaticChars("fisk");
688 h = global_handles->Create(*i); 688 h = global_handles->Create(*i);
689 } 689 }
690 690
691 std::pair<Handle<Object>*, int> handle_and_id(&h, 1234); 691 std::pair<Handle<Object>*, int> handle_and_id(&h, 1234);
692 GlobalHandles::MakeWeak(h.location(), 692 GlobalHandles::MakeWeak(h.location(),
693 reinterpret_cast<void*>(&handle_and_id), 693 reinterpret_cast<void*>(&handle_and_id),
694 &TestWeakGlobalHandleCallback); 694 &TestWeakGlobalHandleCallback);
695 695
 696 // Scavenge does not recognize weak reference. 696 // Scavenge does not recognize weak reference.
697 heap->CollectGarbageNewSpace(); 697 heap->CollectGarbage(NEW_SPACE);
698 698
699 CHECK(!WeakPointerCleared); 699 CHECK(!WeakPointerCleared);
700 700
701 // Mark-compact treats weak reference properly. 701 // Mark-compact treats weak reference properly.
702 heap->CollectGarbage(OLD_SPACE); 702 heap->CollectGarbage(OLD_SPACE);
703 703
704 CHECK(WeakPointerCleared); 704 CHECK(WeakPointerCleared);
705 } 705 }
706 706
707 707
(...skipping 798 matching lines...) Expand 10 before | Expand all | Expand 10 after
1506 for (int i = 0; i < kAgingThreshold; i++) { 1506 for (int i = 0; i < kAgingThreshold; i++) {
1507 function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2)); 1507 function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
1508 function2->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2)); 1508 function2->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
1509 } 1509 }
1510 1510
1511 // Simulate incremental marking so that the functions are enqueued as 1511 // Simulate incremental marking so that the functions are enqueued as
1512 // code flushing candidates. Then kill one of the functions. Finally 1512 // code flushing candidates. Then kill one of the functions. Finally
1513 // perform a scavenge while incremental marking is still running. 1513 // perform a scavenge while incremental marking is still running.
1514 SimulateIncrementalMarking(CcTest::heap()); 1514 SimulateIncrementalMarking(CcTest::heap());
1515 *function2.location() = NULL; 1515 *function2.location() = NULL;
1516 CcTest::heap()->CollectGarbageNewSpace("test scavenge while marking"); 1516 CcTest::heap()->CollectGarbage(NEW_SPACE, "test scavenge while marking");
1517 1517
1518 // Simulate one final GC to make sure the candidate queue is sane. 1518 // Simulate one final GC to make sure the candidate queue is sane.
1519 CcTest::heap()->CollectAllGarbage(); 1519 CcTest::heap()->CollectAllGarbage();
1520 CHECK(!function->shared()->is_compiled() || function->IsOptimized()); 1520 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1521 CHECK(!function->is_compiled() || function->IsOptimized()); 1521 CHECK(!function->is_compiled() || function->IsOptimized());
1522 } 1522 }
1523 1523
1524 1524
1525 TEST(TestCodeFlushingIncrementalAbort) { 1525 TEST(TestCodeFlushingIncrementalAbort) {
1526 // If we do not flush code this test is invalid. 1526 // If we do not flush code this test is invalid.
(...skipping 253 matching lines...) Expand 10 before | Expand all | Expand 10 after
1780 OptimizeEmptyFunction("f4"); 1780 OptimizeEmptyFunction("f4");
1781 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i])); 1781 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
1782 OptimizeEmptyFunction("f5"); 1782 OptimizeEmptyFunction("f5");
1783 CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i])); 1783 CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i]));
1784 1784
1785 // Remove function f1, and 1785 // Remove function f1, and
1786 CompileRun("f1=null"); 1786 CompileRun("f1=null");
1787 1787
1788 // Scavenge treats these references as strong. 1788 // Scavenge treats these references as strong.
1789 for (int j = 0; j < 10; j++) { 1789 for (int j = 0; j < 10; j++) {
1790 CcTest::heap()->CollectGarbageNewSpace(); 1790 CcTest::heap()->CollectGarbage(NEW_SPACE);
1791 CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i])); 1791 CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i]));
1792 } 1792 }
1793 1793
1794 // Mark compact handles the weak references. 1794 // Mark compact handles the weak references.
1795 isolate->compilation_cache()->Clear(); 1795 isolate->compilation_cache()->Clear();
1796 heap->CollectAllGarbage(); 1796 heap->CollectAllGarbage();
1797 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i])); 1797 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
1798 1798
1799 // Get rid of f3 and f5 in the same way. 1799 // Get rid of f3 and f5 in the same way.
1800 CompileRun("f3=null"); 1800 CompileRun("f3=null");
1801 for (int j = 0; j < 10; j++) { 1801 for (int j = 0; j < 10; j++) {
1802 CcTest::heap()->CollectGarbageNewSpace(); 1802 CcTest::heap()->CollectGarbage(NEW_SPACE);
1803 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i])); 1803 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
1804 } 1804 }
1805 CcTest::heap()->CollectAllGarbage(); 1805 CcTest::heap()->CollectAllGarbage();
1806 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i])); 1806 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
1807 CompileRun("f5=null"); 1807 CompileRun("f5=null");
1808 for (int j = 0; j < 10; j++) { 1808 for (int j = 0; j < 10; j++) {
1809 CcTest::heap()->CollectGarbage(NEW_SPACE); 1809 CcTest::heap()->CollectGarbage(NEW_SPACE);
1810 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i])); 1810 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
1811 } 1811 }
1812 CcTest::heap()->CollectAllGarbage(); 1812 CcTest::heap()->CollectAllGarbage();
(...skipping 562 matching lines...) Expand 10 before | Expand all | Expand 10 after
2375 new_capacity = new_space->TotalCapacity(); 2375 new_capacity = new_space->TotalCapacity();
2376 CHECK(old_capacity == new_capacity); 2376 CHECK(old_capacity == new_capacity);
2377 2377
2378 // Explicitly shrinking should not affect space capacity. 2378 // Explicitly shrinking should not affect space capacity.
2379 old_capacity = new_space->TotalCapacity(); 2379 old_capacity = new_space->TotalCapacity();
2380 new_space->Shrink(); 2380 new_space->Shrink();
2381 new_capacity = new_space->TotalCapacity(); 2381 new_capacity = new_space->TotalCapacity();
2382 CHECK(old_capacity == new_capacity); 2382 CHECK(old_capacity == new_capacity);
2383 2383
2384 // Let the scavenger empty the new space. 2384 // Let the scavenger empty the new space.
2385 heap->CollectGarbageNewSpace(); 2385 heap->CollectGarbage(NEW_SPACE);
2386 CHECK_LE(new_space->Size(), old_capacity); 2386 CHECK_LE(new_space->Size(), old_capacity);
2387 2387
2388 // Explicitly shrinking should halve the space capacity. 2388 // Explicitly shrinking should halve the space capacity.
2389 old_capacity = new_space->TotalCapacity(); 2389 old_capacity = new_space->TotalCapacity();
2390 new_space->Shrink(); 2390 new_space->Shrink();
2391 new_capacity = new_space->TotalCapacity(); 2391 new_capacity = new_space->TotalCapacity();
2392 CHECK(old_capacity == 2 * new_capacity); 2392 CHECK(old_capacity == 2 * new_capacity);
2393 2393
2394 // Consecutive shrinking should not affect space capacity. 2394 // Consecutive shrinking should not affect space capacity.
2395 old_capacity = new_space->TotalCapacity(); 2395 old_capacity = new_space->TotalCapacity();
(...skipping 434 matching lines...) Expand 10 before | Expand all | Expand 10 after
2830 2830
2831 HEAP_TEST(GCFlags) { 2831 HEAP_TEST(GCFlags) {
2832 CcTest::InitializeVM(); 2832 CcTest::InitializeVM();
2833 Heap* heap = CcTest::heap(); 2833 Heap* heap = CcTest::heap();
2834 2834
2835 heap->set_current_gc_flags(Heap::kNoGCFlags); 2835 heap->set_current_gc_flags(Heap::kNoGCFlags);
2836 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_); 2836 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
2837 2837
 2838 // Set the flags to check whether they are appropriately reset after the GC. 2838 // Set the flags to check whether they are appropriately reset after the GC.
2839 heap->set_current_gc_flags(Heap::kAbortIncrementalMarkingMask); 2839 heap->set_current_gc_flags(Heap::kAbortIncrementalMarkingMask);
2840 heap->CollectAllGarbage("GCFlags", Heap::kReduceMemoryFootprintMask); 2840 heap->CollectAllGarbage(Heap::kReduceMemoryFootprintMask);
2841 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_); 2841 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
2842 2842
2843 MarkCompactCollector* collector = heap->mark_compact_collector(); 2843 MarkCompactCollector* collector = heap->mark_compact_collector();
2844 if (collector->sweeping_in_progress()) { 2844 if (collector->sweeping_in_progress()) {
2845 collector->EnsureSweepingCompleted(); 2845 collector->EnsureSweepingCompleted();
2846 } 2846 }
2847 2847
2848 IncrementalMarking* marking = heap->incremental_marking(); 2848 IncrementalMarking* marking = heap->incremental_marking();
2849 marking->Stop(); 2849 marking->Stop();
2850 heap->StartIncrementalMarking(Heap::kReduceMemoryFootprintMask); 2850 heap->StartIncrementalMarking(Heap::kReduceMemoryFootprintMask);
2851 CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask); 2851 CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);
2852 2852
2853 heap->CollectGarbageNewSpace(); 2853 heap->CollectGarbage(NEW_SPACE);
2854 // NewSpace scavenges should not overwrite the flags. 2854 // NewSpace scavenges should not overwrite the flags.
2855 CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask); 2855 CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);
2856 2856
2857 heap->CollectAllGarbage("GCFlags", Heap::kAbortIncrementalMarkingMask); 2857 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
2858 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_); 2858 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
2859 } 2859 }
2860 2860
2861 2861
2862 TEST(IdleNotificationFinishMarking) { 2862 TEST(IdleNotificationFinishMarking) {
2863 i::FLAG_allow_natives_syntax = true; 2863 i::FLAG_allow_natives_syntax = true;
2864 CcTest::InitializeVM(); 2864 CcTest::InitializeVM();
2865 SimulateFullSpace(CcTest::heap()->old_space()); 2865 SimulateFullSpace(CcTest::heap()->old_space());
2866 IncrementalMarking* marking = CcTest::heap()->incremental_marking(); 2866 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
2867 marking->Stop(); 2867 marking->Stop();
(...skipping 865 matching lines...) Expand 10 before | Expand all | Expand 10 after
3733 CHECK_EQ(1, old_space->CountTotalPages()); 3733 CHECK_EQ(1, old_space->CountTotalPages());
3734 for (int i = 0; i < number_of_test_pages; i++) { 3734 for (int i = 0; i < number_of_test_pages; i++) {
3735 AlwaysAllocateScope always_allocate(isolate); 3735 AlwaysAllocateScope always_allocate(isolate);
3736 SimulateFullSpace(old_space); 3736 SimulateFullSpace(old_space);
3737 factory->NewFixedArray(1, TENURED); 3737 factory->NewFixedArray(1, TENURED);
3738 } 3738 }
3739 CHECK_EQ(number_of_test_pages + 1, old_space->CountTotalPages()); 3739 CHECK_EQ(number_of_test_pages + 1, old_space->CountTotalPages());
3740 3740
3741 // Triggering one GC will cause a lot of garbage to be discovered but 3741 // Triggering one GC will cause a lot of garbage to be discovered but
3742 // even spread across all allocated pages. 3742 // even spread across all allocated pages.
3743 heap->CollectAllGarbage("triggered for preparation", 3743 heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
3744 Heap::kFinalizeIncrementalMarkingMask); 3744 "triggered for preparation");
3745 CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages()); 3745 CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages());
3746 3746
3747 // Triggering subsequent GCs should cause at least half of the pages 3747 // Triggering subsequent GCs should cause at least half of the pages
3748 // to be released to the OS after at most two cycles. 3748 // to be released to the OS after at most two cycles.
3749 heap->CollectAllGarbage("triggered by test 1", 3749 heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
3750 Heap::kFinalizeIncrementalMarkingMask); 3750 "triggered by test 1");
3751 ;
3752 CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages()); 3751 CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages());
3753 heap->CollectAllGarbage("triggered by test 2", 3752 heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask,
3754 Heap::kFinalizeIncrementalMarkingMask); 3753 "triggered by test 2");
3755 CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages() * 2); 3754 CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages() * 2);
3756 3755
3757 // Triggering a last-resort GC should cause all pages to be released to the 3756 // Triggering a last-resort GC should cause all pages to be released to the
3758 // OS so that other processes can seize the memory. If we get a failure here 3757 // OS so that other processes can seize the memory. If we get a failure here
3759 // where there are 2 pages left instead of 1, then we should increase the 3758 // where there are 2 pages left instead of 1, then we should increase the
3760 // size of the first page a little in SizeOfFirstPage in spaces.cc. The 3759 // size of the first page a little in SizeOfFirstPage in spaces.cc. The
3761 // first page should be small in order to reduce memory used when the VM 3760 // first page should be small in order to reduce memory used when the VM
3762 // boots, but if the 20 small arrays don't fit on the first page then that's 3761 // boots, but if the 20 small arrays don't fit on the first page then that's
3763 // an indication that it is too small. 3762 // an indication that it is too small.
3764 heap->CollectAllAvailableGarbage("triggered really hard"); 3763 heap->CollectAllAvailableGarbage("triggered really hard");
(...skipping 677 matching lines...) Expand 10 before | Expand all | Expand 10 after
4442 "obj = fastliteralcase(get_standard_literal(), 2);"); 4441 "obj = fastliteralcase(get_standard_literal(), 2);");
4443 4442
4444 // prepare the heap 4443 // prepare the heap
4445 v8::Local<v8::String> mote_code_string = 4444 v8::Local<v8::String> mote_code_string =
4446 v8_str("fastliteralcase(mote, 2.5);"); 4445 v8_str("fastliteralcase(mote, 2.5);");
4447 4446
4448 v8::Local<v8::String> array_name = v8_str("mote"); 4447 v8::Local<v8::String> array_name = v8_str("mote");
4449 CcTest::global()->Set(array_name, v8::Int32::New(CcTest::isolate(), 0)); 4448 CcTest::global()->Set(array_name, v8::Int32::New(CcTest::isolate(), 0));
4450 4449
4451 // First make sure we flip spaces 4450 // First make sure we flip spaces
4452 CcTest::heap()->CollectGarbageNewSpace(); 4451 CcTest::heap()->CollectGarbage(NEW_SPACE);
4453 4452
4454 // Allocate the object. 4453 // Allocate the object.
4455 Handle<FixedArray> array_data = factory->NewFixedArray(2, NOT_TENURED); 4454 Handle<FixedArray> array_data = factory->NewFixedArray(2, NOT_TENURED);
4456 array_data->set(0, Smi::FromInt(1)); 4455 array_data->set(0, Smi::FromInt(1));
4457 array_data->set(1, Smi::FromInt(2)); 4456 array_data->set(1, Smi::FromInt(2));
4458 4457
4459 AllocateAllButNBytes(CcTest::heap()->new_space(), 4458 AllocateAllButNBytes(CcTest::heap()->new_space(),
4460 JSArray::kSize + AllocationMemento::kSize + 4459 JSArray::kSize + AllocationMemento::kSize +
4461 kPointerSize); 4460 kPointerSize);
4462 4461
(...skipping 1049 matching lines...) Expand 10 before | Expand all | Expand 10 after
5512 } 5511 }
5513 5512
5514 Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED); 5513 Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED);
5515 Handle<WeakCell> weak_cell2; 5514 Handle<WeakCell> weak_cell2;
5516 { 5515 {
5517 HandleScope inner_scope(isolate); 5516 HandleScope inner_scope(isolate);
5518 weak_cell2 = inner_scope.CloseAndEscape(factory->NewWeakCell(survivor)); 5517 weak_cell2 = inner_scope.CloseAndEscape(factory->NewWeakCell(survivor));
5519 } 5518 }
5520 CHECK(weak_cell1->value()->IsFixedArray()); 5519 CHECK(weak_cell1->value()->IsFixedArray());
5521 CHECK_EQ(*survivor, weak_cell2->value()); 5520 CHECK_EQ(*survivor, weak_cell2->value());
5522 heap->CollectGarbageNewSpace(); 5521 heap->CollectGarbage(NEW_SPACE);
5523 CHECK(weak_cell1->value()->IsFixedArray()); 5522 CHECK(weak_cell1->value()->IsFixedArray());
5524 CHECK_EQ(*survivor, weak_cell2->value()); 5523 CHECK_EQ(*survivor, weak_cell2->value());
5525 heap->CollectGarbageNewSpace(); 5524 heap->CollectGarbage(NEW_SPACE);
5526 CHECK(weak_cell1->value()->IsFixedArray()); 5525 CHECK(weak_cell1->value()->IsFixedArray());
5527 CHECK_EQ(*survivor, weak_cell2->value()); 5526 CHECK_EQ(*survivor, weak_cell2->value());
5528 heap->CollectAllAvailableGarbage(); 5527 heap->CollectAllAvailableGarbage();
5529 CHECK(weak_cell1->cleared()); 5528 CHECK(weak_cell1->cleared());
5530 CHECK_EQ(*survivor, weak_cell2->value()); 5529 CHECK_EQ(*survivor, weak_cell2->value());
5531 } 5530 }
5532 5531
5533 5532
5534 TEST(WeakCellsWithIncrementalMarking) { 5533 TEST(WeakCellsWithIncrementalMarking) {
5535 CcTest::InitializeVM(); 5534 CcTest::InitializeVM();
(...skipping 10 matching lines...) Expand all
5546 HandleScope inner_scope(isolate); 5545 HandleScope inner_scope(isolate);
5547 Handle<HeapObject> value = 5546 Handle<HeapObject> value =
5548 i == 0 ? survivor : factory->NewFixedArray(1, NOT_TENURED); 5547 i == 0 ? survivor : factory->NewFixedArray(1, NOT_TENURED);
5549 Handle<WeakCell> weak_cell = factory->NewWeakCell(value); 5548 Handle<WeakCell> weak_cell = factory->NewWeakCell(value);
5550 CHECK(weak_cell->value()->IsFixedArray()); 5549 CHECK(weak_cell->value()->IsFixedArray());
5551 IncrementalMarking* marking = heap->incremental_marking(); 5550 IncrementalMarking* marking = heap->incremental_marking();
5552 if (marking->IsStopped()) { 5551 if (marking->IsStopped()) {
5553 heap->StartIncrementalMarking(); 5552 heap->StartIncrementalMarking();
5554 } 5553 }
5555 marking->Step(128, IncrementalMarking::NO_GC_VIA_STACK_GUARD); 5554 marking->Step(128, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
5556 heap->CollectGarbageNewSpace(); 5555 heap->CollectGarbage(NEW_SPACE);
5557 CHECK(weak_cell->value()->IsFixedArray()); 5556 CHECK(weak_cell->value()->IsFixedArray());
5558 weak_cells[i] = inner_scope.CloseAndEscape(weak_cell); 5557 weak_cells[i] = inner_scope.CloseAndEscape(weak_cell);
5559 } 5558 }
5560 heap->CollectAllGarbage(); 5559 heap->CollectAllGarbage();
5561 CHECK_EQ(*survivor, weak_cells[0]->value()); 5560 CHECK_EQ(*survivor, weak_cells[0]->value());
5562 for (int i = 1; i < N; i++) { 5561 for (int i = 1; i < N; i++) {
5563 CHECK(weak_cells[i]->cleared()); 5562 CHECK(weak_cells[i]->cleared());
5564 } 5563 }
5565 } 5564 }
5566 5565
(...skipping 185 matching lines...) Expand 10 before | Expand all | Expand 10 after
5752 // If we are in a low memory config, we can't grow to two pages and we can't 5751 // If we are in a low memory config, we can't grow to two pages and we can't
5753 // run this test. This also means the issue we are testing cannot arise, as 5752 // run this test. This also means the issue we are testing cannot arise, as
5754 // there is no fragmentation. 5753 // there is no fragmentation.
5755 if (new_space->IsAtMaximumCapacity()) return; 5754 if (new_space->IsAtMaximumCapacity()) return;
5756 5755
5757 new_space->Grow(); 5756 new_space->Grow();
5758 CHECK(new_space->IsAtMaximumCapacity()); 5757 CHECK(new_space->IsAtMaximumCapacity());
5759 CHECK(2 * old_capacity == new_space->TotalCapacity()); 5758 CHECK(2 * old_capacity == new_space->TotalCapacity());
5760 5759
5761 // Call the scavenger two times to get an empty new space 5760 // Call the scavenger two times to get an empty new space
5762 heap->CollectGarbageNewSpace(); 5761 heap->CollectGarbage(NEW_SPACE);
5763 heap->CollectGarbageNewSpace(); 5762 heap->CollectGarbage(NEW_SPACE);
5764 5763
5765 // First create a few objects which will survive a scavenge, and will get 5764 // First create a few objects which will survive a scavenge, and will get
5766 // promoted to the old generation later on. These objects will create 5765 // promoted to the old generation later on. These objects will create
5767 // promotion queue entries at the end of the second semi-space page. 5766 // promotion queue entries at the end of the second semi-space page.
5768 const int number_handles = 12; 5767 const int number_handles = 12;
5769 Handle<FixedArray> handles[number_handles]; 5768 Handle<FixedArray> handles[number_handles];
5770 for (int i = 0; i < number_handles; i++) { 5769 for (int i = 0; i < number_handles; i++) {
5771 handles[i] = i_isolate->factory()->NewFixedArray(1, NOT_TENURED); 5770 handles[i] = i_isolate->factory()->NewFixedArray(1, NOT_TENURED);
5772 } 5771 }
5773 heap->CollectGarbageNewSpace(); 5772 heap->CollectGarbage(NEW_SPACE);
5774 5773
5775 // Create the first huge object which will exactly fit the first semi-space 5774 // Create the first huge object which will exactly fit the first semi-space
5776 // page. 5775 // page.
5777 int new_linear_size = 5776 int new_linear_size =
5778 static_cast<int>(*heap->new_space()->allocation_limit_address() - 5777 static_cast<int>(*heap->new_space()->allocation_limit_address() -
5779 *heap->new_space()->allocation_top_address()); 5778 *heap->new_space()->allocation_top_address());
5780 int length = new_linear_size / kPointerSize - FixedArray::kHeaderSize; 5779 int length = new_linear_size / kPointerSize - FixedArray::kHeaderSize;
5781 Handle<FixedArray> first = 5780 Handle<FixedArray> first =
5782 i_isolate->factory()->NewFixedArray(length, NOT_TENURED); 5781 i_isolate->factory()->NewFixedArray(length, NOT_TENURED);
5783 CHECK(heap->InNewSpace(*first)); 5782 CHECK(heap->InNewSpace(*first));
5784 5783
5785 // Create the second huge object of maximum allocatable second semi-space 5784 // Create the second huge object of maximum allocatable second semi-space
5786 // page size. 5785 // page size.
5787 new_linear_size = 5786 new_linear_size =
5788 static_cast<int>(*heap->new_space()->allocation_limit_address() - 5787 static_cast<int>(*heap->new_space()->allocation_limit_address() -
5789 *heap->new_space()->allocation_top_address()); 5788 *heap->new_space()->allocation_top_address());
5790 length = Page::kMaxRegularHeapObjectSize / kPointerSize - 5789 length = Page::kMaxRegularHeapObjectSize / kPointerSize -
5791 FixedArray::kHeaderSize; 5790 FixedArray::kHeaderSize;
5792 Handle<FixedArray> second = 5791 Handle<FixedArray> second =
5793 i_isolate->factory()->NewFixedArray(length, NOT_TENURED); 5792 i_isolate->factory()->NewFixedArray(length, NOT_TENURED);
5794 CHECK(heap->InNewSpace(*second)); 5793 CHECK(heap->InNewSpace(*second));
5795 5794
5796 // This scavenge will corrupt memory if the promotion queue is not 5795 // This scavenge will corrupt memory if the promotion queue is not
5797 // evacuated. 5796 // evacuated.
5798 heap->CollectGarbageNewSpace(); 5797 heap->CollectGarbage(NEW_SPACE);
5799 } 5798 }
5800 isolate->Dispose(); 5799 isolate->Dispose();
5801 } 5800 }
5802 5801
5803 5802
5804 TEST(Regress388880) { 5803 TEST(Regress388880) {
5805 i::FLAG_expose_gc = true; 5804 i::FLAG_expose_gc = true;
5806 CcTest::InitializeVM(); 5805 CcTest::InitializeVM();
5807 v8::HandleScope scope(CcTest::isolate()); 5806 v8::HandleScope scope(CcTest::isolate());
5808 Isolate* isolate = CcTest::i_isolate(); 5807 Isolate* isolate = CcTest::i_isolate();
(...skipping 404 matching lines...) Expand 10 before | Expand all | Expand 10 after
6213 CHECK_EQ(bytes, static_cast<size_t>(array->Size())); 6212 CHECK_EQ(bytes, static_cast<size_t>(array->Size()));
6214 } 6213 }
6215 6214
6216 6215
6217 TEST(NewSpaceAllocationCounter) { 6216 TEST(NewSpaceAllocationCounter) {
6218 CcTest::InitializeVM(); 6217 CcTest::InitializeVM();
6219 v8::HandleScope scope(CcTest::isolate()); 6218 v8::HandleScope scope(CcTest::isolate());
6220 Isolate* isolate = CcTest::i_isolate(); 6219 Isolate* isolate = CcTest::i_isolate();
6221 Heap* heap = isolate->heap(); 6220 Heap* heap = isolate->heap();
6222 size_t counter1 = heap->NewSpaceAllocationCounter(); 6221 size_t counter1 = heap->NewSpaceAllocationCounter();
6223 heap->CollectGarbageNewSpace(); 6222 heap->CollectGarbage(NEW_SPACE);
6224 const size_t kSize = 1024; 6223 const size_t kSize = 1024;
6225 AllocateInSpace(isolate, kSize, NEW_SPACE); 6224 AllocateInSpace(isolate, kSize, NEW_SPACE);
6226 size_t counter2 = heap->NewSpaceAllocationCounter(); 6225 size_t counter2 = heap->NewSpaceAllocationCounter();
6227 CHECK_EQ(kSize, counter2 - counter1); 6226 CHECK_EQ(kSize, counter2 - counter1);
6228 heap->CollectGarbageNewSpace(); 6227 heap->CollectGarbage(NEW_SPACE);
6229 size_t counter3 = heap->NewSpaceAllocationCounter(); 6228 size_t counter3 = heap->NewSpaceAllocationCounter();
6230 CHECK_EQ(0U, counter3 - counter2); 6229 CHECK_EQ(0U, counter3 - counter2);
6231 // Test counter overflow. 6230 // Test counter overflow.
6232 size_t max_counter = -1; 6231 size_t max_counter = -1;
6233 heap->set_new_space_allocation_counter(max_counter - 10 * kSize); 6232 heap->set_new_space_allocation_counter(max_counter - 10 * kSize);
6234 size_t start = heap->NewSpaceAllocationCounter(); 6233 size_t start = heap->NewSpaceAllocationCounter();
6235 for (int i = 0; i < 20; i++) { 6234 for (int i = 0; i < 20; i++) {
6236 AllocateInSpace(isolate, kSize, NEW_SPACE); 6235 AllocateInSpace(isolate, kSize, NEW_SPACE);
6237 size_t counter = heap->NewSpaceAllocationCounter(); 6236 size_t counter = heap->NewSpaceAllocationCounter();
6238 CHECK_EQ(kSize, counter - start); 6237 CHECK_EQ(kSize, counter - start);
6239 start = counter; 6238 start = counter;
6240 } 6239 }
6241 } 6240 }
6242 6241
6243 6242
6244 TEST(OldSpaceAllocationCounter) { 6243 TEST(OldSpaceAllocationCounter) {
6245 CcTest::InitializeVM(); 6244 CcTest::InitializeVM();
6246 v8::HandleScope scope(CcTest::isolate()); 6245 v8::HandleScope scope(CcTest::isolate());
6247 Isolate* isolate = CcTest::i_isolate(); 6246 Isolate* isolate = CcTest::i_isolate();
6248 Heap* heap = isolate->heap(); 6247 Heap* heap = isolate->heap();
6249 size_t counter1 = heap->OldGenerationAllocationCounter(); 6248 size_t counter1 = heap->OldGenerationAllocationCounter();
6250 heap->CollectGarbageNewSpace(); 6249 heap->CollectGarbage(NEW_SPACE);
6251 heap->CollectGarbageNewSpace(); 6250 heap->CollectGarbage(NEW_SPACE);
6252 const size_t kSize = 1024; 6251 const size_t kSize = 1024;
6253 AllocateInSpace(isolate, kSize, OLD_SPACE); 6252 AllocateInSpace(isolate, kSize, OLD_SPACE);
6254 size_t counter2 = heap->OldGenerationAllocationCounter(); 6253 size_t counter2 = heap->OldGenerationAllocationCounter();
6255 // TODO(ulan): replace all CHECK_LE with CHECK_EQ after v8:4148 is fixed. 6254 // TODO(ulan): replace all CHECK_LE with CHECK_EQ after v8:4148 is fixed.
6256 CHECK_LE(kSize, counter2 - counter1); 6255 CHECK_LE(kSize, counter2 - counter1);
6257 heap->CollectGarbageNewSpace(); 6256 heap->CollectGarbage(NEW_SPACE);
6258 size_t counter3 = heap->OldGenerationAllocationCounter(); 6257 size_t counter3 = heap->OldGenerationAllocationCounter();
6259 CHECK_EQ(0u, counter3 - counter2); 6258 CHECK_EQ(0u, counter3 - counter2);
6260 AllocateInSpace(isolate, kSize, OLD_SPACE); 6259 AllocateInSpace(isolate, kSize, OLD_SPACE);
6261 heap->CollectGarbage(OLD_SPACE); 6260 heap->CollectGarbage(OLD_SPACE);
6262 size_t counter4 = heap->OldGenerationAllocationCounter(); 6261 size_t counter4 = heap->OldGenerationAllocationCounter();
6263 CHECK_LE(kSize, counter4 - counter3); 6262 CHECK_LE(kSize, counter4 - counter3);
6264 // Test counter overflow. 6263 // Test counter overflow.
6265 size_t max_counter = -1; 6264 size_t max_counter = -1;
6266 heap->set_old_generation_allocation_counter(max_counter - 10 * kSize); 6265 heap->set_old_generation_allocation_counter(max_counter - 10 * kSize);
6267 size_t start = heap->OldGenerationAllocationCounter(); 6266 size_t start = heap->OldGenerationAllocationCounter();
(...skipping 391 matching lines...) Expand 10 before | Expand all | Expand 10 after
6659 { 6658 {
6660 SharedFunctionInfo::Iterator iterator(isolate); 6659 SharedFunctionInfo::Iterator iterator(isolate);
6661 while (iterator.Next()) sfi_count--; 6660 while (iterator.Next()) sfi_count--;
6662 } 6661 }
6663 6662
6664 CHECK_EQ(0, sfi_count); 6663 CHECK_EQ(0, sfi_count);
6665 } 6664 }
6666 6665
6667 } // namespace internal 6666 } // namespace internal
6668 } // namespace v8 6667 } // namespace v8
OLDNEW
« no previous file with comments | « test/cctest/test-debug.cc ('k') | test/cctest/test-log.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698