Chromium Code Reviews

Side by Side Diff: test/cctest/heap/test-heap.cc

Issue 2310143002: [heap] Introduce enum of garbage collection reasons. (Closed)
Patch Set: rebase Created 4 years, 3 months ago
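Throughout this file, the patch replaces direct heap->CollectGarbage() / CollectAllGarbage() / CollectAllAvailableGarbage() calls with CcTest:: wrappers and passes an explicit garbage collection reason to the remaining direct Heap calls. Below is only a minimal sketch of what those wrappers are assumed to look like, inferred from the call sites in this file; the actual declarations (presumably in test/cctest/cctest.h) are not part of this file's diff.

// Sketch only: helper shapes assumed from the CcTest::Collect* call sites in
// this file. GarbageCollectionReason::kTesting attributes every test-driven
// GC to the single "testing" reason introduced by this CL.
static void CollectGarbage(i::AllocationSpace space) {
  CcTest::heap()->CollectGarbage(space, i::GarbageCollectionReason::kTesting);
}
static void CollectAllGarbage(int flags) {
  CcTest::heap()->CollectAllGarbage(flags,
                                    i::GarbageCollectionReason::kTesting);
}
static void CollectAllAvailableGarbage() {
  CcTest::heap()->CollectAllAvailableGarbage(
      i::GarbageCollectionReason::kTesting);
}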
OLD | NEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 120 matching lines...)
131 CodeDesc desc; 131 CodeDesc desc;
132 masm.GetCode(&desc); 132 masm.GetCode(&desc);
133 Handle<Code> code = isolate->factory()->NewCode( 133 Handle<Code> code = isolate->factory()->NewCode(
134 desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); 134 desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
135 135
136 Code* tmp = nullptr; 136 Code* tmp = nullptr;
137 heap->CopyCode(*code).To(&tmp); 137 heap->CopyCode(*code).To(&tmp);
138 Handle<Code> copy(tmp); 138 Handle<Code> copy(tmp);
139 139
140 CheckEmbeddedObjectsAreEqual(code, copy); 140 CheckEmbeddedObjectsAreEqual(code, copy);
141 heap->CollectAllAvailableGarbage(); 141 CcTest::CollectAllAvailableGarbage();
142 CheckEmbeddedObjectsAreEqual(code, copy); 142 CheckEmbeddedObjectsAreEqual(code, copy);
143 } 143 }
144 144
145 static void CheckFindCodeObject(Isolate* isolate) { 145 static void CheckFindCodeObject(Isolate* isolate) {
146 // Test FindCodeObject 146 // Test FindCodeObject
147 #define __ assm. 147 #define __ assm.
148 148
149 Assembler assm(isolate, NULL, 0); 149 Assembler assm(isolate, NULL, 0);
150 150
151 __ nop(); // supported on all architectures 151 __ nop(); // supported on all architectures
(...skipping 319 matching lines...)
471 CHECK_EQ(request, static_cast<int>(OBJECT_POINTER_ALIGN(request))); 471 CHECK_EQ(request, static_cast<int>(OBJECT_POINTER_ALIGN(request)));
472 CHECK(Smi::FromInt(42)->IsSmi()); 472 CHECK(Smi::FromInt(42)->IsSmi());
473 CHECK(Smi::FromInt(Smi::kMinValue)->IsSmi()); 473 CHECK(Smi::FromInt(Smi::kMinValue)->IsSmi());
474 CHECK(Smi::FromInt(Smi::kMaxValue)->IsSmi()); 474 CHECK(Smi::FromInt(Smi::kMaxValue)->IsSmi());
475 } 475 }
476 476
477 477
478 TEST(GarbageCollection) { 478 TEST(GarbageCollection) {
479 CcTest::InitializeVM(); 479 CcTest::InitializeVM();
480 Isolate* isolate = CcTest::i_isolate(); 480 Isolate* isolate = CcTest::i_isolate();
481 Heap* heap = isolate->heap();
482 Factory* factory = isolate->factory(); 481 Factory* factory = isolate->factory();
483 482
484 HandleScope sc(isolate); 483 HandleScope sc(isolate);
485 // Check GC. 484 // Check GC.
486 heap->CollectGarbage(NEW_SPACE); 485 CcTest::CollectGarbage(NEW_SPACE);
487 486
488 Handle<JSGlobalObject> global( 487 Handle<JSGlobalObject> global(
489 CcTest::i_isolate()->context()->global_object()); 488 CcTest::i_isolate()->context()->global_object());
490 Handle<String> name = factory->InternalizeUtf8String("theFunction"); 489 Handle<String> name = factory->InternalizeUtf8String("theFunction");
491 Handle<String> prop_name = factory->InternalizeUtf8String("theSlot"); 490 Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
492 Handle<String> prop_namex = factory->InternalizeUtf8String("theSlotx"); 491 Handle<String> prop_namex = factory->InternalizeUtf8String("theSlotx");
493 Handle<String> obj_name = factory->InternalizeUtf8String("theObject"); 492 Handle<String> obj_name = factory->InternalizeUtf8String("theObject");
494 Handle<Smi> twenty_three(Smi::FromInt(23), isolate); 493 Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
495 Handle<Smi> twenty_four(Smi::FromInt(24), isolate); 494 Handle<Smi> twenty_four(Smi::FromInt(24), isolate);
496 495
497 { 496 {
498 HandleScope inner_scope(isolate); 497 HandleScope inner_scope(isolate);
499 // Allocate a function and keep it in global object's property. 498 // Allocate a function and keep it in global object's property.
500 Handle<JSFunction> function = factory->NewFunction(name); 499 Handle<JSFunction> function = factory->NewFunction(name);
501 JSReceiver::SetProperty(global, name, function, SLOPPY).Check(); 500 JSReceiver::SetProperty(global, name, function, SLOPPY).Check();
502 // Allocate an object. Unrooted after leaving the scope. 501 // Allocate an object. Unrooted after leaving the scope.
503 Handle<JSObject> obj = factory->NewJSObject(function); 502 Handle<JSObject> obj = factory->NewJSObject(function);
504 JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check(); 503 JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
505 JSReceiver::SetProperty(obj, prop_namex, twenty_four, SLOPPY).Check(); 504 JSReceiver::SetProperty(obj, prop_namex, twenty_four, SLOPPY).Check();
506 505
507 CHECK_EQ(Smi::FromInt(23), 506 CHECK_EQ(Smi::FromInt(23),
508 *Object::GetProperty(obj, prop_name).ToHandleChecked()); 507 *Object::GetProperty(obj, prop_name).ToHandleChecked());
509 CHECK_EQ(Smi::FromInt(24), 508 CHECK_EQ(Smi::FromInt(24),
510 *Object::GetProperty(obj, prop_namex).ToHandleChecked()); 509 *Object::GetProperty(obj, prop_namex).ToHandleChecked());
511 } 510 }
512 511
513 heap->CollectGarbage(NEW_SPACE); 512 CcTest::CollectGarbage(NEW_SPACE);
514 513
515 // Function should be alive. 514 // Function should be alive.
516 CHECK(Just(true) == JSReceiver::HasOwnProperty(global, name)); 515 CHECK(Just(true) == JSReceiver::HasOwnProperty(global, name));
517 // Check function is retained. 516 // Check function is retained.
518 Handle<Object> func_value = 517 Handle<Object> func_value =
519 Object::GetProperty(global, name).ToHandleChecked(); 518 Object::GetProperty(global, name).ToHandleChecked();
520 CHECK(func_value->IsJSFunction()); 519 CHECK(func_value->IsJSFunction());
521 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value); 520 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
522 521
523 { 522 {
524 HandleScope inner_scope(isolate); 523 HandleScope inner_scope(isolate);
525 // Allocate another object, make it reachable from global. 524 // Allocate another object, make it reachable from global.
526 Handle<JSObject> obj = factory->NewJSObject(function); 525 Handle<JSObject> obj = factory->NewJSObject(function);
527 JSReceiver::SetProperty(global, obj_name, obj, SLOPPY).Check(); 526 JSReceiver::SetProperty(global, obj_name, obj, SLOPPY).Check();
528 JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check(); 527 JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
529 } 528 }
530 529
531 // After gc, it should survive. 530 // After gc, it should survive.
532 heap->CollectGarbage(NEW_SPACE); 531 CcTest::CollectGarbage(NEW_SPACE);
533 532
534 CHECK(Just(true) == JSReceiver::HasOwnProperty(global, obj_name)); 533 CHECK(Just(true) == JSReceiver::HasOwnProperty(global, obj_name));
535 Handle<Object> obj = 534 Handle<Object> obj =
536 Object::GetProperty(global, obj_name).ToHandleChecked(); 535 Object::GetProperty(global, obj_name).ToHandleChecked();
537 CHECK(obj->IsJSObject()); 536 CHECK(obj->IsJSObject());
538 CHECK_EQ(Smi::FromInt(23), 537 CHECK_EQ(Smi::FromInt(23),
539 *Object::GetProperty(obj, prop_name).ToHandleChecked()); 538 *Object::GetProperty(obj, prop_name).ToHandleChecked());
540 } 539 }
541 540
542 541
(...skipping 28 matching lines...)
571 v8::HandleScope scope(CcTest::isolate()); 570 v8::HandleScope scope(CcTest::isolate());
572 const char* name = "Kasper the spunky"; 571 const char* name = "Kasper the spunky";
573 Handle<String> string = factory->NewStringFromAsciiChecked(name); 572 Handle<String> string = factory->NewStringFromAsciiChecked(name);
574 CHECK_EQ(StrLength(name), string->length()); 573 CHECK_EQ(StrLength(name), string->length());
575 } 574 }
576 575
577 576
578 TEST(GlobalHandles) { 577 TEST(GlobalHandles) {
579 CcTest::InitializeVM(); 578 CcTest::InitializeVM();
580 Isolate* isolate = CcTest::i_isolate(); 579 Isolate* isolate = CcTest::i_isolate();
581 Heap* heap = isolate->heap();
582 Factory* factory = isolate->factory(); 580 Factory* factory = isolate->factory();
583 GlobalHandles* global_handles = isolate->global_handles(); 581 GlobalHandles* global_handles = isolate->global_handles();
584 582
585 Handle<Object> h1; 583 Handle<Object> h1;
586 Handle<Object> h2; 584 Handle<Object> h2;
587 Handle<Object> h3; 585 Handle<Object> h3;
588 Handle<Object> h4; 586 Handle<Object> h4;
589 587
590 { 588 {
591 HandleScope scope(isolate); 589 HandleScope scope(isolate);
592 590
593 Handle<Object> i = factory->NewStringFromStaticChars("fisk"); 591 Handle<Object> i = factory->NewStringFromStaticChars("fisk");
594 Handle<Object> u = factory->NewNumber(1.12344); 592 Handle<Object> u = factory->NewNumber(1.12344);
595 593
596 h1 = global_handles->Create(*i); 594 h1 = global_handles->Create(*i);
597 h2 = global_handles->Create(*u); 595 h2 = global_handles->Create(*u);
598 h3 = global_handles->Create(*i); 596 h3 = global_handles->Create(*i);
599 h4 = global_handles->Create(*u); 597 h4 = global_handles->Create(*u);
600 } 598 }
601 599
602 // after gc, it should survive 600 // after gc, it should survive
603 heap->CollectGarbage(NEW_SPACE); 601 CcTest::CollectGarbage(NEW_SPACE);
604 602
605 CHECK((*h1)->IsString()); 603 CHECK((*h1)->IsString());
606 CHECK((*h2)->IsHeapNumber()); 604 CHECK((*h2)->IsHeapNumber());
607 CHECK((*h3)->IsString()); 605 CHECK((*h3)->IsString());
608 CHECK((*h4)->IsHeapNumber()); 606 CHECK((*h4)->IsHeapNumber());
609 607
610 CHECK_EQ(*h3, *h1); 608 CHECK_EQ(*h3, *h1);
611 GlobalHandles::Destroy(h1.location()); 609 GlobalHandles::Destroy(h1.location());
612 GlobalHandles::Destroy(h3.location()); 610 GlobalHandles::Destroy(h3.location());
613 611
(...skipping 12 matching lines...)
626 data.GetParameter()); 624 data.GetParameter());
627 if (p->second == 1234) WeakPointerCleared = true; 625 if (p->second == 1234) WeakPointerCleared = true;
628 p->first->Reset(); 626 p->first->Reset();
629 } 627 }
630 628
631 629
632 TEST(WeakGlobalHandlesScavenge) { 630 TEST(WeakGlobalHandlesScavenge) {
633 i::FLAG_stress_compaction = false; 631 i::FLAG_stress_compaction = false;
634 CcTest::InitializeVM(); 632 CcTest::InitializeVM();
635 Isolate* isolate = CcTest::i_isolate(); 633 Isolate* isolate = CcTest::i_isolate();
636 Heap* heap = isolate->heap();
637 Factory* factory = isolate->factory(); 634 Factory* factory = isolate->factory();
638 GlobalHandles* global_handles = isolate->global_handles(); 635 GlobalHandles* global_handles = isolate->global_handles();
639 636
640 WeakPointerCleared = false; 637 WeakPointerCleared = false;
641 638
642 Handle<Object> h1; 639 Handle<Object> h1;
643 Handle<Object> h2; 640 Handle<Object> h2;
644 641
645 { 642 {
646 HandleScope scope(isolate); 643 HandleScope scope(isolate);
647 644
648 Handle<Object> i = factory->NewStringFromStaticChars("fisk"); 645 Handle<Object> i = factory->NewStringFromStaticChars("fisk");
649 Handle<Object> u = factory->NewNumber(1.12344); 646 Handle<Object> u = factory->NewNumber(1.12344);
650 647
651 h1 = global_handles->Create(*i); 648 h1 = global_handles->Create(*i);
652 h2 = global_handles->Create(*u); 649 h2 = global_handles->Create(*u);
653 } 650 }
654 651
655 std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234); 652 std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
656 GlobalHandles::MakeWeak( 653 GlobalHandles::MakeWeak(
657 h2.location(), reinterpret_cast<void*>(&handle_and_id), 654 h2.location(), reinterpret_cast<void*>(&handle_and_id),
658 &TestWeakGlobalHandleCallback, v8::WeakCallbackType::kParameter); 655 &TestWeakGlobalHandleCallback, v8::WeakCallbackType::kParameter);
659 656
660 // Scavenge treats weak pointers as normal roots. 657 // Scavenge treats weak pointers as normal roots.
661 heap->CollectGarbage(NEW_SPACE); 658 CcTest::CollectGarbage(NEW_SPACE);
662 659
663 CHECK((*h1)->IsString()); 660 CHECK((*h1)->IsString());
664 CHECK((*h2)->IsHeapNumber()); 661 CHECK((*h2)->IsHeapNumber());
665 662
666 CHECK(!WeakPointerCleared); 663 CHECK(!WeakPointerCleared);
667 CHECK(!global_handles->IsNearDeath(h2.location())); 664 CHECK(!global_handles->IsNearDeath(h2.location()));
668 CHECK(!global_handles->IsNearDeath(h1.location())); 665 CHECK(!global_handles->IsNearDeath(h1.location()));
669 666
670 GlobalHandles::Destroy(h1.location()); 667 GlobalHandles::Destroy(h1.location());
671 GlobalHandles::Destroy(h2.location()); 668 GlobalHandles::Destroy(h2.location());
(...skipping 16 matching lines...)
688 HandleScope scope(isolate); 685 HandleScope scope(isolate);
689 686
690 Handle<Object> i = factory->NewStringFromStaticChars("fisk"); 687 Handle<Object> i = factory->NewStringFromStaticChars("fisk");
691 Handle<Object> u = factory->NewNumber(1.12344); 688 Handle<Object> u = factory->NewNumber(1.12344);
692 689
693 h1 = global_handles->Create(*i); 690 h1 = global_handles->Create(*i);
694 h2 = global_handles->Create(*u); 691 h2 = global_handles->Create(*u);
695 } 692 }
696 693
697 // Make sure the objects are promoted. 694 // Make sure the objects are promoted.
698 heap->CollectGarbage(OLD_SPACE); 695 CcTest::CollectGarbage(OLD_SPACE);
699 heap->CollectGarbage(NEW_SPACE); 696 CcTest::CollectGarbage(NEW_SPACE);
700 CHECK(!heap->InNewSpace(*h1) && !heap->InNewSpace(*h2)); 697 CHECK(!heap->InNewSpace(*h1) && !heap->InNewSpace(*h2));
701 698
702 std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234); 699 std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
703 GlobalHandles::MakeWeak( 700 GlobalHandles::MakeWeak(
704 h2.location(), reinterpret_cast<void*>(&handle_and_id), 701 h2.location(), reinterpret_cast<void*>(&handle_and_id),
705 &TestWeakGlobalHandleCallback, v8::WeakCallbackType::kParameter); 702 &TestWeakGlobalHandleCallback, v8::WeakCallbackType::kParameter);
706 CHECK(!GlobalHandles::IsNearDeath(h1.location())); 703 CHECK(!GlobalHandles::IsNearDeath(h1.location()));
707 CHECK(!GlobalHandles::IsNearDeath(h2.location())); 704 CHECK(!GlobalHandles::IsNearDeath(h2.location()));
708 705
709 // Incremental marking potentially marked handles before they turned weak. 706 // Incremental marking potentially marked handles before they turned weak.
710 heap->CollectAllGarbage(); 707 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
711 708
712 CHECK((*h1)->IsString()); 709 CHECK((*h1)->IsString());
713 710
714 CHECK(WeakPointerCleared); 711 CHECK(WeakPointerCleared);
715 CHECK(!GlobalHandles::IsNearDeath(h1.location())); 712 CHECK(!GlobalHandles::IsNearDeath(h1.location()));
716 713
717 GlobalHandles::Destroy(h1.location()); 714 GlobalHandles::Destroy(h1.location());
718 } 715 }
719 716
720 717
721 TEST(DeleteWeakGlobalHandle) { 718 TEST(DeleteWeakGlobalHandle) {
722 i::FLAG_stress_compaction = false; 719 i::FLAG_stress_compaction = false;
723 CcTest::InitializeVM(); 720 CcTest::InitializeVM();
724 Isolate* isolate = CcTest::i_isolate(); 721 Isolate* isolate = CcTest::i_isolate();
725 Heap* heap = isolate->heap();
726 Factory* factory = isolate->factory(); 722 Factory* factory = isolate->factory();
727 GlobalHandles* global_handles = isolate->global_handles(); 723 GlobalHandles* global_handles = isolate->global_handles();
728 724
729 WeakPointerCleared = false; 725 WeakPointerCleared = false;
730 726
731 Handle<Object> h; 727 Handle<Object> h;
732 728
733 { 729 {
734 HandleScope scope(isolate); 730 HandleScope scope(isolate);
735 731
736 Handle<Object> i = factory->NewStringFromStaticChars("fisk"); 732 Handle<Object> i = factory->NewStringFromStaticChars("fisk");
737 h = global_handles->Create(*i); 733 h = global_handles->Create(*i);
738 } 734 }
739 735
740 std::pair<Handle<Object>*, int> handle_and_id(&h, 1234); 736 std::pair<Handle<Object>*, int> handle_and_id(&h, 1234);
741 GlobalHandles::MakeWeak(h.location(), reinterpret_cast<void*>(&handle_and_id), 737 GlobalHandles::MakeWeak(h.location(), reinterpret_cast<void*>(&handle_and_id),
742 &TestWeakGlobalHandleCallback, 738 &TestWeakGlobalHandleCallback,
743 v8::WeakCallbackType::kParameter); 739 v8::WeakCallbackType::kParameter);
744 740
745 // Scavenge does not recognize weak references. 741 // Scavenge does not recognize weak references.
746 heap->CollectGarbage(NEW_SPACE); 742 CcTest::CollectGarbage(NEW_SPACE);
747 743
748 CHECK(!WeakPointerCleared); 744 CHECK(!WeakPointerCleared);
749 745
750 // Mark-compact treats weak references properly. 746 // Mark-compact treats weak references properly.
751 heap->CollectGarbage(OLD_SPACE); 747 CcTest::CollectGarbage(OLD_SPACE);
752 748
753 CHECK(WeakPointerCleared); 749 CHECK(WeakPointerCleared);
754 } 750 }
755 751
756 TEST(DoNotPromoteWhiteObjectsOnScavenge) { 752 TEST(DoNotPromoteWhiteObjectsOnScavenge) {
757 CcTest::InitializeVM(); 753 CcTest::InitializeVM();
758 Isolate* isolate = CcTest::i_isolate(); 754 Isolate* isolate = CcTest::i_isolate();
759 Heap* heap = isolate->heap(); 755 Heap* heap = isolate->heap();
760 Factory* factory = isolate->factory(); 756 Factory* factory = isolate->factory();
761 757
762 HandleScope scope(isolate); 758 HandleScope scope(isolate);
763 Handle<Object> white = factory->NewStringFromStaticChars("white"); 759 Handle<Object> white = factory->NewStringFromStaticChars("white");
764 760
765 CHECK(Marking::IsWhite(ObjectMarking::MarkBitFrom(HeapObject::cast(*white)))); 761 CHECK(Marking::IsWhite(ObjectMarking::MarkBitFrom(HeapObject::cast(*white))));
766 762
767 heap->CollectGarbage(NEW_SPACE); 763 CcTest::CollectGarbage(NEW_SPACE);
768 764
769 CHECK(heap->InNewSpace(*white)); 765 CHECK(heap->InNewSpace(*white));
770 } 766 }
771 767
772 TEST(PromoteGreyOrBlackObjectsOnScavenge) { 768 TEST(PromoteGreyOrBlackObjectsOnScavenge) {
773 CcTest::InitializeVM(); 769 CcTest::InitializeVM();
774 Isolate* isolate = CcTest::i_isolate(); 770 Isolate* isolate = CcTest::i_isolate();
775 Heap* heap = isolate->heap(); 771 Heap* heap = isolate->heap();
776 Factory* factory = isolate->factory(); 772 Factory* factory = isolate->factory();
777 773
778 HandleScope scope(isolate); 774 HandleScope scope(isolate);
779 Handle<Object> marked = factory->NewStringFromStaticChars("marked"); 775 Handle<Object> marked = factory->NewStringFromStaticChars("marked");
780 776
781 IncrementalMarking* marking = heap->incremental_marking(); 777 IncrementalMarking* marking = heap->incremental_marking();
782 marking->Stop(); 778 marking->Stop();
783 heap->StartIncrementalMarking(); 779 heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
780 i::GarbageCollectionReason::kTesting);
784 while ( 781 while (
785 Marking::IsWhite(ObjectMarking::MarkBitFrom(HeapObject::cast(*marked)))) { 782 Marking::IsWhite(ObjectMarking::MarkBitFrom(HeapObject::cast(*marked)))) {
786 marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD, 783 marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
787 IncrementalMarking::DO_NOT_FORCE_COMPLETION); 784 IncrementalMarking::DO_NOT_FORCE_COMPLETION);
788 } 785 }
789 786
790 heap->CollectGarbage(NEW_SPACE); 787 CcTest::CollectGarbage(NEW_SPACE);
791 788
792 CHECK(!heap->InNewSpace(*marked)); 789 CHECK(!heap->InNewSpace(*marked));
793 } 790 }
794 791
795 TEST(BytecodeArray) { 792 TEST(BytecodeArray) {
796 static const uint8_t kRawBytes[] = {0xc3, 0x7e, 0xa5, 0x5a}; 793 static const uint8_t kRawBytes[] = {0xc3, 0x7e, 0xa5, 0x5a};
797 static const int kRawBytesSize = sizeof(kRawBytes); 794 static const int kRawBytesSize = sizeof(kRawBytes);
798 static const int kFrameSize = 32; 795 static const int kFrameSize = 32;
799 static const int kParameterCount = 2; 796 static const int kParameterCount = 2;
800 797
(...skipping 27 matching lines...)
828 CHECK_EQ(array->GetFirstBytecodeAddress()[i], kRawBytes[i]); 825 CHECK_EQ(array->GetFirstBytecodeAddress()[i], kRawBytes[i]);
829 CHECK_EQ(array->get(i), kRawBytes[i]); 826 CHECK_EQ(array->get(i), kRawBytes[i]);
830 } 827 }
831 828
832 FixedArray* old_constant_pool_address = *constant_pool; 829 FixedArray* old_constant_pool_address = *constant_pool;
833 830
834 // Perform a full garbage collection and force the constant pool to be on an 831 // Perform a full garbage collection and force the constant pool to be on an
835 // evacuation candidate. 832 // evacuation candidate.
836 Page* evac_page = Page::FromAddress(constant_pool->address()); 833 Page* evac_page = Page::FromAddress(constant_pool->address());
837 evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING); 834 evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
838 heap->CollectAllGarbage(); 835 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
839 836
840 // BytecodeArray should survive. 837 // BytecodeArray should survive.
841 CHECK_EQ(array->length(), kRawBytesSize); 838 CHECK_EQ(array->length(), kRawBytesSize);
842 CHECK_EQ(array->frame_size(), kFrameSize); 839 CHECK_EQ(array->frame_size(), kFrameSize);
843 for (int i = 0; i < kRawBytesSize; i++) { 840 for (int i = 0; i < kRawBytesSize; i++) {
844 CHECK_EQ(array->get(i), kRawBytes[i]); 841 CHECK_EQ(array->get(i), kRawBytes[i]);
845 CHECK_EQ(array->GetFirstBytecodeAddress()[i], kRawBytes[i]); 842 CHECK_EQ(array->GetFirstBytecodeAddress()[i], kRawBytes[i]);
846 } 843 }
847 844
848 // Constant pool should have been migrated. 845 // Constant pool should have been migrated.
(...skipping 449 matching lines...)
1298 } 1295 }
1299 1296
1300 // Check function is compiled. 1297 // Check function is compiled.
1301 Handle<Object> func_value = Object::GetProperty(i_isolate->global_object(), 1298 Handle<Object> func_value = Object::GetProperty(i_isolate->global_object(),
1302 foo_name).ToHandleChecked(); 1299 foo_name).ToHandleChecked();
1303 CHECK(func_value->IsJSFunction()); 1300 CHECK(func_value->IsJSFunction());
1304 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value); 1301 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1305 CHECK(function->shared()->is_compiled()); 1302 CHECK(function->shared()->is_compiled());
1306 1303
1307 // The code will survive at least two GCs. 1304 // The code will survive at least two GCs.
1308 i_isolate->heap()->CollectAllGarbage(); 1305 i_isolate->heap()->CollectAllGarbage(
1309 i_isolate->heap()->CollectAllGarbage(); 1306 i::Heap::kFinalizeIncrementalMarkingMask,
1307 i::GarbageCollectionReason::kTesting);
1308 i_isolate->heap()->CollectAllGarbage(
1309 i::Heap::kFinalizeIncrementalMarkingMask,
1310 i::GarbageCollectionReason::kTesting);
1310 CHECK(function->shared()->is_compiled()); 1311 CHECK(function->shared()->is_compiled());
1311 1312
1312 // Simulate several GCs that use full marking. 1313 // Simulate several GCs that use full marking.
1313 const int kAgingThreshold = 6; 1314 const int kAgingThreshold = 6;
1314 for (int i = 0; i < kAgingThreshold; i++) { 1315 for (int i = 0; i < kAgingThreshold; i++) {
1315 i_isolate->heap()->CollectAllGarbage(); 1316 i_isolate->heap()->CollectAllGarbage(
1317 i::Heap::kFinalizeIncrementalMarkingMask,
1318 i::GarbageCollectionReason::kTesting);
1316 } 1319 }
1317 1320
1318 // foo should no longer be in the compilation cache 1321 // foo should no longer be in the compilation cache
1319 CHECK(!function->shared()->is_compiled() || function->IsOptimized()); 1322 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1320 CHECK(!function->is_compiled() || function->IsOptimized()); 1323 CHECK(!function->is_compiled() || function->IsOptimized());
1321 // Call foo to get it recompiled. 1324 // Call foo to get it recompiled.
1322 CompileRun("foo()"); 1325 CompileRun("foo()");
1323 CHECK(function->shared()->is_compiled()); 1326 CHECK(function->shared()->is_compiled());
1324 CHECK(function->is_compiled()); 1327 CHECK(function->is_compiled());
1325 } 1328 }
(...skipping 25 matching lines...)
1351 } 1354 }
1352 1355
1353 // Check function is compiled. 1356 // Check function is compiled.
1354 Handle<Object> func_value = 1357 Handle<Object> func_value =
1355 Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked(); 1358 Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
1356 CHECK(func_value->IsJSFunction()); 1359 CHECK(func_value->IsJSFunction());
1357 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value); 1360 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1358 CHECK(function->shared()->is_compiled()); 1361 CHECK(function->shared()->is_compiled());
1359 1362
1360 // The code has been run so will survive at least one GC. 1363 // The code has been run so will survive at least one GC.
1361 CcTest::heap()->CollectAllGarbage(); 1364 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1362 CHECK(function->shared()->is_compiled()); 1365 CHECK(function->shared()->is_compiled());
1363 1366
1364 // The code was only run once, so it should be pre-aged and collected on the 1367 // The code was only run once, so it should be pre-aged and collected on the
1365 // next GC. 1368 // next GC.
1366 CcTest::heap()->CollectAllGarbage(); 1369 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1367 CHECK(!function->shared()->is_compiled() || function->IsOptimized()); 1370 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1368 1371
1369 // Execute the function again twice, and ensure it is reset to the young age. 1372 // Execute the function again twice, and ensure it is reset to the young age.
1370 { v8::HandleScope scope(CcTest::isolate()); 1373 { v8::HandleScope scope(CcTest::isolate());
1371 CompileRun("foo();" 1374 CompileRun("foo();"
1372 "foo();"); 1375 "foo();");
1373 } 1376 }
1374 1377
1375 // The code will survive at least two GCs now that it is young again. 1378 // The code will survive at least two GCs now that it is young again.
1376 CcTest::heap()->CollectAllGarbage(); 1379 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1377 CcTest::heap()->CollectAllGarbage(); 1380 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1378 CHECK(function->shared()->is_compiled()); 1381 CHECK(function->shared()->is_compiled());
1379 1382
1380 // Simulate several GCs that use full marking. 1383 // Simulate several GCs that use full marking.
1381 const int kAgingThreshold = 6; 1384 const int kAgingThreshold = 6;
1382 for (int i = 0; i < kAgingThreshold; i++) { 1385 for (int i = 0; i < kAgingThreshold; i++) {
1383 CcTest::heap()->CollectAllGarbage(); 1386 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1384 } 1387 }
1385 1388
1386 // foo should no longer be in the compilation cache 1389 // foo should no longer be in the compilation cache
1387 CHECK(!function->shared()->is_compiled() || function->IsOptimized()); 1390 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1388 CHECK(!function->is_compiled() || function->IsOptimized()); 1391 CHECK(!function->is_compiled() || function->IsOptimized());
1389 // Call foo to get it recompiled. 1392 // Call foo to get it recompiled.
1390 CompileRun("foo()"); 1393 CompileRun("foo()");
1391 CHECK(function->shared()->is_compiled()); 1394 CHECK(function->shared()->is_compiled());
1392 CHECK(function->is_compiled()); 1395 CHECK(function->is_compiled());
1393 } 1396 }
(...skipping 22 matching lines...)
1416 } 1419 }
1417 1420
1418 // Check function is compiled. 1421 // Check function is compiled.
1419 Handle<Object> func_value = 1422 Handle<Object> func_value =
1420 Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked(); 1423 Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
1421 CHECK(func_value->IsJSFunction()); 1424 CHECK(func_value->IsJSFunction());
1422 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value); 1425 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1423 CHECK(function->shared()->is_compiled()); 1426 CHECK(function->shared()->is_compiled());
1424 1427
1425 // The code will survive at least two GCs. 1428 // The code will survive at least two GCs.
1426 CcTest::heap()->CollectAllGarbage(); 1429 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1427 CcTest::heap()->CollectAllGarbage(); 1430 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1428 CHECK(function->shared()->is_compiled()); 1431 CHECK(function->shared()->is_compiled());
1429 1432
1430 // Simulate several GCs that use incremental marking. 1433 // Simulate several GCs that use incremental marking.
1431 const int kAgingThreshold = 6; 1434 const int kAgingThreshold = 6;
1432 for (int i = 0; i < kAgingThreshold; i++) { 1435 for (int i = 0; i < kAgingThreshold; i++) {
1433 heap::SimulateIncrementalMarking(CcTest::heap()); 1436 heap::SimulateIncrementalMarking(CcTest::heap());
1434 CcTest::heap()->CollectAllGarbage(); 1437 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1435 } 1438 }
1436 CHECK(!function->shared()->is_compiled() || function->IsOptimized()); 1439 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1437 CHECK(!function->is_compiled() || function->IsOptimized()); 1440 CHECK(!function->is_compiled() || function->IsOptimized());
1438 1441
1439 // This compile will compile the function again. 1442 // This compile will compile the function again.
1440 { v8::HandleScope scope(CcTest::isolate()); 1443 { v8::HandleScope scope(CcTest::isolate());
1441 CompileRun("foo();"); 1444 CompileRun("foo();");
1442 } 1445 }
1443 1446
1444 // Simulate several GCs that use incremental marking but make sure 1447 // Simulate several GCs that use incremental marking but make sure
1445 // the loop breaks once the function is enqueued as a candidate. 1448 // the loop breaks once the function is enqueued as a candidate.
1446 for (int i = 0; i < kAgingThreshold; i++) { 1449 for (int i = 0; i < kAgingThreshold; i++) {
1447 heap::SimulateIncrementalMarking(CcTest::heap()); 1450 heap::SimulateIncrementalMarking(CcTest::heap());
1448 if (!function->next_function_link()->IsUndefined(CcTest::i_isolate())) 1451 if (!function->next_function_link()->IsUndefined(CcTest::i_isolate()))
1449 break; 1452 break;
1450 CcTest::heap()->CollectAllGarbage(); 1453 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1451 } 1454 }
1452 1455
1453 // Force optimization while incremental marking is active and while 1456 // Force optimization while incremental marking is active and while
1454 // the function is enqueued as a candidate. 1457 // the function is enqueued as a candidate.
1455 { v8::HandleScope scope(CcTest::isolate()); 1458 { v8::HandleScope scope(CcTest::isolate());
1456 CompileRun("%OptimizeFunctionOnNextCall(foo); foo();"); 1459 CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
1457 } 1460 }
1458 1461
1459 // Simulate one final GC to make sure the candidate queue is sane. 1462 // Simulate one final GC to make sure the candidate queue is sane.
1460 CcTest::heap()->CollectAllGarbage(); 1463 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1461 CHECK(function->shared()->is_compiled() || !function->IsOptimized()); 1464 CHECK(function->shared()->is_compiled() || !function->IsOptimized());
1462 CHECK(function->is_compiled() || !function->IsOptimized()); 1465 CHECK(function->is_compiled() || !function->IsOptimized());
1463 } 1466 }
1464 1467
1465 1468
1466 TEST(TestCodeFlushingIncrementalScavenge) { 1469 TEST(TestCodeFlushingIncrementalScavenge) {
1467 // If we do not flush code this test is invalid. 1470 // If we do not flush code this test is invalid.
1468 if (!FLAG_flush_code) return; 1471 if (!FLAG_flush_code) return;
1469 i::FLAG_allow_natives_syntax = true; 1472 i::FLAG_allow_natives_syntax = true;
1470 i::FLAG_optimize_for_size = false; 1473 i::FLAG_optimize_for_size = false;
1471 CcTest::InitializeVM(); 1474 CcTest::InitializeVM();
1472 Isolate* isolate = CcTest::i_isolate(); 1475 Isolate* isolate = CcTest::i_isolate();
1473 Factory* factory = isolate->factory(); 1476 Factory* factory = isolate->factory();
1474 v8::HandleScope scope(CcTest::isolate()); 1477 v8::HandleScope scope(CcTest::isolate());
1475 const char* source = "var foo = function() {" 1478 const char* source = "var foo = function() {"
1476 " var x = 42;" 1479 " var x = 42;"
1477 " var y = 42;" 1480 " var y = 42;"
1478 " var z = x + y;" 1481 " var z = x + y;"
1479 "};" 1482 "};"
1480 "foo();" 1483 "foo();"
1481 "var bar = function() {" 1484 "var bar = function() {"
1482 " var x = 23;" 1485 " var x = 23;"
1483 "};" 1486 "};"
1484 "bar();"; 1487 "bar();";
1485 Handle<String> foo_name = factory->InternalizeUtf8String("foo"); 1488 Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1486 Handle<String> bar_name = factory->InternalizeUtf8String("bar"); 1489 Handle<String> bar_name = factory->InternalizeUtf8String("bar");
1487 1490
1488 // Perform one initial GC to enable code flushing. 1491 // Perform one initial GC to enable code flushing.
1489 CcTest::heap()->CollectAllGarbage(); 1492 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1490 1493
1491 // This compile will add the code to the compilation cache. 1494 // This compile will add the code to the compilation cache.
1492 { v8::HandleScope scope(CcTest::isolate()); 1495 { v8::HandleScope scope(CcTest::isolate());
1493 CompileRun(source); 1496 CompileRun(source);
1494 } 1497 }
1495 1498
1496 // Check functions are compiled. 1499 // Check functions are compiled.
1497 Handle<Object> func_value = 1500 Handle<Object> func_value =
1498 Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked(); 1501 Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
1499 CHECK(func_value->IsJSFunction()); 1502 CHECK(func_value->IsJSFunction());
(...skipping 16 matching lines...)
1516 for (int i = 0; i < kAgingThreshold; i++) { 1519 for (int i = 0; i < kAgingThreshold; i++) {
1517 function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2)); 1520 function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
1518 function2->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2)); 1521 function2->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
1519 } 1522 }
1520 1523
1521 // Simulate incremental marking so that the functions are enqueued as 1524 // Simulate incremental marking so that the functions are enqueued as
1522 // code flushing candidates. Then kill one of the functions. Finally 1525 // code flushing candidates. Then kill one of the functions. Finally
1523 // perform a scavenge while incremental marking is still running. 1526 // perform a scavenge while incremental marking is still running.
1524 heap::SimulateIncrementalMarking(CcTest::heap(), false); 1527 heap::SimulateIncrementalMarking(CcTest::heap(), false);
1525 *function2.location() = NULL; 1528 *function2.location() = NULL;
1526 CcTest::heap()->CollectGarbage(NEW_SPACE, "test scavenge while marking"); 1529 CcTest::CollectGarbage(NEW_SPACE);
1527 1530
1528 // Simulate one final GC to make sure the candidate queue is sane. 1531 // Simulate one final GC to make sure the candidate queue is sane.
1529 CcTest::heap()->CollectAllGarbage(); 1532 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1530 CHECK(!function->shared()->is_compiled() || function->IsOptimized()); 1533 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1531 CHECK(!function->is_compiled() || function->IsOptimized()); 1534 CHECK(!function->is_compiled() || function->IsOptimized());
1532 } 1535 }
1533 1536
1534 1537
1535 TEST(TestCodeFlushingIncrementalAbort) { 1538 TEST(TestCodeFlushingIncrementalAbort) {
1536 // If we do not flush code this test is invalid. 1539 // If we do not flush code this test is invalid.
1537 if (!FLAG_flush_code) return; 1540 if (!FLAG_flush_code) return;
1538 i::FLAG_allow_natives_syntax = true; 1541 i::FLAG_allow_natives_syntax = true;
1539 i::FLAG_optimize_for_size = false; 1542 i::FLAG_optimize_for_size = false;
(...skipping 16 matching lines...)
1556 } 1559 }
1557 1560
1558 // Check function is compiled. 1561 // Check function is compiled.
1559 Handle<Object> func_value = 1562 Handle<Object> func_value =
1560 Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked(); 1563 Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
1561 CHECK(func_value->IsJSFunction()); 1564 CHECK(func_value->IsJSFunction());
1562 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value); 1565 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1563 CHECK(function->shared()->is_compiled()); 1566 CHECK(function->shared()->is_compiled());
1564 1567
1565 // The code will survive at least two GCs. 1568 // The code will survive at least two GCs.
1566 heap->CollectAllGarbage(); 1569 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1567 heap->CollectAllGarbage(); 1570 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1568 CHECK(function->shared()->is_compiled()); 1571 CHECK(function->shared()->is_compiled());
1569 1572
1570 // Bump the code age so that flushing is triggered. 1573 // Bump the code age so that flushing is triggered.
1571 const int kAgingThreshold = 6; 1574 const int kAgingThreshold = 6;
1572 for (int i = 0; i < kAgingThreshold; i++) { 1575 for (int i = 0; i < kAgingThreshold; i++) {
1573 function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2)); 1576 function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
1574 } 1577 }
1575 1578
1576 // Simulate incremental marking so that the function is enqueued as 1579 // Simulate incremental marking so that the function is enqueued as
1577 // code flushing candidate. 1580 // code flushing candidate.
1578 heap::SimulateIncrementalMarking(heap); 1581 heap::SimulateIncrementalMarking(heap);
1579 1582
1580 // Enable the debugger and add a breakpoint while incremental marking 1583 // Enable the debugger and add a breakpoint while incremental marking
1581 // is running so that incremental marking aborts and code flushing is 1584 // is running so that incremental marking aborts and code flushing is
1582 // disabled. 1585 // disabled.
1583 int position = function->shared()->start_position(); 1586 int position = function->shared()->start_position();
1584 Handle<Object> breakpoint_object(Smi::FromInt(0), isolate); 1587 Handle<Object> breakpoint_object(Smi::FromInt(0), isolate);
1585 EnableDebugger(CcTest::isolate()); 1588 EnableDebugger(CcTest::isolate());
1586 isolate->debug()->SetBreakPoint(function, breakpoint_object, &position); 1589 isolate->debug()->SetBreakPoint(function, breakpoint_object, &position);
1587 isolate->debug()->ClearBreakPoint(breakpoint_object); 1590 isolate->debug()->ClearBreakPoint(breakpoint_object);
1588 DisableDebugger(CcTest::isolate()); 1591 DisableDebugger(CcTest::isolate());
1589 1592
1590 // Force optimization now that code flushing is disabled. 1593 // Force optimization now that code flushing is disabled.
1591 { v8::HandleScope scope(CcTest::isolate()); 1594 { v8::HandleScope scope(CcTest::isolate());
1592 CompileRun("%OptimizeFunctionOnNextCall(foo); foo();"); 1595 CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
1593 } 1596 }
1594 1597
1595 // Simulate one final GC to make sure the candidate queue is sane. 1598 // Simulate one final GC to make sure the candidate queue is sane.
1596 heap->CollectAllGarbage(); 1599 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1597 CHECK(function->shared()->is_compiled() || !function->IsOptimized()); 1600 CHECK(function->shared()->is_compiled() || !function->IsOptimized());
1598 CHECK(function->is_compiled() || !function->IsOptimized()); 1601 CHECK(function->is_compiled() || !function->IsOptimized());
1599 } 1602 }
1600 1603
1601 TEST(TestUseOfIncrementalBarrierOnCompileLazy) { 1604 TEST(TestUseOfIncrementalBarrierOnCompileLazy) {
1602 // Turn off always_opt because it interferes with running the built-in for 1605 // Turn off always_opt because it interferes with running the built-in for
1603 // the last call to g(). 1606 // the last call to g().
1604 i::FLAG_always_opt = false; 1607 i::FLAG_always_opt = false;
1605 i::FLAG_allow_natives_syntax = true; 1608 i::FLAG_allow_natives_syntax = true;
1606 CcTest::InitializeVM(); 1609 CcTest::InitializeVM();
(...skipping 35 matching lines...)
1642 1645
1643 TEST(CompilationCacheCachingBehavior) { 1646 TEST(CompilationCacheCachingBehavior) {
1644 // If we do not flush code, or have the compilation cache turned off, this 1647 // If we do not flush code, or have the compilation cache turned off, this
1645 // test is invalid. 1648 // test is invalid.
1646 if (!FLAG_flush_code || !FLAG_compilation_cache) { 1649 if (!FLAG_flush_code || !FLAG_compilation_cache) {
1647 return; 1650 return;
1648 } 1651 }
1649 CcTest::InitializeVM(); 1652 CcTest::InitializeVM();
1650 Isolate* isolate = CcTest::i_isolate(); 1653 Isolate* isolate = CcTest::i_isolate();
1651 Factory* factory = isolate->factory(); 1654 Factory* factory = isolate->factory();
1652 Heap* heap = isolate->heap();
1653 CompilationCache* compilation_cache = isolate->compilation_cache(); 1655 CompilationCache* compilation_cache = isolate->compilation_cache();
1654 LanguageMode language_mode = construct_language_mode(FLAG_use_strict); 1656 LanguageMode language_mode = construct_language_mode(FLAG_use_strict);
1655 1657
1656 v8::HandleScope scope(CcTest::isolate()); 1658 v8::HandleScope scope(CcTest::isolate());
1657 const char* raw_source = 1659 const char* raw_source =
1658 "function foo() {" 1660 "function foo() {"
1659 " var x = 42;" 1661 " var x = 42;"
1660 " var y = 42;" 1662 " var y = 42;"
1661 " var z = x + y;" 1663 " var z = x + y;"
1662 "};" 1664 "};"
(...skipping 10 matching lines...) Expand all
1673 MaybeHandle<SharedFunctionInfo> info = compilation_cache->LookupScript( 1675 MaybeHandle<SharedFunctionInfo> info = compilation_cache->LookupScript(
1674 source, Handle<Object>(), 0, 0, 1676 source, Handle<Object>(), 0, 0,
1675 v8::ScriptOriginOptions(false, true, false), native_context, 1677 v8::ScriptOriginOptions(false, true, false), native_context,
1676 language_mode); 1678 language_mode);
1677 CHECK(!info.is_null()); 1679 CHECK(!info.is_null());
1678 1680
1679 // Check that the code cache entry survives at least one GC. 1681 // Check that the code cache entry survives at least one GC.
1680 // (Unless --optimize-for-size, in which case it might get collected 1682 // (Unless --optimize-for-size, in which case it might get collected
1681 // immediately.) 1683 // immediately.)
1682 if (!FLAG_optimize_for_size) { 1684 if (!FLAG_optimize_for_size) {
1683 heap->CollectAllGarbage(); 1685 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1684 info = compilation_cache->LookupScript( 1686 info = compilation_cache->LookupScript(
1685 source, Handle<Object>(), 0, 0, 1687 source, Handle<Object>(), 0, 0,
1686 v8::ScriptOriginOptions(false, true, false), native_context, 1688 v8::ScriptOriginOptions(false, true, false), native_context,
1687 language_mode); 1689 language_mode);
1688 CHECK(!info.is_null()); 1690 CHECK(!info.is_null());
1689 } 1691 }
1690 1692
1691 // Progress code age until it's old and ready for GC. 1693 // Progress code age until it's old and ready for GC.
1692 while (!info.ToHandleChecked()->code()->IsOld()) { 1694 while (!info.ToHandleChecked()->code()->IsOld()) {
1693 // To guarantee progress, we have to MakeOlder with different parities. 1695 // To guarantee progress, we have to MakeOlder with different parities.
1694 // We can't just use NO_MARKING_PARITY, since e.g. kExecutedOnceCodeAge is 1696 // We can't just use NO_MARKING_PARITY, since e.g. kExecutedOnceCodeAge is
1695 // always NO_MARKING_PARITY and the code age only progresses if the parity 1697 // always NO_MARKING_PARITY and the code age only progresses if the parity
1696 // is different. 1698 // is different.
1697 info.ToHandleChecked()->code()->MakeOlder(ODD_MARKING_PARITY); 1699 info.ToHandleChecked()->code()->MakeOlder(ODD_MARKING_PARITY);
1698 info.ToHandleChecked()->code()->MakeOlder(EVEN_MARKING_PARITY); 1700 info.ToHandleChecked()->code()->MakeOlder(EVEN_MARKING_PARITY);
1699 } 1701 }
1700 1702
1701 heap->CollectAllGarbage(); 1703 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1702 // Ensure code aging cleared the entry from the cache. 1704 // Ensure code aging cleared the entry from the cache.
1703 info = compilation_cache->LookupScript( 1705 info = compilation_cache->LookupScript(
1704 source, Handle<Object>(), 0, 0, 1706 source, Handle<Object>(), 0, 0,
1705 v8::ScriptOriginOptions(false, true, false), native_context, 1707 v8::ScriptOriginOptions(false, true, false), native_context,
1706 language_mode); 1708 language_mode);
1707 CHECK(info.is_null()); 1709 CHECK(info.is_null());
1708 } 1710 }
1709 1711
1710 1712
1711 static void OptimizeEmptyFunction(const char* name) { 1713 static void OptimizeEmptyFunction(const char* name) {
(...skipping 42 matching lines...)
1754 v8::V8::Initialize(); 1756 v8::V8::Initialize();
1755 1757
1756 // Some flags turn Scavenge collections into Mark-sweep collections 1758 // Some flags turn Scavenge collections into Mark-sweep collections
1757 // and hence are incompatible with this test case. 1759 // and hence are incompatible with this test case.
1758 if (FLAG_gc_global || FLAG_stress_compaction) return; 1760 if (FLAG_gc_global || FLAG_stress_compaction) return;
1759 FLAG_retain_maps_for_n_gc = 0; 1761 FLAG_retain_maps_for_n_gc = 0;
1760 1762
1761 static const int kNumTestContexts = 10; 1763 static const int kNumTestContexts = 10;
1762 1764
1763 Isolate* isolate = CcTest::i_isolate(); 1765 Isolate* isolate = CcTest::i_isolate();
1764 Heap* heap = isolate->heap();
1765 HandleScope scope(isolate); 1766 HandleScope scope(isolate);
1766 v8::Local<v8::Context> ctx[kNumTestContexts]; 1767 v8::Local<v8::Context> ctx[kNumTestContexts];
1767 if (!isolate->use_crankshaft()) return; 1768 if (!isolate->use_crankshaft()) return;
1768 1769
1769 CHECK_EQ(0, CountNativeContexts()); 1770 CHECK_EQ(0, CountNativeContexts());
1770 1771
1771 // Create a number of global contexts which get linked together. 1772 // Create a number of global contexts which get linked together.
1772 for (int i = 0; i < kNumTestContexts; i++) { 1773 for (int i = 0; i < kNumTestContexts; i++) {
1773 ctx[i] = v8::Context::New(CcTest::isolate()); 1774 ctx[i] = v8::Context::New(CcTest::isolate());
1774 1775
1775 // Collect garbage that might have been created by one of the 1776 // Collect garbage that might have been created by one of the
1776 // installed extensions. 1777 // installed extensions.
1777 isolate->compilation_cache()->Clear(); 1778 isolate->compilation_cache()->Clear();
1778 heap->CollectAllGarbage(); 1779 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1779 1780
1780 CHECK_EQ(i + 1, CountNativeContexts()); 1781 CHECK_EQ(i + 1, CountNativeContexts());
1781 1782
1782 ctx[i]->Enter(); 1783 ctx[i]->Enter();
1783 1784
1784 // Create a handle scope so no function objects get stuck in the outer 1785 // Create a handle scope so no function objects get stuck in the outer
1785 // handle scope. 1786 // handle scope.
1786 HandleScope scope(isolate); 1787 HandleScope scope(isolate);
1787 CHECK_EQ(0, CountOptimizedUserFunctions(ctx[i])); 1788 CHECK_EQ(0, CountOptimizedUserFunctions(ctx[i]));
1788 OptimizeEmptyFunction("f1"); 1789 OptimizeEmptyFunction("f1");
1789 CHECK_EQ(1, CountOptimizedUserFunctions(ctx[i])); 1790 CHECK_EQ(1, CountOptimizedUserFunctions(ctx[i]));
1790 OptimizeEmptyFunction("f2"); 1791 OptimizeEmptyFunction("f2");
1791 CHECK_EQ(2, CountOptimizedUserFunctions(ctx[i])); 1792 CHECK_EQ(2, CountOptimizedUserFunctions(ctx[i]));
1792 OptimizeEmptyFunction("f3"); 1793 OptimizeEmptyFunction("f3");
1793 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i])); 1794 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
1794 OptimizeEmptyFunction("f4"); 1795 OptimizeEmptyFunction("f4");
1795 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i])); 1796 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
1796 OptimizeEmptyFunction("f5"); 1797 OptimizeEmptyFunction("f5");
1797 CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i])); 1798 CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i]));
1798 1799
1799 // Remove function f1, and 1800 // Remove function f1, and
1800 CompileRun("f1=null"); 1801 CompileRun("f1=null");
1801 1802
1802 // Scavenge treats these references as strong. 1803 // Scavenge treats these references as strong.
1803 for (int j = 0; j < 10; j++) { 1804 for (int j = 0; j < 10; j++) {
1804 CcTest::heap()->CollectGarbage(NEW_SPACE); 1805 CcTest::CollectGarbage(NEW_SPACE);
1805 CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i])); 1806 CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i]));
1806 } 1807 }
1807 1808
1808 // Mark compact handles the weak references. 1809 // Mark compact handles the weak references.
1809 isolate->compilation_cache()->Clear(); 1810 isolate->compilation_cache()->Clear();
1810 heap->CollectAllGarbage(); 1811 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1811 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i])); 1812 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
1812 1813
1813 // Get rid of f3 and f5 in the same way. 1814 // Get rid of f3 and f5 in the same way.
1814 CompileRun("f3=null"); 1815 CompileRun("f3=null");
1815 for (int j = 0; j < 10; j++) { 1816 for (int j = 0; j < 10; j++) {
1816 CcTest::heap()->CollectGarbage(NEW_SPACE); 1817 CcTest::CollectGarbage(NEW_SPACE);
1817 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i])); 1818 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
1818 } 1819 }
1819 CcTest::heap()->CollectAllGarbage(); 1820 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1820 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i])); 1821 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
1821 CompileRun("f5=null"); 1822 CompileRun("f5=null");
1822 for (int j = 0; j < 10; j++) { 1823 for (int j = 0; j < 10; j++) {
1823 CcTest::heap()->CollectGarbage(NEW_SPACE); 1824 CcTest::CollectGarbage(NEW_SPACE);
1824 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i])); 1825 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
1825 } 1826 }
1826 CcTest::heap()->CollectAllGarbage(); 1827 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1827 CHECK_EQ(2, CountOptimizedUserFunctions(ctx[i])); 1828 CHECK_EQ(2, CountOptimizedUserFunctions(ctx[i]));
1828 1829
1829 ctx[i]->Exit(); 1830 ctx[i]->Exit();
1830 } 1831 }
1831 1832
1832 // Force compilation cache cleanup. 1833 // Force compilation cache cleanup.
1833 CcTest::heap()->NotifyContextDisposed(true); 1834 CcTest::heap()->NotifyContextDisposed(true);
1834 CcTest::heap()->CollectAllGarbage(); 1835 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1835 1836
1836 // Dispose the native contexts one by one. 1837 // Dispose the native contexts one by one.
1837 for (int i = 0; i < kNumTestContexts; i++) { 1838 for (int i = 0; i < kNumTestContexts; i++) {
1838 // TODO(dcarney): is there a better way to do this? 1839 // TODO(dcarney): is there a better way to do this?
1839 i::Object** unsafe = reinterpret_cast<i::Object**>(*ctx[i]); 1840 i::Object** unsafe = reinterpret_cast<i::Object**>(*ctx[i]);
1840 *unsafe = CcTest::heap()->undefined_value(); 1841 *unsafe = CcTest::heap()->undefined_value();
1841 ctx[i].Clear(); 1842 ctx[i].Clear();
1842 1843
1843 // Scavenge treats these references as strong. 1844 // Scavenge treats these references as strong.
1844 for (int j = 0; j < 10; j++) { 1845 for (int j = 0; j < 10; j++) {
1845 CcTest::heap()->CollectGarbage(i::NEW_SPACE); 1846 CcTest::CollectGarbage(i::NEW_SPACE);
1846 CHECK_EQ(kNumTestContexts - i, CountNativeContexts()); 1847 CHECK_EQ(kNumTestContexts - i, CountNativeContexts());
1847 } 1848 }
1848 1849
1849 // Mark compact handles the weak references. 1850 // Mark compact handles the weak references.
1850 CcTest::heap()->CollectAllGarbage(); 1851 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1851 CHECK_EQ(kNumTestContexts - i - 1, CountNativeContexts()); 1852 CHECK_EQ(kNumTestContexts - i - 1, CountNativeContexts());
1852 } 1853 }
1853 1854
1854 CHECK_EQ(0, CountNativeContexts()); 1855 CHECK_EQ(0, CountNativeContexts());
1855 } 1856 }
1856 1857
1857 1858
1858 // Count the number of native contexts in the weak list of native contexts 1859 // Count the number of native contexts in the weak list of native contexts
1859 // causing a GC after the specified number of elements. 1860 // causing a GC after the specified number of elements.
1860 static int CountNativeContextsWithGC(Isolate* isolate, int n) { 1861 static int CountNativeContextsWithGC(Isolate* isolate, int n) {
1861 Heap* heap = isolate->heap(); 1862 Heap* heap = isolate->heap();
1862 int count = 0; 1863 int count = 0;
1863 Handle<Object> object(heap->native_contexts_list(), isolate); 1864 Handle<Object> object(heap->native_contexts_list(), isolate);
1864 while (!object->IsUndefined(isolate)) { 1865 while (!object->IsUndefined(isolate)) {
1865 count++; 1866 count++;
1866 if (count == n) heap->CollectAllGarbage(); 1867 if (count == n)
1868 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1867 object = 1869 object =
1868 Handle<Object>(Context::cast(*object)->next_context_link(), isolate); 1870 Handle<Object>(Context::cast(*object)->next_context_link(), isolate);
1869 } 1871 }
1870 return count; 1872 return count;
1871 } 1873 }
1872 1874
1873 1875
1874 // Count the number of user functions in the weak list of optimized 1876 // Count the number of user functions in the weak list of optimized
1875 // functions attached to a native context causing a GC after the 1877 // functions attached to a native context causing a GC after the
1876 // specified number of elements. 1878 // specified number of elements.
1877 static int CountOptimizedUserFunctionsWithGC(v8::Local<v8::Context> context, 1879 static int CountOptimizedUserFunctionsWithGC(v8::Local<v8::Context> context,
1878 int n) { 1880 int n) {
1879 int count = 0; 1881 int count = 0;
1880 Handle<Context> icontext = v8::Utils::OpenHandle(*context); 1882 Handle<Context> icontext = v8::Utils::OpenHandle(*context);
1881 Isolate* isolate = icontext->GetIsolate(); 1883 Isolate* isolate = icontext->GetIsolate();
1882 Handle<Object> object(icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST), 1884 Handle<Object> object(icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST),
1883 isolate); 1885 isolate);
1884 while (object->IsJSFunction() && 1886 while (object->IsJSFunction() &&
1885 !Handle<JSFunction>::cast(object)->shared()->IsBuiltin()) { 1887 !Handle<JSFunction>::cast(object)->shared()->IsBuiltin()) {
1886 count++; 1888 count++;
1887 if (count == n) isolate->heap()->CollectAllGarbage(); 1889 if (count == n)
1890 isolate->heap()->CollectAllGarbage(
1891 i::Heap::kFinalizeIncrementalMarkingMask,
1892 i::GarbageCollectionReason::kTesting);
1888 object = Handle<Object>( 1893 object = Handle<Object>(
1889 Object::cast(JSFunction::cast(*object)->next_function_link()), 1894 Object::cast(JSFunction::cast(*object)->next_function_link()),
1890 isolate); 1895 isolate);
1891 } 1896 }
1892 return count; 1897 return count;
1893 } 1898 }
1894 1899
1895 1900
1896 TEST(TestInternalWeakListsTraverseWithGC) { 1901 TEST(TestInternalWeakListsTraverseWithGC) {
1897 FLAG_always_opt = false; 1902 FLAG_always_opt = false;
(...skipping 62 matching lines...)
1960 "var half_size_reg_exp;" 1965 "var half_size_reg_exp;"
1961 "while (reg_exp_source.length < 20 * 1024) {" 1966 "while (reg_exp_source.length < 20 * 1024) {"
1962 " half_size_reg_exp = reg_exp_source;" 1967 " half_size_reg_exp = reg_exp_source;"
1963 " reg_exp_source = reg_exp_source + reg_exp_source;" 1968 " reg_exp_source = reg_exp_source + reg_exp_source;"
1964 "}" 1969 "}"
1965 // Flatten string. 1970 // Flatten string.
1966 "reg_exp_source.match(/f/);"); 1971 "reg_exp_source.match(/f/);");
1967 1972
1968 // Get initial heap size after several full GCs, which will stabilize 1973 // Get initial heap size after several full GCs, which will stabilize
1969 // the heap size and return with sweeping finished completely. 1974 // the heap size and return with sweeping finished completely.
1970 CcTest::heap()->CollectAllAvailableGarbage("initial cleanup"); 1975 CcTest::CollectAllAvailableGarbage();
1971 MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector(); 1976 MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
1972 if (collector->sweeping_in_progress()) { 1977 if (collector->sweeping_in_progress()) {
1973 collector->EnsureSweepingCompleted(); 1978 collector->EnsureSweepingCompleted();
1974 } 1979 }
1975 int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects()); 1980 int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());
1976 1981
1977 CompileRun("'foo'.match(reg_exp_source);"); 1982 CompileRun("'foo'.match(reg_exp_source);");
1978 CcTest::heap()->CollectAllGarbage(); 1983 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1979 int size_with_regexp = static_cast<int>(CcTest::heap()->SizeOfObjects()); 1984 int size_with_regexp = static_cast<int>(CcTest::heap()->SizeOfObjects());
1980 1985
1981 CompileRun("'foo'.match(half_size_reg_exp);"); 1986 CompileRun("'foo'.match(half_size_reg_exp);");
1982 CcTest::heap()->CollectAllGarbage(); 1987 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1983 int size_with_optimized_regexp = 1988 int size_with_optimized_regexp =
1984 static_cast<int>(CcTest::heap()->SizeOfObjects()); 1989 static_cast<int>(CcTest::heap()->SizeOfObjects());
1985 1990
1986 int size_of_regexp_code = size_with_regexp - initial_size; 1991 int size_of_regexp_code = size_with_regexp - initial_size;
1987 1992
1988 // On some platforms the debug-code flag causes huge amounts of regexp code 1993 // On some platforms the debug-code flag causes huge amounts of regexp code
1989 // to be emitted, breaking this test. 1994 // to be emitted, breaking this test.
1990 if (!FLAG_debug_code) { 1995 if (!FLAG_debug_code) {
1991 CHECK_LE(size_of_regexp_code, 1 * MB); 1996 CHECK_LE(size_of_regexp_code, 1 * MB);
1992 } 1997 }
1993 1998
1994 // Small regexp is half the size, but compiles to more than twice the code 1999 // Small regexp is half the size, but compiles to more than twice the code
1995 // due to the optimization steps. 2000 // due to the optimization steps.
1996 CHECK_GE(size_with_optimized_regexp, 2001 CHECK_GE(size_with_optimized_regexp,
1997 size_with_regexp + size_of_regexp_code * 2); 2002 size_with_regexp + size_of_regexp_code * 2);
1998 } 2003 }
1999 2004
2000 2005
2001 HEAP_TEST(TestSizeOfObjects) { 2006 HEAP_TEST(TestSizeOfObjects) {
2002 v8::V8::Initialize(); 2007 v8::V8::Initialize();
2003 Heap* heap = CcTest::heap(); 2008 Heap* heap = CcTest::heap();
2004 MarkCompactCollector* collector = heap->mark_compact_collector(); 2009 MarkCompactCollector* collector = heap->mark_compact_collector();
2005 2010
2006 // Get initial heap size after several full GCs, which will stabilize 2011 // Get initial heap size after several full GCs, which will stabilize
2007 // the heap size and return with sweeping finished completely. 2012 // the heap size and return with sweeping finished completely.
2008 heap->CollectAllAvailableGarbage("initial cleanup"); 2013 CcTest::CollectAllAvailableGarbage();
2009 if (collector->sweeping_in_progress()) { 2014 if (collector->sweeping_in_progress()) {
2010 collector->EnsureSweepingCompleted(); 2015 collector->EnsureSweepingCompleted();
2011 } 2016 }
2012 int initial_size = static_cast<int>(heap->SizeOfObjects()); 2017 int initial_size = static_cast<int>(heap->SizeOfObjects());
2013 2018
2014 { 2019 {
2015 // Allocate objects on several different old-space pages so that 2020 // Allocate objects on several different old-space pages so that
2016 // concurrent sweeper threads will be busy sweeping the old space on 2021 // concurrent sweeper threads will be busy sweeping the old space on
2017 // subsequent GC runs. 2022 // subsequent GC runs.
2018 AlwaysAllocateScope always_allocate(CcTest::i_isolate()); 2023 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
2019 int filler_size = static_cast<int>(FixedArray::SizeFor(8192)); 2024 int filler_size = static_cast<int>(FixedArray::SizeFor(8192));
2020 for (int i = 1; i <= 100; i++) { 2025 for (int i = 1; i <= 100; i++) {
2021 heap->AllocateFixedArray(8192, TENURED).ToObjectChecked(); 2026 heap->AllocateFixedArray(8192, TENURED).ToObjectChecked();
2022 CHECK_EQ(initial_size + i * filler_size, 2027 CHECK_EQ(initial_size + i * filler_size,
2023 static_cast<int>(heap->SizeOfObjects())); 2028 static_cast<int>(heap->SizeOfObjects()));
2024 } 2029 }
2025 } 2030 }
2026 2031
2027 // The heap size should go back to initial size after a full GC, even 2032 // The heap size should go back to initial size after a full GC, even
2028 // though sweeping didn't finish yet. 2033 // though sweeping didn't finish yet.
2029 heap->CollectAllGarbage(); 2034 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
2030 // Normally sweeping would not be complete here, but no guarantees. 2035 // Normally sweeping would not be complete here, but no guarantees.
2031 CHECK_EQ(initial_size, static_cast<int>(heap->SizeOfObjects())); 2036 CHECK_EQ(initial_size, static_cast<int>(heap->SizeOfObjects()));
2032 // Waiting for sweeper threads should not change heap size. 2037 // Waiting for sweeper threads should not change heap size.
2033 if (collector->sweeping_in_progress()) { 2038 if (collector->sweeping_in_progress()) {
2034 collector->EnsureSweepingCompleted(); 2039 collector->EnsureSweepingCompleted();
2035 } 2040 }
2036 CHECK_EQ(initial_size, static_cast<int>(heap->SizeOfObjects())); 2041 CHECK_EQ(initial_size, static_cast<int>(heap->SizeOfObjects()));
2037 } 2042 }
2038 2043
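Both size-measuring tests in this region (TestSizeOfRegExpCode above and TestSizeOfObjects) use the same idiom for a stable baseline: run a last-resort GC through the test wrapper, then wait for concurrent sweeping before sampling SizeOfObjects. A sketch of that idiom as a stand-alone helper; the helper itself is hypothetical, the calls mirror the ones in the diff:

// Hypothetical helper (not part of the patch): stabilize the heap and
// return the current object size for use as a test baseline.
static int StableSizeOfObjects(i::Heap* heap) {
  // The wrapper is assumed to pass GarbageCollectionReason::kTesting through.
  CcTest::CollectAllAvailableGarbage();
  i::MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  return static_cast<int>(heap->SizeOfObjects());
}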
2039 2044
(...skipping 353 matching lines...)
2393 new_capacity = new_space->TotalCapacity(); 2398 new_capacity = new_space->TotalCapacity();
2394 CHECK(old_capacity == new_capacity); 2399 CHECK(old_capacity == new_capacity);
2395 2400
2396 // Explicitly shrinking should not affect space capacity. 2401 // Explicitly shrinking should not affect space capacity.
2397 old_capacity = new_space->TotalCapacity(); 2402 old_capacity = new_space->TotalCapacity();
2398 new_space->Shrink(); 2403 new_space->Shrink();
2399 new_capacity = new_space->TotalCapacity(); 2404 new_capacity = new_space->TotalCapacity();
2400 CHECK(old_capacity == new_capacity); 2405 CHECK(old_capacity == new_capacity);
2401 2406
2402 // Let the scavenger empty the new space. 2407 // Let the scavenger empty the new space.
2403 heap->CollectGarbage(NEW_SPACE); 2408 CcTest::CollectGarbage(NEW_SPACE);
2404 CHECK_LE(new_space->Size(), old_capacity); 2409 CHECK_LE(new_space->Size(), old_capacity);
2405 2410
2406 // Explicitly shrinking should halve the space capacity. 2411 // Explicitly shrinking should halve the space capacity.
2407 old_capacity = new_space->TotalCapacity(); 2412 old_capacity = new_space->TotalCapacity();
2408 new_space->Shrink(); 2413 new_space->Shrink();
2409 new_capacity = new_space->TotalCapacity(); 2414 new_capacity = new_space->TotalCapacity();
2410 CHECK(old_capacity == 2 * new_capacity); 2415 CHECK(old_capacity == 2 * new_capacity);
2411 2416
2412 // Consecutive shrinking should not affect space capacity. 2417 // Consecutive shrinking should not affect space capacity.
2413 old_capacity = new_space->TotalCapacity(); 2418 old_capacity = new_space->TotalCapacity();
(...skipping 13 matching lines...)
2427 } 2432 }
2428 2433
2429 v8::HandleScope scope(CcTest::isolate()); 2434 v8::HandleScope scope(CcTest::isolate());
2430 NewSpace* new_space = heap->new_space(); 2435 NewSpace* new_space = heap->new_space();
2431 intptr_t old_capacity, new_capacity; 2436 intptr_t old_capacity, new_capacity;
2432 old_capacity = new_space->TotalCapacity(); 2437 old_capacity = new_space->TotalCapacity();
2433 new_space->Grow(); 2438 new_space->Grow();
2434 new_capacity = new_space->TotalCapacity(); 2439 new_capacity = new_space->TotalCapacity();
2435 CHECK(2 * old_capacity == new_capacity); 2440 CHECK(2 * old_capacity == new_capacity);
2436 FillUpNewSpace(new_space); 2441 FillUpNewSpace(new_space);
2437 heap->CollectAllAvailableGarbage(); 2442 CcTest::CollectAllAvailableGarbage();
2438 new_capacity = new_space->TotalCapacity(); 2443 new_capacity = new_space->TotalCapacity();
2439 CHECK(old_capacity == new_capacity); 2444 CHECK(old_capacity == new_capacity);
2440 } 2445 }
2441 2446
2442 2447
2443 static int NumberOfGlobalObjects() { 2448 static int NumberOfGlobalObjects() {
2444 int count = 0; 2449 int count = 0;
2445 HeapIterator iterator(CcTest::heap()); 2450 HeapIterator iterator(CcTest::heap());
2446 for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) { 2451 for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
2447 if (obj->IsJSGlobalObject()) count++; 2452 if (obj->IsJSGlobalObject()) count++;
(...skipping 10 matching lines...)
2458 v8::HandleScope outer_scope(isolate); 2463 v8::HandleScope outer_scope(isolate);
2459 v8::Persistent<v8::Context> ctx1p; 2464 v8::Persistent<v8::Context> ctx1p;
2460 v8::Persistent<v8::Context> ctx2p; 2465 v8::Persistent<v8::Context> ctx2p;
2461 { 2466 {
2462 v8::HandleScope scope(isolate); 2467 v8::HandleScope scope(isolate);
2463 ctx1p.Reset(isolate, v8::Context::New(isolate)); 2468 ctx1p.Reset(isolate, v8::Context::New(isolate));
2464 ctx2p.Reset(isolate, v8::Context::New(isolate)); 2469 ctx2p.Reset(isolate, v8::Context::New(isolate));
2465 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter(); 2470 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2466 } 2471 }
2467 2472
2468 CcTest::heap()->CollectAllAvailableGarbage(); 2473 CcTest::CollectAllAvailableGarbage();
2469 CHECK_EQ(2, NumberOfGlobalObjects()); 2474 CHECK_EQ(2, NumberOfGlobalObjects());
2470 2475
2471 { 2476 {
2472 v8::HandleScope inner_scope(isolate); 2477 v8::HandleScope inner_scope(isolate);
2473 CompileRun("var v = {x: 42}"); 2478 CompileRun("var v = {x: 42}");
2474 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p); 2479 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2475 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p); 2480 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2476 v8::Local<v8::Value> v = 2481 v8::Local<v8::Value> v =
2477 ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked(); 2482 ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
2478 ctx2->Enter(); 2483 ctx2->Enter();
2479 CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust()); 2484 CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
2480 v8::Local<v8::Value> res = CompileRun( 2485 v8::Local<v8::Value> res = CompileRun(
2481 "function f() { return o.x; }" 2486 "function f() { return o.x; }"
2482 "for (var i = 0; i < 10; ++i) f();" 2487 "for (var i = 0; i < 10; ++i) f();"
2483 "%OptimizeFunctionOnNextCall(f);" 2488 "%OptimizeFunctionOnNextCall(f);"
2484 "f();"); 2489 "f();");
2485 CHECK_EQ(42, res->Int32Value(ctx2).FromJust()); 2490 CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
2486 CHECK(ctx2->Global() 2491 CHECK(ctx2->Global()
2487 ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0)) 2492 ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
2488 .FromJust()); 2493 .FromJust());
2489 ctx2->Exit(); 2494 ctx2->Exit();
2490 v8::Local<v8::Context>::New(isolate, ctx1)->Exit(); 2495 v8::Local<v8::Context>::New(isolate, ctx1)->Exit();
2491 ctx1p.Reset(); 2496 ctx1p.Reset();
2492 isolate->ContextDisposedNotification(); 2497 isolate->ContextDisposedNotification();
2493 } 2498 }
2494 CcTest::heap()->CollectAllAvailableGarbage(); 2499 CcTest::CollectAllAvailableGarbage();
2495 CHECK_EQ(1, NumberOfGlobalObjects()); 2500 CHECK_EQ(1, NumberOfGlobalObjects());
2496 ctx2p.Reset(); 2501 ctx2p.Reset();
2497 CcTest::heap()->CollectAllAvailableGarbage(); 2502 CcTest::CollectAllAvailableGarbage();
2498 CHECK_EQ(0, NumberOfGlobalObjects()); 2503 CHECK_EQ(0, NumberOfGlobalObjects());
2499 } 2504 }
2500 2505
2501 2506
2502 // Test that we don't embed functions from foreign contexts into 2507 // Test that we don't embed functions from foreign contexts into
2503 // optimized code. 2508 // optimized code.
2504 TEST(LeakNativeContextViaFunction) { 2509 TEST(LeakNativeContextViaFunction) {
2505 i::FLAG_allow_natives_syntax = true; 2510 i::FLAG_allow_natives_syntax = true;
2506 v8::Isolate* isolate = CcTest::isolate(); 2511 v8::Isolate* isolate = CcTest::isolate();
2507 v8::HandleScope outer_scope(isolate); 2512 v8::HandleScope outer_scope(isolate);
2508 v8::Persistent<v8::Context> ctx1p; 2513 v8::Persistent<v8::Context> ctx1p;
2509 v8::Persistent<v8::Context> ctx2p; 2514 v8::Persistent<v8::Context> ctx2p;
2510 { 2515 {
2511 v8::HandleScope scope(isolate); 2516 v8::HandleScope scope(isolate);
2512 ctx1p.Reset(isolate, v8::Context::New(isolate)); 2517 ctx1p.Reset(isolate, v8::Context::New(isolate));
2513 ctx2p.Reset(isolate, v8::Context::New(isolate)); 2518 ctx2p.Reset(isolate, v8::Context::New(isolate));
2514 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter(); 2519 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2515 } 2520 }
2516 2521
2517 CcTest::heap()->CollectAllAvailableGarbage(); 2522 CcTest::CollectAllAvailableGarbage();
2518 CHECK_EQ(2, NumberOfGlobalObjects()); 2523 CHECK_EQ(2, NumberOfGlobalObjects());
2519 2524
2520 { 2525 {
2521 v8::HandleScope inner_scope(isolate); 2526 v8::HandleScope inner_scope(isolate);
2522 CompileRun("var v = function() { return 42; }"); 2527 CompileRun("var v = function() { return 42; }");
2523 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p); 2528 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2524 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p); 2529 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2525 v8::Local<v8::Value> v = 2530 v8::Local<v8::Value> v =
2526 ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked(); 2531 ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
2527 ctx2->Enter(); 2532 ctx2->Enter();
2528 CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust()); 2533 CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
2529 v8::Local<v8::Value> res = CompileRun( 2534 v8::Local<v8::Value> res = CompileRun(
2530 "function f(x) { return x(); }" 2535 "function f(x) { return x(); }"
2531 "for (var i = 0; i < 10; ++i) f(o);" 2536 "for (var i = 0; i < 10; ++i) f(o);"
2532 "%OptimizeFunctionOnNextCall(f);" 2537 "%OptimizeFunctionOnNextCall(f);"
2533 "f(o);"); 2538 "f(o);");
2534 CHECK_EQ(42, res->Int32Value(ctx2).FromJust()); 2539 CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
2535 CHECK(ctx2->Global() 2540 CHECK(ctx2->Global()
2536 ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0)) 2541 ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
2537 .FromJust()); 2542 .FromJust());
2538 ctx2->Exit(); 2543 ctx2->Exit();
2539 ctx1->Exit(); 2544 ctx1->Exit();
2540 ctx1p.Reset(); 2545 ctx1p.Reset();
2541 isolate->ContextDisposedNotification(); 2546 isolate->ContextDisposedNotification();
2542 } 2547 }
2543 CcTest::heap()->CollectAllAvailableGarbage(); 2548 CcTest::CollectAllAvailableGarbage();
2544 CHECK_EQ(1, NumberOfGlobalObjects()); 2549 CHECK_EQ(1, NumberOfGlobalObjects());
2545 ctx2p.Reset(); 2550 ctx2p.Reset();
2546 CcTest::heap()->CollectAllAvailableGarbage(); 2551 CcTest::CollectAllAvailableGarbage();
2547 CHECK_EQ(0, NumberOfGlobalObjects()); 2552 CHECK_EQ(0, NumberOfGlobalObjects());
2548 } 2553 }
2549 2554
2550 2555
2551 TEST(LeakNativeContextViaMapKeyed) { 2556 TEST(LeakNativeContextViaMapKeyed) {
2552 i::FLAG_allow_natives_syntax = true; 2557 i::FLAG_allow_natives_syntax = true;
2553 v8::Isolate* isolate = CcTest::isolate(); 2558 v8::Isolate* isolate = CcTest::isolate();
2554 v8::HandleScope outer_scope(isolate); 2559 v8::HandleScope outer_scope(isolate);
2555 v8::Persistent<v8::Context> ctx1p; 2560 v8::Persistent<v8::Context> ctx1p;
2556 v8::Persistent<v8::Context> ctx2p; 2561 v8::Persistent<v8::Context> ctx2p;
2557 { 2562 {
2558 v8::HandleScope scope(isolate); 2563 v8::HandleScope scope(isolate);
2559 ctx1p.Reset(isolate, v8::Context::New(isolate)); 2564 ctx1p.Reset(isolate, v8::Context::New(isolate));
2560 ctx2p.Reset(isolate, v8::Context::New(isolate)); 2565 ctx2p.Reset(isolate, v8::Context::New(isolate));
2561 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter(); 2566 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2562 } 2567 }
2563 2568
2564 CcTest::heap()->CollectAllAvailableGarbage(); 2569 CcTest::CollectAllAvailableGarbage();
2565 CHECK_EQ(2, NumberOfGlobalObjects()); 2570 CHECK_EQ(2, NumberOfGlobalObjects());
2566 2571
2567 { 2572 {
2568 v8::HandleScope inner_scope(isolate); 2573 v8::HandleScope inner_scope(isolate);
2569 CompileRun("var v = [42, 43]"); 2574 CompileRun("var v = [42, 43]");
2570 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p); 2575 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2571 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p); 2576 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2572 v8::Local<v8::Value> v = 2577 v8::Local<v8::Value> v =
2573 ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked(); 2578 ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
2574 ctx2->Enter(); 2579 ctx2->Enter();
2575 CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust()); 2580 CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
2576 v8::Local<v8::Value> res = CompileRun( 2581 v8::Local<v8::Value> res = CompileRun(
2577 "function f() { return o[0]; }" 2582 "function f() { return o[0]; }"
2578 "for (var i = 0; i < 10; ++i) f();" 2583 "for (var i = 0; i < 10; ++i) f();"
2579 "%OptimizeFunctionOnNextCall(f);" 2584 "%OptimizeFunctionOnNextCall(f);"
2580 "f();"); 2585 "f();");
2581 CHECK_EQ(42, res->Int32Value(ctx2).FromJust()); 2586 CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
2582 CHECK(ctx2->Global() 2587 CHECK(ctx2->Global()
2583 ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0)) 2588 ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
2584 .FromJust()); 2589 .FromJust());
2585 ctx2->Exit(); 2590 ctx2->Exit();
2586 ctx1->Exit(); 2591 ctx1->Exit();
2587 ctx1p.Reset(); 2592 ctx1p.Reset();
2588 isolate->ContextDisposedNotification(); 2593 isolate->ContextDisposedNotification();
2589 } 2594 }
2590 CcTest::heap()->CollectAllAvailableGarbage(); 2595 CcTest::CollectAllAvailableGarbage();
2591 CHECK_EQ(1, NumberOfGlobalObjects()); 2596 CHECK_EQ(1, NumberOfGlobalObjects());
2592 ctx2p.Reset(); 2597 ctx2p.Reset();
2593 CcTest::heap()->CollectAllAvailableGarbage(); 2598 CcTest::CollectAllAvailableGarbage();
2594 CHECK_EQ(0, NumberOfGlobalObjects()); 2599 CHECK_EQ(0, NumberOfGlobalObjects());
2595 } 2600 }
2596 2601
2597 2602
2598 TEST(LeakNativeContextViaMapProto) { 2603 TEST(LeakNativeContextViaMapProto) {
2599 i::FLAG_allow_natives_syntax = true; 2604 i::FLAG_allow_natives_syntax = true;
2600 v8::Isolate* isolate = CcTest::isolate(); 2605 v8::Isolate* isolate = CcTest::isolate();
2601 v8::HandleScope outer_scope(isolate); 2606 v8::HandleScope outer_scope(isolate);
2602 v8::Persistent<v8::Context> ctx1p; 2607 v8::Persistent<v8::Context> ctx1p;
2603 v8::Persistent<v8::Context> ctx2p; 2608 v8::Persistent<v8::Context> ctx2p;
2604 { 2609 {
2605 v8::HandleScope scope(isolate); 2610 v8::HandleScope scope(isolate);
2606 ctx1p.Reset(isolate, v8::Context::New(isolate)); 2611 ctx1p.Reset(isolate, v8::Context::New(isolate));
2607 ctx2p.Reset(isolate, v8::Context::New(isolate)); 2612 ctx2p.Reset(isolate, v8::Context::New(isolate));
2608 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter(); 2613 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2609 } 2614 }
2610 2615
2611 CcTest::heap()->CollectAllAvailableGarbage(); 2616 CcTest::CollectAllAvailableGarbage();
2612 CHECK_EQ(2, NumberOfGlobalObjects()); 2617 CHECK_EQ(2, NumberOfGlobalObjects());
2613 2618
2614 { 2619 {
2615 v8::HandleScope inner_scope(isolate); 2620 v8::HandleScope inner_scope(isolate);
2616 CompileRun("var v = { y: 42}"); 2621 CompileRun("var v = { y: 42}");
2617 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p); 2622 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2618 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p); 2623 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2619 v8::Local<v8::Value> v = 2624 v8::Local<v8::Value> v =
2620 ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked(); 2625 ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
2621 ctx2->Enter(); 2626 ctx2->Enter();
2622 CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust()); 2627 CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
2623 v8::Local<v8::Value> res = CompileRun( 2628 v8::Local<v8::Value> res = CompileRun(
2624 "function f() {" 2629 "function f() {"
2625 " var p = {x: 42};" 2630 " var p = {x: 42};"
2626 " p.__proto__ = o;" 2631 " p.__proto__ = o;"
2627 " return p.x;" 2632 " return p.x;"
2628 "}" 2633 "}"
2629 "for (var i = 0; i < 10; ++i) f();" 2634 "for (var i = 0; i < 10; ++i) f();"
2630 "%OptimizeFunctionOnNextCall(f);" 2635 "%OptimizeFunctionOnNextCall(f);"
2631 "f();"); 2636 "f();");
2632 CHECK_EQ(42, res->Int32Value(ctx2).FromJust()); 2637 CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
2633 CHECK(ctx2->Global() 2638 CHECK(ctx2->Global()
2634 ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0)) 2639 ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
2635 .FromJust()); 2640 .FromJust());
2636 ctx2->Exit(); 2641 ctx2->Exit();
2637 ctx1->Exit(); 2642 ctx1->Exit();
2638 ctx1p.Reset(); 2643 ctx1p.Reset();
2639 isolate->ContextDisposedNotification(); 2644 isolate->ContextDisposedNotification();
2640 } 2645 }
2641 CcTest::heap()->CollectAllAvailableGarbage(); 2646 CcTest::CollectAllAvailableGarbage();
2642 CHECK_EQ(1, NumberOfGlobalObjects()); 2647 CHECK_EQ(1, NumberOfGlobalObjects());
2643 ctx2p.Reset(); 2648 ctx2p.Reset();
2644 CcTest::heap()->CollectAllAvailableGarbage(); 2649 CcTest::CollectAllAvailableGarbage();
2645 CHECK_EQ(0, NumberOfGlobalObjects()); 2650 CHECK_EQ(0, NumberOfGlobalObjects());
2646 } 2651 }
2647 2652
2648 2653
2649 TEST(InstanceOfStubWriteBarrier) { 2654 TEST(InstanceOfStubWriteBarrier) {
2650 i::FLAG_allow_natives_syntax = true; 2655 i::FLAG_allow_natives_syntax = true;
2651 #ifdef VERIFY_HEAP 2656 #ifdef VERIFY_HEAP
2652 i::FLAG_verify_heap = true; 2657 i::FLAG_verify_heap = true;
2653 #endif 2658 #endif
2654 2659
(...skipping 10 matching lines...)
2665 "function mkbar () { return new (new Function(\"\")) (); }" 2670 "function mkbar () { return new (new Function(\"\")) (); }"
2666 "function f (x) { return (x instanceof foo); }" 2671 "function f (x) { return (x instanceof foo); }"
2667 "function g () { f(mkbar()); }" 2672 "function g () { f(mkbar()); }"
2668 "f(new foo()); f(new foo());" 2673 "f(new foo()); f(new foo());"
2669 "%OptimizeFunctionOnNextCall(f);" 2674 "%OptimizeFunctionOnNextCall(f);"
2670 "f(new foo()); g();"); 2675 "f(new foo()); g();");
2671 } 2676 }
2672 2677
2673 IncrementalMarking* marking = CcTest::heap()->incremental_marking(); 2678 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
2674 marking->Stop(); 2679 marking->Stop();
2675 CcTest::heap()->StartIncrementalMarking(); 2680 CcTest::heap()->StartIncrementalMarking(i::Heap::kNoGCFlags,
2681 i::GarbageCollectionReason::kTesting);
2676 2682
2677 i::Handle<JSFunction> f = i::Handle<JSFunction>::cast( 2683 i::Handle<JSFunction> f = i::Handle<JSFunction>::cast(
2678 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast( 2684 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
2679 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked()))); 2685 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
2680 2686
2681 CHECK(f->IsOptimized()); 2687 CHECK(f->IsOptimized());
2682 2688
2683 while (!Marking::IsBlack(ObjectMarking::MarkBitFrom(f->code())) && 2689 while (!Marking::IsBlack(ObjectMarking::MarkBitFrom(f->code())) &&
2684 !marking->IsStopped()) { 2690 !marking->IsStopped()) {
2685 // Discard any pending GC requests; otherwise we will get a GC when we enter 2691 // Discard any pending GC requests; otherwise we will get a GC when we enter
2686 // the code below. 2692 // the code below.
2687 marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD, 2693 marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
2688 IncrementalMarking::FORCE_COMPLETION); 2694 IncrementalMarking::FORCE_COMPLETION);
2689 } 2695 }
2690 2696
2691 CHECK(marking->IsMarking()); 2697 CHECK(marking->IsMarking());
2692 2698
2693 { 2699 {
2694 v8::HandleScope scope(CcTest::isolate()); 2700 v8::HandleScope scope(CcTest::isolate());
2695 v8::Local<v8::Object> global = CcTest::global(); 2701 v8::Local<v8::Object> global = CcTest::global();
2696 v8::Local<v8::Function> g = v8::Local<v8::Function>::Cast( 2702 v8::Local<v8::Function> g = v8::Local<v8::Function>::Cast(
2697 global->Get(ctx, v8_str("g")).ToLocalChecked()); 2703 global->Get(ctx, v8_str("g")).ToLocalChecked());
2698 g->Call(ctx, global, 0, nullptr).ToLocalChecked(); 2704 g->Call(ctx, global, 0, nullptr).ToLocalChecked();
2699 } 2705 }
2700 2706
2701 CcTest::heap()->incremental_marking()->set_should_hurry(true); 2707 CcTest::heap()->incremental_marking()->set_should_hurry(true);
2702 CcTest::heap()->CollectGarbage(OLD_SPACE); 2708 CcTest::CollectGarbage(OLD_SPACE);
2703 } 2709 }
2704 2710
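The test above also exercises the new two-argument Heap::StartIncrementalMarking(flags, reason). Only GarbageCollectionReason::kTesting appears anywhere in this file, so any other enum values would be guesses. A hedged sketch of the start-step-finalize sequence as it would sit inside a cctest body, using only calls that appear in this diff:

// Sketch (inside a cctest body): start incremental marking with an explicit
// reason, advance it one large step, then finalize it with a full GC.
i::IncrementalMarking* marking = CcTest::heap()->incremental_marking();
marking->Stop();
CcTest::heap()->StartIncrementalMarking(i::Heap::kNoGCFlags,
                                        i::GarbageCollectionReason::kTesting);
marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
              IncrementalMarking::FORCE_COMPLETION);
CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);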
2705 namespace { 2711 namespace {
2706 2712
2707 int GetProfilerTicks(SharedFunctionInfo* shared) { 2713 int GetProfilerTicks(SharedFunctionInfo* shared) {
2708 return FLAG_ignition ? shared->profiler_ticks() 2714 return FLAG_ignition ? shared->profiler_ticks()
2709 : shared->code()->profiler_ticks(); 2715 : shared->code()->profiler_ticks();
2710 } 2716 }
2711 2717
2712 } // namespace 2718 } // namespace
(...skipping 25 matching lines...)
2738 "f();"); 2744 "f();");
2739 } 2745 }
2740 i::Handle<JSFunction> f = i::Handle<JSFunction>::cast( 2746 i::Handle<JSFunction> f = i::Handle<JSFunction>::cast(
2741 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast( 2747 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
2742 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked()))); 2748 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
2743 CHECK(f->IsOptimized()); 2749 CHECK(f->IsOptimized());
2744 2750
2745 // Make sure incremental marking is not running. 2751 // Make sure incremental marking is not running.
2746 CcTest::heap()->incremental_marking()->Stop(); 2752 CcTest::heap()->incremental_marking()->Stop();
2747 2753
2748 CcTest::heap()->StartIncrementalMarking(); 2754 CcTest::heap()->StartIncrementalMarking(i::Heap::kNoGCFlags,
2755 i::GarbageCollectionReason::kTesting);
2749 // The following calls will increment CcTest::heap()->global_ic_age(). 2756 // The following calls will increment CcTest::heap()->global_ic_age().
2750 CcTest::isolate()->ContextDisposedNotification(); 2757 CcTest::isolate()->ContextDisposedNotification();
2751 heap::SimulateIncrementalMarking(CcTest::heap()); 2758 heap::SimulateIncrementalMarking(CcTest::heap());
2752 CcTest::heap()->CollectAllGarbage(); 2759 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
2753 2760
2754 CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age()); 2761 CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
2755 CHECK_EQ(0, f->shared()->opt_count()); 2762 CHECK_EQ(0, f->shared()->opt_count());
2756 CHECK_EQ(0, GetProfilerTicks(f->shared())); 2763 CHECK_EQ(0, GetProfilerTicks(f->shared()));
2757 } 2764 }
2758 2765
2759 2766
2760 TEST(ResetSharedFunctionInfoCountersDuringMarkSweep) { 2767 TEST(ResetSharedFunctionInfoCountersDuringMarkSweep) {
2761 i::FLAG_stress_compaction = false; 2768 i::FLAG_stress_compaction = false;
2762 i::FLAG_allow_natives_syntax = true; 2769 i::FLAG_allow_natives_syntax = true;
(...skipping 23 matching lines...)
2786 i::Handle<JSFunction> f = i::Handle<JSFunction>::cast( 2793 i::Handle<JSFunction> f = i::Handle<JSFunction>::cast(
2787 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast( 2794 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
2788 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked()))); 2795 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
2789 CHECK(f->IsOptimized()); 2796 CHECK(f->IsOptimized());
2790 2797
2791 // Make sure incremental marking is not running. 2798 // Make sure incremental marking is not running.
2792 CcTest::heap()->incremental_marking()->Stop(); 2799 CcTest::heap()->incremental_marking()->Stop();
2793 2800
2794 // The following two calls will increment CcTest::heap()->global_ic_age(). 2801 // The following two calls will increment CcTest::heap()->global_ic_age().
2795 CcTest::isolate()->ContextDisposedNotification(); 2802 CcTest::isolate()->ContextDisposedNotification();
2796 CcTest::heap()->CollectAllGarbage(); 2803 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
2797 2804
2798 CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age()); 2805 CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
2799 CHECK_EQ(0, f->shared()->opt_count()); 2806 CHECK_EQ(0, f->shared()->opt_count());
2800 CHECK_EQ(0, GetProfilerTicks(f->shared())); 2807 CHECK_EQ(0, GetProfilerTicks(f->shared()));
2801 } 2808 }
2802 2809
2803 2810
2804 HEAP_TEST(GCFlags) { 2811 HEAP_TEST(GCFlags) {
2805 CcTest::InitializeVM(); 2812 CcTest::InitializeVM();
2806 Heap* heap = CcTest::heap(); 2813 Heap* heap = CcTest::heap();
2807 2814
2808 heap->set_current_gc_flags(Heap::kNoGCFlags); 2815 heap->set_current_gc_flags(Heap::kNoGCFlags);
2809 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_); 2816 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
2810 2817
2811 // Set the flags to check whether we appropriately reset them after the GC. 2818 // Set the flags to check whether we appropriately reset them after the GC.
2812 heap->set_current_gc_flags(Heap::kAbortIncrementalMarkingMask); 2819 heap->set_current_gc_flags(Heap::kAbortIncrementalMarkingMask);
2813 heap->CollectAllGarbage(Heap::kReduceMemoryFootprintMask); 2820 CcTest::CollectAllGarbage(Heap::kReduceMemoryFootprintMask);
2814 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_); 2821 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
2815 2822
2816 MarkCompactCollector* collector = heap->mark_compact_collector(); 2823 MarkCompactCollector* collector = heap->mark_compact_collector();
2817 if (collector->sweeping_in_progress()) { 2824 if (collector->sweeping_in_progress()) {
2818 collector->EnsureSweepingCompleted(); 2825 collector->EnsureSweepingCompleted();
2819 } 2826 }
2820 2827
2821 IncrementalMarking* marking = heap->incremental_marking(); 2828 IncrementalMarking* marking = heap->incremental_marking();
2822 marking->Stop(); 2829 marking->Stop();
2823 heap->StartIncrementalMarking(Heap::kReduceMemoryFootprintMask); 2830 heap->StartIncrementalMarking(Heap::kReduceMemoryFootprintMask,
2831 i::GarbageCollectionReason::kTesting);
2824 CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask); 2832 CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);
2825 2833
2826 heap->CollectGarbage(NEW_SPACE); 2834 CcTest::CollectGarbage(NEW_SPACE);
2827 // NewSpace scavenges should not overwrite the flags. 2835 // NewSpace scavenges should not overwrite the flags.
2828 CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask); 2836 CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);
2829 2837
2830 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask); 2838 CcTest::CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
2831 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_); 2839 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
2832 } 2840 }
2833 2841
2834 2842
2835 TEST(IdleNotificationFinishMarking) { 2843 TEST(IdleNotificationFinishMarking) {
2836 i::FLAG_allow_natives_syntax = true; 2844 i::FLAG_allow_natives_syntax = true;
2837 CcTest::InitializeVM(); 2845 CcTest::InitializeVM();
2838 const int initial_gc_count = CcTest::heap()->gc_count(); 2846 const int initial_gc_count = CcTest::heap()->gc_count();
2839 heap::SimulateFullSpace(CcTest::heap()->old_space()); 2847 heap::SimulateFullSpace(CcTest::heap()->old_space());
2840 IncrementalMarking* marking = CcTest::heap()->incremental_marking(); 2848 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
2841 marking->Stop(); 2849 marking->Stop();
2842 CcTest::heap()->StartIncrementalMarking(); 2850 CcTest::heap()->StartIncrementalMarking(i::Heap::kNoGCFlags,
2851 i::GarbageCollectionReason::kTesting);
2843 2852
2844 CHECK_EQ(CcTest::heap()->gc_count(), initial_gc_count); 2853 CHECK_EQ(CcTest::heap()->gc_count(), initial_gc_count);
2845 2854
2846 // TODO(hpayer): We cannot write a proper unit test for the heap right now. 2855 // TODO(hpayer): We cannot write a proper unit test for the heap right now.
2847 // The ideal test would call kMaxIdleMarkingDelayCounter to test the 2856 // The ideal test would call kMaxIdleMarkingDelayCounter to test the
2848 // marking delay counter. 2857 // marking delay counter.
2849 2858
2850 // Perform a huge incremental marking step but don't complete marking. 2859 // Perform a huge incremental marking step but don't complete marking.
2851 do { 2860 do {
2852 marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD, 2861 marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
(...skipping 501 matching lines...)
3354 i::Handle<JSReceiver> root = 3363 i::Handle<JSReceiver> root =
3355 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast( 3364 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
3356 CcTest::global()->Get(ctx, v8_str("root")).ToLocalChecked())); 3365 CcTest::global()->Get(ctx, v8_str("root")).ToLocalChecked()));
3357 3366
3358 // Count number of live transitions before marking. 3367 // Count number of live transitions before marking.
3359 int transitions_before = CountMapTransitions(root->map()); 3368 int transitions_before = CountMapTransitions(root->map());
3360 CompileRun("%DebugPrint(root);"); 3369 CompileRun("%DebugPrint(root);");
3361 CHECK_EQ(transitions_count, transitions_before); 3370 CHECK_EQ(transitions_count, transitions_before);
3362 3371
3363 heap::SimulateIncrementalMarking(CcTest::heap()); 3372 heap::SimulateIncrementalMarking(CcTest::heap());
3364 CcTest::heap()->CollectAllGarbage(); 3373 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
3365 3374
3366 // Count number of live transitions after marking. Note that one transition 3375 // Count number of live transitions after marking. Note that one transition
3367 // is left, because 'o' still holds an instance of one transition target. 3376 // is left, because 'o' still holds an instance of one transition target.
3368 int transitions_after = CountMapTransitions(root->map()); 3377 int transitions_after = CountMapTransitions(root->map());
3369 CompileRun("%DebugPrint(root);"); 3378 CompileRun("%DebugPrint(root);");
3370 CHECK_EQ(1, transitions_after); 3379 CHECK_EQ(1, transitions_after);
3371 } 3380 }
3372 3381
3373 3382
3374 #ifdef DEBUG 3383 #ifdef DEBUG
(...skipping 43 matching lines...)
3418 3427
3419 // Count number of live transitions before marking. 3428 // Count number of live transitions before marking.
3420 int transitions_before = CountMapTransitions(root->map()); 3429 int transitions_before = CountMapTransitions(root->map());
3421 CHECK_EQ(transitions_count, transitions_before); 3430 CHECK_EQ(transitions_count, transitions_before);
3422 3431
3423 // Get rid of o 3432 // Get rid of o
3424 CompileRun("o = new F;" 3433 CompileRun("o = new F;"
3425 "root = new F"); 3434 "root = new F");
3426 root = GetByName("root"); 3435 root = GetByName("root");
3427 AddPropertyTo(2, root, "funny"); 3436 AddPropertyTo(2, root, "funny");
3428 CcTest::heap()->CollectGarbage(NEW_SPACE); 3437 CcTest::CollectGarbage(NEW_SPACE);
3429 3438
3430 // Count number of live transitions after marking. Note that one transition 3439 // Count number of live transitions after marking. Note that one transition
3431 // is left, because 'o' still holds an instance of one transition target. 3440 // is left, because 'o' still holds an instance of one transition target.
3432 int transitions_after = CountMapTransitions( 3441 int transitions_after = CountMapTransitions(
3433 Map::cast(root->map()->GetBackPointer())); 3442 Map::cast(root->map()->GetBackPointer()));
3434 CHECK_EQ(1, transitions_after); 3443 CHECK_EQ(1, transitions_after);
3435 } 3444 }
3436 3445
3437 3446
3438 TEST(TransitionArrayShrinksDuringAllocToOne) { 3447 TEST(TransitionArrayShrinksDuringAllocToOne) {
3439 i::FLAG_stress_compaction = false; 3448 i::FLAG_stress_compaction = false;
3440 i::FLAG_allow_natives_syntax = true; 3449 i::FLAG_allow_natives_syntax = true;
3441 CcTest::InitializeVM(); 3450 CcTest::InitializeVM();
3442 v8::HandleScope scope(CcTest::isolate()); 3451 v8::HandleScope scope(CcTest::isolate());
3443 static const int transitions_count = 10; 3452 static const int transitions_count = 10;
3444 CompileRun("function F() {}"); 3453 CompileRun("function F() {}");
3445 AddTransitions(transitions_count); 3454 AddTransitions(transitions_count);
3446 CompileRun("var root = new F;"); 3455 CompileRun("var root = new F;");
3447 Handle<JSObject> root = GetByName("root"); 3456 Handle<JSObject> root = GetByName("root");
3448 3457
3449 // Count number of live transitions before marking. 3458 // Count number of live transitions before marking.
3450 int transitions_before = CountMapTransitions(root->map()); 3459 int transitions_before = CountMapTransitions(root->map());
3451 CHECK_EQ(transitions_count, transitions_before); 3460 CHECK_EQ(transitions_count, transitions_before);
3452 3461
3453 root = GetByName("root"); 3462 root = GetByName("root");
3454 AddPropertyTo(2, root, "funny"); 3463 AddPropertyTo(2, root, "funny");
3455 CcTest::heap()->CollectGarbage(NEW_SPACE); 3464 CcTest::CollectGarbage(NEW_SPACE);
3456 3465
3457 // Count number of live transitions after marking. Note that one transition 3466 // Count number of live transitions after marking. Note that one transition
3458 // is left, because 'o' still holds an instance of one transition target. 3467 // is left, because 'o' still holds an instance of one transition target.
3459 int transitions_after = CountMapTransitions( 3468 int transitions_after = CountMapTransitions(
3460 Map::cast(root->map()->GetBackPointer())); 3469 Map::cast(root->map()->GetBackPointer()));
3461 CHECK_EQ(2, transitions_after); 3470 CHECK_EQ(2, transitions_after);
3462 } 3471 }
3463 3472
3464 3473
3465 TEST(TransitionArrayShrinksDuringAllocToOnePropertyFound) { 3474 TEST(TransitionArrayShrinksDuringAllocToOnePropertyFound) {
3466 i::FLAG_stress_compaction = false; 3475 i::FLAG_stress_compaction = false;
3467 i::FLAG_allow_natives_syntax = true; 3476 i::FLAG_allow_natives_syntax = true;
3468 CcTest::InitializeVM(); 3477 CcTest::InitializeVM();
3469 v8::HandleScope scope(CcTest::isolate()); 3478 v8::HandleScope scope(CcTest::isolate());
3470 static const int transitions_count = 10; 3479 static const int transitions_count = 10;
3471 CompileRun("function F() {}"); 3480 CompileRun("function F() {}");
3472 AddTransitions(transitions_count); 3481 AddTransitions(transitions_count);
3473 CompileRun("var root = new F;"); 3482 CompileRun("var root = new F;");
3474 Handle<JSObject> root = GetByName("root"); 3483 Handle<JSObject> root = GetByName("root");
3475 3484
3476 // Count number of live transitions before marking. 3485 // Count number of live transitions before marking.
3477 int transitions_before = CountMapTransitions(root->map()); 3486 int transitions_before = CountMapTransitions(root->map());
3478 CHECK_EQ(transitions_count, transitions_before); 3487 CHECK_EQ(transitions_count, transitions_before);
3479 3488
3480 root = GetByName("root"); 3489 root = GetByName("root");
3481 AddPropertyTo(0, root, "prop9"); 3490 AddPropertyTo(0, root, "prop9");
3482 CcTest::i_isolate()->heap()->CollectGarbage(OLD_SPACE); 3491 CcTest::CollectGarbage(OLD_SPACE);
3483 3492
3484 // Count number of live transitions after marking. Note that one transition 3493 // Count number of live transitions after marking. Note that one transition
3485 // is left, because 'o' still holds an instance of one transition target. 3494 // is left, because 'o' still holds an instance of one transition target.
3486 int transitions_after = CountMapTransitions( 3495 int transitions_after = CountMapTransitions(
3487 Map::cast(root->map()->GetBackPointer())); 3496 Map::cast(root->map()->GetBackPointer()));
3488 CHECK_EQ(1, transitions_after); 3497 CHECK_EQ(1, transitions_after);
3489 } 3498 }
3490 3499
3491 3500
3492 TEST(TransitionArraySimpleToFull) { 3501 TEST(TransitionArraySimpleToFull) {
(...skipping 45 matching lines...)
3538 CompileRun("function f(o) {" 3547 CompileRun("function f(o) {"
3539 " o.foo = 0;" 3548 " o.foo = 0;"
3540 "}" 3549 "}"
3541 "f(new Object);" 3550 "f(new Object);"
3542 "f(root);"); 3551 "f(root);");
3543 3552
3544 // This bug only triggers with aggressive IC clearing. 3553 // This bug only triggers with aggressive IC clearing.
3545 CcTest::heap()->AgeInlineCaches(); 3554 CcTest::heap()->AgeInlineCaches();
3546 3555
3547 // Explicitly request GC to perform final marking step and sweeping. 3556 // Explicitly request GC to perform final marking step and sweeping.
3548 CcTest::heap()->CollectAllGarbage(); 3557 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
3549 3558
3550 Handle<JSReceiver> root = v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast( 3559 Handle<JSReceiver> root = v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
3551 CcTest::global() 3560 CcTest::global()
3552 ->Get(CcTest::isolate()->GetCurrentContext(), v8_str("root")) 3561 ->Get(CcTest::isolate()->GetCurrentContext(), v8_str("root"))
3553 .ToLocalChecked())); 3562 .ToLocalChecked()));
3554 3563
3555 // The root object should be in a sane state. 3564 // The root object should be in a sane state.
3556 CHECK(root->IsJSObject()); 3565 CHECK(root->IsJSObject());
3557 CHECK(root->map()->IsMap()); 3566 CHECK(root->map()->IsMap());
3558 } 3567 }
(...skipping 22 matching lines...)
3581 "f(new Object);" 3590 "f(new Object);"
3582 "f(new Object);" 3591 "f(new Object);"
3583 "%OptimizeFunctionOnNextCall(f);" 3592 "%OptimizeFunctionOnNextCall(f);"
3584 "f(root);" 3593 "f(root);"
3585 "%DeoptimizeFunction(f);"); 3594 "%DeoptimizeFunction(f);");
3586 3595
3587 // This bug only triggers with aggressive IC clearing. 3596 // This bug only triggers with aggressive IC clearing.
3588 CcTest::heap()->AgeInlineCaches(); 3597 CcTest::heap()->AgeInlineCaches();
3589 3598
3590 // Explicitly request GC to perform final marking step and sweeping. 3599 // Explicitly request GC to perform final marking step and sweeping.
3591 CcTest::heap()->CollectAllGarbage(); 3600 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
3592 3601
3593 Handle<JSReceiver> root = v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast( 3602 Handle<JSReceiver> root = v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
3594 CcTest::global() 3603 CcTest::global()
3595 ->Get(CcTest::isolate()->GetCurrentContext(), v8_str("root")) 3604 ->Get(CcTest::isolate()->GetCurrentContext(), v8_str("root"))
3596 .ToLocalChecked())); 3605 .ToLocalChecked()));
3597 3606
3598 // The root object should be in a sane state. 3607 // The root object should be in a sane state.
3599 CHECK(root->IsJSObject()); 3608 CHECK(root->IsJSObject());
3600 CHECK(root->map()->IsMap()); 3609 CHECK(root->map()->IsMap());
3601 } 3610 }
(...skipping 30 matching lines...)
3632 const int overall_page_count = number_of_test_pages + initial_page_count; 3641 const int overall_page_count = number_of_test_pages + initial_page_count;
3633 for (int i = 0; i < number_of_test_pages; i++) { 3642 for (int i = 0; i < number_of_test_pages; i++) {
3634 AlwaysAllocateScope always_allocate(isolate); 3643 AlwaysAllocateScope always_allocate(isolate);
3635 heap::SimulateFullSpace(old_space); 3644 heap::SimulateFullSpace(old_space);
3636 factory->NewFixedArray(1, TENURED); 3645 factory->NewFixedArray(1, TENURED);
3637 } 3646 }
3638 CHECK_EQ(overall_page_count, old_space->CountTotalPages()); 3647 CHECK_EQ(overall_page_count, old_space->CountTotalPages());
3639 3648
3640 // Triggering one GC will cause a lot of garbage to be discovered but 3649 // Triggering one GC will cause a lot of garbage to be discovered but
3641 // even spread across all allocated pages. 3650 // even spread across all allocated pages.
3642 heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask, 3651 CcTest::CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask);
3643 "triggered for preparation");
3644 CHECK_GE(overall_page_count, old_space->CountTotalPages()); 3652 CHECK_GE(overall_page_count, old_space->CountTotalPages());
3645 3653
3646 // Triggering subsequent GCs should cause at least half of the pages 3654 // Triggering subsequent GCs should cause at least half of the pages
3647 // to be released to the OS after at most two cycles. 3655 // to be released to the OS after at most two cycles.
3648 heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask, 3656 CcTest::CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask);
3649 "triggered by test 1");
3650 CHECK_GE(overall_page_count, old_space->CountTotalPages()); 3657 CHECK_GE(overall_page_count, old_space->CountTotalPages());
3651 heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask, 3658 CcTest::CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask);
3652 "triggered by test 2");
3653 CHECK_GE(overall_page_count, old_space->CountTotalPages() * 2); 3659 CHECK_GE(overall_page_count, old_space->CountTotalPages() * 2);
3654 3660
3655 // Triggering a last-resort GC should cause all pages to be released to the 3661 // Triggering a last-resort GC should cause all pages to be released to the
3656 // OS so that other processes can seize the memory. If we get a failure here 3662 // OS so that other processes can seize the memory. If we get a failure here
3657 // where there are 2 pages left instead of 1, then we should increase the 3663 // where there are 2 pages left instead of 1, then we should increase the
3658 // size of the first page a little in SizeOfFirstPage in spaces.cc. The 3664 // size of the first page a little in SizeOfFirstPage in spaces.cc. The
3659 // first page should be small in order to reduce memory used when the VM 3665 // first page should be small in order to reduce memory used when the VM
3660 // boots, but if the 20 small arrays don't fit on the first page then that's 3666 // boots, but if the 20 small arrays don't fit on the first page then that's
3661 // an indication that it is too small. 3667 // an indication that it is too small.
3662 heap->CollectAllAvailableGarbage("triggered really hard"); 3668 CcTest::CollectAllAvailableGarbage();
3663 CHECK_EQ(initial_page_count, old_space->CountTotalPages()); 3669 CHECK_EQ(initial_page_count, old_space->CountTotalPages());
3664 } 3670 }
3665 3671
3666 static int forced_gc_counter = 0; 3672 static int forced_gc_counter = 0;
3667 3673
3668 void MockUseCounterCallback(v8::Isolate* isolate, 3674 void MockUseCounterCallback(v8::Isolate* isolate,
3669 v8::Isolate::UseCounterFeature feature) { 3675 v8::Isolate::UseCounterFeature feature) {
3670 isolate->GetCurrentContext(); 3676 isolate->GetCurrentContext();
3671 if (feature == v8::Isolate::kForcedGC) { 3677 if (feature == v8::Isolate::kForcedGC) {
3672 forced_gc_counter++; 3678 forced_gc_counter++;
(...skipping 64 matching lines...)
3737 FeedbackVectorHelper feedback_helper(feedback_vector); 3743 FeedbackVectorHelper feedback_helper(feedback_vector);
3738 3744
3739 int expected_slots = 2; 3745 int expected_slots = 2;
3740 CHECK_EQ(expected_slots, feedback_helper.slot_count()); 3746 CHECK_EQ(expected_slots, feedback_helper.slot_count());
3741 int slot1 = 0; 3747 int slot1 = 0;
3742 int slot2 = 1; 3748 int slot2 = 1;
3743 CHECK(feedback_vector->Get(feedback_helper.slot(slot1))->IsWeakCell()); 3749 CHECK(feedback_vector->Get(feedback_helper.slot(slot1))->IsWeakCell());
3744 CHECK(feedback_vector->Get(feedback_helper.slot(slot2))->IsWeakCell()); 3750 CHECK(feedback_vector->Get(feedback_helper.slot(slot2))->IsWeakCell());
3745 3751
3746 heap::SimulateIncrementalMarking(CcTest::heap()); 3752 heap::SimulateIncrementalMarking(CcTest::heap());
3747 CcTest::heap()->CollectAllGarbage(); 3753 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
3748 3754
3749 CHECK(!WeakCell::cast(feedback_vector->Get(feedback_helper.slot(slot1))) 3755 CHECK(!WeakCell::cast(feedback_vector->Get(feedback_helper.slot(slot1)))
3750 ->cleared()); 3756 ->cleared());
3751 CHECK(!WeakCell::cast(feedback_vector->Get(feedback_helper.slot(slot2))) 3757 CHECK(!WeakCell::cast(feedback_vector->Get(feedback_helper.slot(slot2)))
3752 ->cleared()); 3758 ->cleared());
3753 } 3759 }
3754 3760
3755 3761
3756 static Code* FindFirstIC(Code* code, Code::Kind kind) { 3762 static Code* FindFirstIC(Code* code, Code::Kind kind) {
3757 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) | 3763 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
(...skipping 36 matching lines...)
3794 "function fun() { this.x = 1; };" 3800 "function fun() { this.x = 1; };"
3795 "function f(o) { return new o(); } f(fun); f(fun);"); 3801 "function f(o) { return new o(); } f(fun); f(fun);");
3796 Handle<JSFunction> f = Handle<JSFunction>::cast( 3802 Handle<JSFunction> f = Handle<JSFunction>::cast(
3797 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast( 3803 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3798 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked()))); 3804 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
3799 3805
3800 Handle<TypeFeedbackVector> vector(f->feedback_vector()); 3806 Handle<TypeFeedbackVector> vector(f->feedback_vector());
3801 CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell()); 3807 CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());
3802 3808
3803 heap::SimulateIncrementalMarking(CcTest::heap()); 3809 heap::SimulateIncrementalMarking(CcTest::heap());
3804 CcTest::heap()->CollectAllGarbage(); 3810 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
3805 3811
3806 CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell()); 3812 CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());
3807 } 3813 }
3808 3814
3809 TEST(IncrementalMarkingPreservesMonomorphicIC) { 3815 TEST(IncrementalMarkingPreservesMonomorphicIC) {
3810 if (i::FLAG_always_opt) return; 3816 if (i::FLAG_always_opt) return;
3811 CcTest::InitializeVM(); 3817 CcTest::InitializeVM();
3812 v8::HandleScope scope(CcTest::isolate()); 3818 v8::HandleScope scope(CcTest::isolate());
3813 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext(); 3819 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3814 // Prepare function f that contains a monomorphic IC for object 3820 // Prepare function f that contains a monomorphic IC for object
3815 // originating from the same native context. 3821 // originating from the same native context.
3816 CompileRun("function fun() { this.x = 1; }; var obj = new fun();" 3822 CompileRun("function fun() { this.x = 1; }; var obj = new fun();"
3817 "function f(o) { return o.x; } f(obj); f(obj);"); 3823 "function f(o) { return o.x; } f(obj); f(obj);");
3818 Handle<JSFunction> f = Handle<JSFunction>::cast( 3824 Handle<JSFunction> f = Handle<JSFunction>::cast(
3819 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast( 3825 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3820 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked()))); 3826 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
3821 3827
3822 CheckVectorIC(f, 0, MONOMORPHIC); 3828 CheckVectorIC(f, 0, MONOMORPHIC);
3823 3829
3824 heap::SimulateIncrementalMarking(CcTest::heap()); 3830 heap::SimulateIncrementalMarking(CcTest::heap());
3825 CcTest::heap()->CollectAllGarbage(); 3831 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
3826 3832
3827 CheckVectorIC(f, 0, MONOMORPHIC); 3833 CheckVectorIC(f, 0, MONOMORPHIC);
3828 } 3834 }
3829 3835
3830 TEST(IncrementalMarkingPreservesPolymorphicIC) { 3836 TEST(IncrementalMarkingPreservesPolymorphicIC) {
3831 if (i::FLAG_always_opt) return; 3837 if (i::FLAG_always_opt) return;
3832 CcTest::InitializeVM(); 3838 CcTest::InitializeVM();
3833 v8::HandleScope scope(CcTest::isolate()); 3839 v8::HandleScope scope(CcTest::isolate());
3834 v8::Local<v8::Value> obj1, obj2; 3840 v8::Local<v8::Value> obj1, obj2;
3835 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext(); 3841 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
(...skipping 16 matching lines...)
3852 CHECK(CcTest::global()->Set(ctx, v8_str("obj2"), obj2).FromJust()); 3858 CHECK(CcTest::global()->Set(ctx, v8_str("obj2"), obj2).FromJust());
3853 CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);"); 3859 CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
3854 Handle<JSFunction> f = Handle<JSFunction>::cast( 3860 Handle<JSFunction> f = Handle<JSFunction>::cast(
3855 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast( 3861 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3856 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked()))); 3862 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
3857 3863
3858 CheckVectorIC(f, 0, POLYMORPHIC); 3864 CheckVectorIC(f, 0, POLYMORPHIC);
3859 3865
3860 // Fire context dispose notification. 3866 // Fire context dispose notification.
3861 heap::SimulateIncrementalMarking(CcTest::heap()); 3867 heap::SimulateIncrementalMarking(CcTest::heap());
3862 CcTest::heap()->CollectAllGarbage(); 3868 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
3863 3869
3864 CheckVectorIC(f, 0, POLYMORPHIC); 3870 CheckVectorIC(f, 0, POLYMORPHIC);
3865 } 3871 }
3866 3872
3867 TEST(ContextDisposeDoesntClearPolymorphicIC) { 3873 TEST(ContextDisposeDoesntClearPolymorphicIC) {
3868 if (i::FLAG_always_opt) return; 3874 if (i::FLAG_always_opt) return;
3869 CcTest::InitializeVM(); 3875 CcTest::InitializeVM();
3870 v8::HandleScope scope(CcTest::isolate()); 3876 v8::HandleScope scope(CcTest::isolate());
3871 v8::Local<v8::Value> obj1, obj2; 3877 v8::Local<v8::Value> obj1, obj2;
3872 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext(); 3878 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
(...skipping 17 matching lines...)
3890 CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);"); 3896 CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
3891 Handle<JSFunction> f = Handle<JSFunction>::cast( 3897 Handle<JSFunction> f = Handle<JSFunction>::cast(
3892 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast( 3898 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3893 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked()))); 3899 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
3894 3900
3895 CheckVectorIC(f, 0, POLYMORPHIC); 3901 CheckVectorIC(f, 0, POLYMORPHIC);
3896 3902
3897 // Fire context dispose notification. 3903 // Fire context dispose notification.
3898 CcTest::isolate()->ContextDisposedNotification(); 3904 CcTest::isolate()->ContextDisposedNotification();
3899 heap::SimulateIncrementalMarking(CcTest::heap()); 3905 heap::SimulateIncrementalMarking(CcTest::heap());
3900 CcTest::heap()->CollectAllGarbage(); 3906 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
3901 3907
3902 CheckVectorIC(f, 0, POLYMORPHIC); 3908 CheckVectorIC(f, 0, POLYMORPHIC);
3903 } 3909 }
3904 3910
3905 3911
3906 class SourceResource : public v8::String::ExternalOneByteStringResource { 3912 class SourceResource : public v8::String::ExternalOneByteStringResource {
3907 public: 3913 public:
3908 explicit SourceResource(const char* data) 3914 explicit SourceResource(const char* data)
3909 : data_(data), length_(strlen(data)) { } 3915 : data_(data), length_(strlen(data)) { }
3910 3916
(...skipping 21 matching lines...)
3932 // to check whether the data is being released since the external string 3938 // to check whether the data is being released since the external string
3933 // resource's callback is fired when the external string is GC'ed. 3939 // resource's callback is fired when the external string is GC'ed.
3934 i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate); 3940 i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
3935 v8::HandleScope scope(isolate); 3941 v8::HandleScope scope(isolate);
3936 SourceResource* resource = new SourceResource(i::StrDup(source)); 3942 SourceResource* resource = new SourceResource(i::StrDup(source));
3937 { 3943 {
3938 v8::HandleScope scope(isolate); 3944 v8::HandleScope scope(isolate);
3939 v8::Local<v8::Context> ctx = isolate->GetCurrentContext(); 3945 v8::Local<v8::Context> ctx = isolate->GetCurrentContext();
3940 v8::Local<v8::String> source_string = 3946 v8::Local<v8::String> source_string =
3941 v8::String::NewExternalOneByte(isolate, resource).ToLocalChecked(); 3947 v8::String::NewExternalOneByte(isolate, resource).ToLocalChecked();
3942 i_isolate->heap()->CollectAllAvailableGarbage(); 3948 i_isolate->heap()->CollectAllAvailableGarbage(
3949 i::GarbageCollectionReason::kTesting);
3943 v8::Script::Compile(ctx, source_string) 3950 v8::Script::Compile(ctx, source_string)
3944 .ToLocalChecked() 3951 .ToLocalChecked()
3945 ->Run(ctx) 3952 ->Run(ctx)
3946 .ToLocalChecked(); 3953 .ToLocalChecked();
3947 CHECK(!resource->IsDisposed()); 3954 CHECK(!resource->IsDisposed());
3948 } 3955 }
3949 // i_isolate->heap()->CollectAllAvailableGarbage(); 3956 // i_isolate->heap()->CollectAllAvailableGarbage();
3950 CHECK(!resource->IsDisposed()); 3957 CHECK(!resource->IsDisposed());
3951 3958
3952 CompileRun(accessor); 3959 CompileRun(accessor);
3953 i_isolate->heap()->CollectAllAvailableGarbage(); 3960 i_isolate->heap()->CollectAllAvailableGarbage(
3961 i::GarbageCollectionReason::kTesting);
3954 3962
3955 // External source has been released. 3963 // External source has been released.
3956 CHECK(resource->IsDisposed()); 3964 CHECK(resource->IsDisposed());
3957 delete resource; 3965 delete resource;
3958 } 3966 }
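The hunks above show the second half of the pattern: where a test keeps talking to the Heap directly, CollectAllAvailableGarbage() now takes an explicit reason and the tests pass i::GarbageCollectionReason::kTesting. The enum's shape is sketched below; only kTesting is confirmed by this file, the remaining enumerators are illustrative assumptions, and the real declaration presumably lives next to Heap in src/heap/heap.h.

// Assumed shape of the new enum; only kTesting appears in this diff.
enum class GarbageCollectionReason {
  kUnknown,
  kAllocationFailure,   // assumed
  kContextDisposal,     // assumed
  kTesting,
  // ... one enumerator per distinct GC trigger ...
};

// Direct heap calls now state why the collection happens, e.g.:
//   i_isolate->heap()->CollectAllAvailableGarbage(
//       i::GarbageCollectionReason::kTesting);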
3959 3967
3960 3968
3961 UNINITIALIZED_TEST(ReleaseStackTraceData) { 3969 UNINITIALIZED_TEST(ReleaseStackTraceData) {
3962 if (i::FLAG_always_opt) { 3970 if (i::FLAG_always_opt) {
3963 // TODO(ulan): Remove this once the memory leak via code_next_link is fixed. 3971 // TODO(ulan): Remove this once the memory leak via code_next_link is fixed.
(...skipping 55 matching lines...)
4019 4027
4020 TEST(Regress159140) { 4028 TEST(Regress159140) {
4021 i::FLAG_allow_natives_syntax = true; 4029 i::FLAG_allow_natives_syntax = true;
4022 CcTest::InitializeVM(); 4030 CcTest::InitializeVM();
4023 Isolate* isolate = CcTest::i_isolate(); 4031 Isolate* isolate = CcTest::i_isolate();
4024 LocalContext env; 4032 LocalContext env;
4025 Heap* heap = isolate->heap(); 4033 Heap* heap = isolate->heap();
4026 HandleScope scope(isolate); 4034 HandleScope scope(isolate);
4027 4035
4028 // Perform one initial GC to enable code flushing. 4036 // Perform one initial GC to enable code flushing.
4029 heap->CollectAllGarbage(); 4037 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
4030 4038
4031 // Prepare several closures that are all eligible for code flushing 4039 // Prepare several closures that are all eligible for code flushing
4032 // because all reachable ones are not optimized. Make sure that the 4040 // because all reachable ones are not optimized. Make sure that the
4033 // optimized code object is directly reachable through a handle so 4041 // optimized code object is directly reachable through a handle so
4034 // that it is marked black during incremental marking. 4042 // that it is marked black during incremental marking.
4035 Handle<Code> code; 4043 Handle<Code> code;
4036 { 4044 {
4037 HandleScope inner_scope(isolate); 4045 HandleScope inner_scope(isolate);
4038 CompileRun("function h(x) {}" 4046 CompileRun("function h(x) {}"
4039 "function mkClosure() {" 4047 "function mkClosure() {"
(...skipping 23 matching lines...)
4063 } 4071 }
4064 4072
4065 code = inner_scope.CloseAndEscape(Handle<Code>(f->code())); 4073 code = inner_scope.CloseAndEscape(Handle<Code>(f->code()));
4066 } 4074 }
4067 4075
4068 // Simulate incremental marking so that the functions are enqueued as 4076 // Simulate incremental marking so that the functions are enqueued as
4069 // code flushing candidates. Then optimize one function. Finally 4077 // code flushing candidates. Then optimize one function. Finally
4070 // finish the GC to complete code flushing. 4078 // finish the GC to complete code flushing.
4071 heap::SimulateIncrementalMarking(heap); 4079 heap::SimulateIncrementalMarking(heap);
4072 CompileRun("%OptimizeFunctionOnNextCall(g); g(3);"); 4080 CompileRun("%OptimizeFunctionOnNextCall(g); g(3);");
4073 heap->CollectAllGarbage(); 4081 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
4074 4082
4075 // Unoptimized code is missing and the deoptimizer will go ballistic. 4083 // Unoptimized code is missing and the deoptimizer will go ballistic.
4076 CompileRun("g('bozo');"); 4084 CompileRun("g('bozo');");
4077 } 4085 }
4078 4086
4079 4087
4080 TEST(Regress165495) { 4088 TEST(Regress165495) {
4081 i::FLAG_allow_natives_syntax = true; 4089 i::FLAG_allow_natives_syntax = true;
4082 CcTest::InitializeVM(); 4090 CcTest::InitializeVM();
4083 Isolate* isolate = CcTest::i_isolate(); 4091 Isolate* isolate = CcTest::i_isolate();
4084 Heap* heap = isolate->heap(); 4092 Heap* heap = isolate->heap();
4085 HandleScope scope(isolate); 4093 HandleScope scope(isolate);
4086 4094
4087 // Perform one initial GC to enable code flushing. 4095 // Perform one initial GC to enable code flushing.
4088 heap->CollectAllGarbage(); 4096 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
4089 4097
4090 // Prepare an optimized closure so that the optimized code map will get 4098 // Prepare an optimized closure so that the optimized code map will get
4091 // populated. Then age the unoptimized code to trigger code flushing 4099 // populated. Then age the unoptimized code to trigger code flushing
4092 // but make sure the optimized code is unreachable. 4100 // but make sure the optimized code is unreachable.
4093 { 4101 {
4094 HandleScope inner_scope(isolate); 4102 HandleScope inner_scope(isolate);
4095 LocalContext env; 4103 LocalContext env;
4096 CompileRun("function mkClosure() {" 4104 CompileRun("function mkClosure() {"
4097 " return function(x) { return x + 1; };" 4105 " return function(x) { return x + 1; };"
4098 "}" 4106 "}"
4099 "var f = mkClosure();" 4107 "var f = mkClosure();"
4100 "f(1); f(2);" 4108 "f(1); f(2);"
4101 "%OptimizeFunctionOnNextCall(f); f(3);"); 4109 "%OptimizeFunctionOnNextCall(f); f(3);");
4102 4110
4103 Handle<JSFunction> f = Handle<JSFunction>::cast( 4111 Handle<JSFunction> f = Handle<JSFunction>::cast(
4104 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast( 4112 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
4105 CcTest::global()->Get(env.local(), v8_str("f")).ToLocalChecked()))); 4113 CcTest::global()->Get(env.local(), v8_str("f")).ToLocalChecked())));
4106 CHECK(f->is_compiled()); 4114 CHECK(f->is_compiled());
4107 const int kAgingThreshold = 6; 4115 const int kAgingThreshold = 6;
4108 for (int i = 0; i < kAgingThreshold; i++) { 4116 for (int i = 0; i < kAgingThreshold; i++) {
4109 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2)); 4117 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
4110 } 4118 }
4111 4119
4112 CompileRun("f = null;"); 4120 CompileRun("f = null;");
4113 } 4121 }
4114 4122
4115 // Simulate incremental marking so that unoptimized code is flushed 4123 // Simulate incremental marking so that unoptimized code is flushed
4116 // even though it still is cached in the optimized code map. 4124 // even though it still is cached in the optimized code map.
4117 heap::SimulateIncrementalMarking(heap); 4125 heap::SimulateIncrementalMarking(heap);
4118 heap->CollectAllGarbage(); 4126 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
4119 4127
4120 // Make a new closure that will get code installed from the code map. 4128 // Make a new closure that will get code installed from the code map.
4121 // Unoptimized code is missing and the deoptimizer will go ballistic. 4129 // Unoptimized code is missing and the deoptimizer will go ballistic.
4122 CompileRun("var g = mkClosure(); g('bozo');"); 4130 CompileRun("var g = mkClosure(); g('bozo');");
4123 } 4131 }
4124 4132
4125 4133
4126 TEST(Regress169209) { 4134 TEST(Regress169209) {
4127 i::FLAG_stress_compaction = false; 4135 i::FLAG_stress_compaction = false;
4128 i::FLAG_allow_natives_syntax = true; 4136 i::FLAG_allow_natives_syntax = true;
4129 4137
4130 CcTest::InitializeVM(); 4138 CcTest::InitializeVM();
4131 Isolate* isolate = CcTest::i_isolate(); 4139 Isolate* isolate = CcTest::i_isolate();
4132 Heap* heap = isolate->heap(); 4140 Heap* heap = isolate->heap();
4133 HandleScope scope(isolate); 4141 HandleScope scope(isolate);
4134 4142
4135 // Perform one initial GC to enable code flushing. 4143 // Perform one initial GC to enable code flushing.
4136 heap->CollectAllGarbage(); 4144 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
4137 4145
4138 // Prepare a shared function info eligible for code flushing for which 4146 // Prepare a shared function info eligible for code flushing for which
4139 // the unoptimized code will be replaced during optimization. 4147 // the unoptimized code will be replaced during optimization.
4140 Handle<SharedFunctionInfo> shared1; 4148 Handle<SharedFunctionInfo> shared1;
4141 { 4149 {
4142 HandleScope inner_scope(isolate); 4150 HandleScope inner_scope(isolate);
4143 LocalContext env; 4151 LocalContext env;
4144 CompileRun("function f() { return 'foobar'; }" 4152 CompileRun("function f() { return 'foobar'; }"
4145 "function g(x) { if (x) f(); }" 4153 "function g(x) { if (x) f(); }"
4146 "f();" 4154 "f();"
(...skipping 36 matching lines...)
4183 4191
4184 // Simulate incremental marking and collect code flushing candidates. 4192 // Simulate incremental marking and collect code flushing candidates.
4185 heap::SimulateIncrementalMarking(heap); 4193 heap::SimulateIncrementalMarking(heap);
4186 CHECK(shared1->code()->gc_metadata() != NULL); 4194 CHECK(shared1->code()->gc_metadata() != NULL);
4187 4195
4188 // Optimize function and make sure the unoptimized code is replaced. 4196 // Optimize function and make sure the unoptimized code is replaced.
4189 CompileRun("%OptimizeFunctionOnNextCall(g);" 4197 CompileRun("%OptimizeFunctionOnNextCall(g);"
4190 "g(false);"); 4198 "g(false);");
4191 4199
4192 // Finish garbage collection cycle. 4200 // Finish garbage collection cycle.
4193 heap->CollectAllGarbage(); 4201 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
4194 CHECK(shared1->code()->gc_metadata() == NULL); 4202 CHECK(shared1->code()->gc_metadata() == NULL);
4195 } 4203 }
4196 4204
4197 4205
4198 TEST(Regress169928) { 4206 TEST(Regress169928) {
4199 i::FLAG_allow_natives_syntax = true; 4207 i::FLAG_allow_natives_syntax = true;
4200 i::FLAG_crankshaft = false; 4208 i::FLAG_crankshaft = false;
4201 CcTest::InitializeVM(); 4209 CcTest::InitializeVM();
4202 Isolate* isolate = CcTest::i_isolate(); 4210 Isolate* isolate = CcTest::i_isolate();
4203 LocalContext env; 4211 LocalContext env;
(...skipping 20 matching lines...)
4224 // prepare the heap 4232 // prepare the heap
4225 v8::Local<v8::String> mote_code_string = 4233 v8::Local<v8::String> mote_code_string =
4226 v8_str("fastliteralcase(mote, 2.5);"); 4234 v8_str("fastliteralcase(mote, 2.5);");
4227 4235
4228 v8::Local<v8::String> array_name = v8_str("mote"); 4236 v8::Local<v8::String> array_name = v8_str("mote");
4229 CHECK(CcTest::global() 4237 CHECK(CcTest::global()
4230 ->Set(env.local(), array_name, v8::Int32::New(CcTest::isolate(), 0)) 4238 ->Set(env.local(), array_name, v8::Int32::New(CcTest::isolate(), 0))
4231 .FromJust()); 4239 .FromJust());
4232 4240
4233 // First make sure we flip spaces 4241 // First make sure we flip spaces
4234 CcTest::heap()->CollectGarbage(NEW_SPACE); 4242 CcTest::CollectGarbage(NEW_SPACE);
4235 4243
4236 // Allocate the object. 4244 // Allocate the object.
4237 Handle<FixedArray> array_data = factory->NewFixedArray(2, NOT_TENURED); 4245 Handle<FixedArray> array_data = factory->NewFixedArray(2, NOT_TENURED);
4238 array_data->set(0, Smi::FromInt(1)); 4246 array_data->set(0, Smi::FromInt(1));
4239 array_data->set(1, Smi::FromInt(2)); 4247 array_data->set(1, Smi::FromInt(2));
4240 4248
4241 heap::AllocateAllButNBytes( 4249 heap::AllocateAllButNBytes(
4242 CcTest::heap()->new_space(), 4250 CcTest::heap()->new_space(),
4243 JSArray::kSize + AllocationMemento::kSize + kPointerSize); 4251 JSArray::kSize + AllocationMemento::kSize + kPointerSize);
4244 4252
(...skipping 89 matching lines...)
4334 TEST(Regress514122) { 4342 TEST(Regress514122) {
4335 i::FLAG_flush_optimized_code_cache = false; 4343 i::FLAG_flush_optimized_code_cache = false;
4336 i::FLAG_allow_natives_syntax = true; 4344 i::FLAG_allow_natives_syntax = true;
4337 CcTest::InitializeVM(); 4345 CcTest::InitializeVM();
4338 Isolate* isolate = CcTest::i_isolate(); 4346 Isolate* isolate = CcTest::i_isolate();
4339 LocalContext env; 4347 LocalContext env;
4340 Heap* heap = isolate->heap(); 4348 Heap* heap = isolate->heap();
4341 HandleScope scope(isolate); 4349 HandleScope scope(isolate);
4342 4350
4343 // Perform one initial GC to enable code flushing. 4351 // Perform one initial GC to enable code flushing.
4344 CcTest::heap()->CollectAllGarbage(); 4352 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
4345 4353
4346 // Prepare function whose optimized code map we can use. 4354 // Prepare function whose optimized code map we can use.
4347 Handle<SharedFunctionInfo> shared; 4355 Handle<SharedFunctionInfo> shared;
4348 { 4356 {
4349 HandleScope inner_scope(isolate); 4357 HandleScope inner_scope(isolate);
4350 CompileRun("function f() { return 1 }" 4358 CompileRun("function f() { return 1 }"
4351 "f(); %OptimizeFunctionOnNextCall(f); f();"); 4359 "f(); %OptimizeFunctionOnNextCall(f); f();");
4352 4360
4353 Handle<JSFunction> f = Handle<JSFunction>::cast( 4361 Handle<JSFunction> f = Handle<JSFunction>::cast(
4354 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast( 4362 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
(...skipping 57 matching lines...)
4412 4420
4413 // Add the code several times to the optimized code map. This will leave old 4421 // Add the code several times to the optimized code map. This will leave old
4414 // copies of the optimized code map unreachable but still marked. 4422 // copies of the optimized code map unreachable but still marked.
4415 for (int i = 3; i < 6; ++i) { 4423 for (int i = 3; i < 6; ++i) {
4416 HandleScope inner_scope(isolate); 4424 HandleScope inner_scope(isolate);
4417 BailoutId id = BailoutId(i); 4425 BailoutId id = BailoutId(i);
4418 SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id); 4426 SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
4419 } 4427 }
4420 4428
4421 // Trigger a GC to flush out the bug. 4429 // Trigger a GC to flush out the bug.
4422 heap->CollectGarbage(i::OLD_SPACE, "fire in the hole"); 4430 CcTest::CollectGarbage(i::OLD_SPACE);
4423 boomer->Print(); 4431 boomer->Print();
4424 } 4432 }
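One detail worth noting in the hunk above: the old per-space API tagged the collection with a free-form string ("fire in the hole"), while the new CcTest::CollectGarbage(space) helper drops the string, presumably supplying the typed reason on the caller's behalf. A hedged sketch of such a wrapper, with a hypothetical name:

// Hypothetical wrapper; the real helper is assumed to live in test/cctest/cctest.h.
static void CollectGarbageForTesting(i::AllocationSpace space) {
  // Per-space GC (scavenge for NEW_SPACE, full mark-compact for OLD_SPACE),
  // always reported as a test-triggered collection.
  CcTest::heap()->CollectGarbage(space, i::GarbageCollectionReason::kTesting);
}

// Usage matching this patch:
//   CollectGarbageForTesting(i::OLD_SPACE);  // was CollectGarbage(i::OLD_SPACE, "fire in the hole")
//   CollectGarbageForTesting(i::NEW_SPACE);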
4425 4433
4426 4434
4427 TEST(OptimizedCodeMapReuseEntries) { 4435 TEST(OptimizedCodeMapReuseEntries) {
4428 i::FLAG_flush_optimized_code_cache = false; 4436 i::FLAG_flush_optimized_code_cache = false;
4429 i::FLAG_allow_natives_syntax = true; 4437 i::FLAG_allow_natives_syntax = true;
4430 // BUG(v8:4598): Since TurboFan doesn't treat maps in code weakly, we can't 4438 // BUG(v8:4598): Since TurboFan doesn't treat maps in code weakly, we can't
4431 // run this test. 4439 // run this test.
4432 if (i::FLAG_turbo) return; 4440 if (i::FLAG_turbo) return;
4433 CcTest::InitializeVM(); 4441 CcTest::InitializeVM();
4434 v8::Isolate* v8_isolate = CcTest::isolate(); 4442 v8::Isolate* v8_isolate = CcTest::isolate();
4435 Isolate* isolate = CcTest::i_isolate(); 4443 Isolate* isolate = CcTest::i_isolate();
4436 Heap* heap = isolate->heap();
4437 HandleScope scope(isolate); 4444 HandleScope scope(isolate);
4438 4445
4439 // Create 3 contexts, allow the 2nd one to be disposed, and verify that 4446 // Create 3 contexts, allow the 2nd one to be disposed, and verify that
4440 // a 4th context will re-use the weak slots in the optimized code map 4447 // a 4th context will re-use the weak slots in the optimized code map
4441 // to hold data, rather than expanding the map. 4448 // to hold data, rather than expanding the map.
4442 v8::Local<v8::Context> c1 = v8::Context::New(v8_isolate); 4449 v8::Local<v8::Context> c1 = v8::Context::New(v8_isolate);
4443 const char* source = "function foo(x) { var l = [1]; return x+l[0]; }"; 4450 const char* source = "function foo(x) { var l = [1]; return x+l[0]; }";
4444 v8::ScriptCompiler::Source script_source( 4451 v8::ScriptCompiler::Source script_source(
4445 v8::String::NewFromUtf8(v8_isolate, source, v8::NewStringType::kNormal) 4452 v8::String::NewFromUtf8(v8_isolate, source, v8::NewStringType::kNormal)
4446 .ToLocalChecked()); 4453 .ToLocalChecked());
4447 v8::Local<v8::UnboundScript> indep = 4454 v8::Local<v8::UnboundScript> indep =
4448 v8::ScriptCompiler::CompileUnboundScript(v8_isolate, &script_source) 4455 v8::ScriptCompiler::CompileUnboundScript(v8_isolate, &script_source)
4449 .ToLocalChecked(); 4456 .ToLocalChecked();
4450 const char* toplevel = "foo(3); %OptimizeFunctionOnNextCall(foo); foo(3);"; 4457 const char* toplevel = "foo(3); %OptimizeFunctionOnNextCall(foo); foo(3);";
4451 // Perform one initial GC to enable code flushing. 4458 // Perform one initial GC to enable code flushing.
4452 heap->CollectAllGarbage(); 4459 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
4453 4460
4454 c1->Enter(); 4461 c1->Enter();
4455 indep->BindToCurrentContext()->Run(c1).ToLocalChecked(); 4462 indep->BindToCurrentContext()->Run(c1).ToLocalChecked();
4456 CompileRun(toplevel); 4463 CompileRun(toplevel);
4457 4464
4458 Handle<SharedFunctionInfo> shared; 4465 Handle<SharedFunctionInfo> shared;
4459 Handle<JSFunction> foo = Handle<JSFunction>::cast( 4466 Handle<JSFunction> foo = Handle<JSFunction>::cast(
4460 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast( 4467 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
4461 CcTest::global()->Get(c1, v8_str("foo")).ToLocalChecked()))); 4468 CcTest::global()->Get(c1, v8_str("foo")).ToLocalChecked())));
4462 CHECK(foo->shared()->is_compiled()); 4469 CHECK(foo->shared()->is_compiled());
(...skipping 13 matching lines...)
4476 HandleScope scope(isolate); 4483 HandleScope scope(isolate);
4477 v8::Local<v8::Context> c3 = v8::Context::New(v8_isolate); 4484 v8::Local<v8::Context> c3 = v8::Context::New(v8_isolate);
4478 c3->Enter(); 4485 c3->Enter();
4479 indep->BindToCurrentContext()->Run(c3).ToLocalChecked(); 4486 indep->BindToCurrentContext()->Run(c3).ToLocalChecked();
4480 CompileRun(toplevel); 4487 CompileRun(toplevel);
4481 c3->Exit(); 4488 c3->Exit();
4482 4489
4483 // Now, collect garbage. Context c2 should have no roots to it, and its 4490 // Now, collect garbage. Context c2 should have no roots to it, and its
4484 // entry in the optimized code map should be free for a new context. 4491 // entry in the optimized code map should be free for a new context.
4485 for (int i = 0; i < 4; i++) { 4492 for (int i = 0; i < 4; i++) {
4486 heap->CollectAllGarbage(); 4493 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
4487 } 4494 }
4488 4495
4489 Handle<FixedArray> optimized_code_map = 4496 Handle<FixedArray> optimized_code_map =
4490 handle(shared->optimized_code_map()); 4497 handle(shared->optimized_code_map());
4491 // There should be 3 entries in the map. 4498 // There should be 3 entries in the map.
4492 CHECK_EQ( 4499 CHECK_EQ(
4493 3, ((optimized_code_map->length() - SharedFunctionInfo::kEntriesStart) / 4500 3, ((optimized_code_map->length() - SharedFunctionInfo::kEntriesStart) /
4494 SharedFunctionInfo::kEntryLength)); 4501 SharedFunctionInfo::kEntryLength));
4495 // But one of them (formerly for c2) should be cleared. 4502 // But one of them (formerly for c2) should be cleared.
4496 int cleared_count = 0; 4503 int cleared_count = 0;
(...skipping 34 matching lines...)
4531 CHECK_EQ(0, cleared_count); 4538 CHECK_EQ(0, cleared_count);
4532 } 4539 }
4533 } 4540 }
4534 4541
4535 4542
4536 TEST(Regress513496) { 4543 TEST(Regress513496) {
4537 i::FLAG_flush_optimized_code_cache = false; 4544 i::FLAG_flush_optimized_code_cache = false;
4538 i::FLAG_allow_natives_syntax = true; 4545 i::FLAG_allow_natives_syntax = true;
4539 CcTest::InitializeVM(); 4546 CcTest::InitializeVM();
4540 Isolate* isolate = CcTest::i_isolate(); 4547 Isolate* isolate = CcTest::i_isolate();
4541 Heap* heap = isolate->heap();
4542 HandleScope scope(isolate); 4548 HandleScope scope(isolate);
4543 4549
4544 // Perform one initial GC to enable code flushing. 4550 // Perform one initial GC to enable code flushing.
4545 CcTest::heap()->CollectAllGarbage(); 4551 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
4546 4552
4547 // Prepare an optimized closure containing an inlined function. Then age 4553 // Prepare an optimized closure containing an inlined function. Then age
4548 // the inlined unoptimized code to trigger code flushing but make sure the 4554 // the inlined unoptimized code to trigger code flushing but make sure the
4549 // outer optimized code is kept in the optimized code map. 4555 // outer optimized code is kept in the optimized code map.
4550 Handle<SharedFunctionInfo> shared; 4556 Handle<SharedFunctionInfo> shared;
4551 { 4557 {
4552 LocalContext context; 4558 LocalContext context;
4553 HandleScope inner_scope(isolate); 4559 HandleScope inner_scope(isolate);
4554 CompileRun( 4560 CompileRun(
4555 "function g(x) { return x + 1 }" 4561 "function g(x) { return x + 1 }"
(...skipping 23 matching lines...)
4579 CompileRun("f = null"); 4585 CompileRun("f = null");
4580 } 4586 }
4581 4587
4582 // Lookup the optimized code and keep it alive. 4588 // Lookup the optimized code and keep it alive.
4583 CodeAndLiterals result = shared->SearchOptimizedCodeMap( 4589 CodeAndLiterals result = shared->SearchOptimizedCodeMap(
4584 isolate->context()->native_context(), BailoutId::None()); 4590 isolate->context()->native_context(), BailoutId::None());
4585 Handle<Code> optimized_code(result.code, isolate); 4591 Handle<Code> optimized_code(result.code, isolate);
4586 4592
4587 // Finish a full GC cycle so that the unoptimized code of 'g' is flushed even 4593 // Finish a full GC cycle so that the unoptimized code of 'g' is flushed even
4588 // though the optimized code for 'f' is reachable via the optimized code map. 4594 // though the optimized code for 'f' is reachable via the optimized code map.
4589 heap->CollectAllGarbage(); 4595 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
4590 4596
4591 // Make a new closure that will get code installed from the code map. 4597 // Make a new closure that will get code installed from the code map.
4592 // Unoptimized code is missing and the deoptimizer will go ballistic. 4598 // Unoptimized code is missing and the deoptimizer will go ballistic.
4593 CompileRun("var h = mkClosure(); h('bozo');"); 4599 CompileRun("var h = mkClosure(); h('bozo');");
4594 } 4600 }
4595 4601
4596 4602
4597 TEST(LargeObjectSlotRecording) { 4603 TEST(LargeObjectSlotRecording) {
4598 FLAG_manual_evacuation_candidates_selection = true; 4604 FLAG_manual_evacuation_candidates_selection = true;
4599 CcTest::InitializeVM(); 4605 CcTest::InitializeVM();
(...skipping 22 matching lines...)
4622 4628
4623 // Create references from the large object to the object on the evacuation 4629 // Create references from the large object to the object on the evacuation
4624 // candidate. 4630 // candidate.
4625 const int kStep = size / 10; 4631 const int kStep = size / 10;
4626 for (int i = 0; i < size; i += kStep) { 4632 for (int i = 0; i < size; i += kStep) {
4627 lo->set(i, *lit); 4633 lo->set(i, *lit);
4628 CHECK(lo->get(i) == old_location); 4634 CHECK(lo->get(i) == old_location);
4629 } 4635 }
4630 4636
4631 // Move the evacuation candidate object. 4637 // Move the evacuation candidate object.
4632 CcTest::heap()->CollectAllGarbage(); 4638 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
4633 4639
4634 // Verify that the pointers in the large object got updated. 4640 // Verify that the pointers in the large object got updated.
4635 for (int i = 0; i < size; i += kStep) { 4641 for (int i = 0; i < size; i += kStep) {
4636 CHECK_EQ(lo->get(i), *lit); 4642 CHECK_EQ(lo->get(i), *lit);
4637 CHECK(lo->get(i) != old_location); 4643 CHECK(lo->get(i) != old_location);
4638 } 4644 }
4639 } 4645 }
4640 4646
4641 4647
4642 class DummyVisitor : public ObjectVisitor { 4648 class DummyVisitor : public ObjectVisitor {
(...skipping 26 matching lines...)
4669 TEST(IncrementalMarkingStepMakesBigProgressWithLargeObjects) { 4675 TEST(IncrementalMarkingStepMakesBigProgressWithLargeObjects) {
4670 CcTest::InitializeVM(); 4676 CcTest::InitializeVM();
4671 v8::HandleScope scope(CcTest::isolate()); 4677 v8::HandleScope scope(CcTest::isolate());
4672 CompileRun("function f(n) {" 4678 CompileRun("function f(n) {"
4673 " var a = new Array(n);" 4679 " var a = new Array(n);"
4674 " for (var i = 0; i < n; i += 100) a[i] = i;" 4680 " for (var i = 0; i < n; i += 100) a[i] = i;"
4675 "};" 4681 "};"
4676 "f(10 * 1024 * 1024);"); 4682 "f(10 * 1024 * 1024);");
4677 IncrementalMarking* marking = CcTest::heap()->incremental_marking(); 4683 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
4678 if (marking->IsStopped()) { 4684 if (marking->IsStopped()) {
4679 CcTest::heap()->StartIncrementalMarking(); 4685 CcTest::heap()->StartIncrementalMarking(
4686 i::Heap::kNoGCFlags, i::GarbageCollectionReason::kTesting);
4680 } 4687 }
4681 // This big step should be sufficient to mark the whole array. 4688 // This big step should be sufficient to mark the whole array.
4682 marking->Step(100 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD, 4689 marking->Step(100 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
4683 IncrementalMarking::FORCE_COMPLETION); 4690 IncrementalMarking::FORCE_COMPLETION);
4684 CHECK(marking->IsComplete() || 4691 CHECK(marking->IsComplete() ||
4685 marking->IsReadyToOverApproximateWeakClosure()); 4692 marking->IsReadyToOverApproximateWeakClosure());
4686 } 4693 }
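Incremental marking follows the same scheme: StartIncrementalMarking is now called with a GC flags mask plus a reason, as in the test above. A test that drives marking by hand in several places could wrap the pair in a small local helper; the one below is a hypothetical convenience, not something this patch adds.

// Hypothetical local helper for tests that start incremental marking manually.
static void StartIncrementalMarkingForTesting(i::Heap* heap) {
  if (heap->incremental_marking()->IsStopped()) {
    heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
                                  i::GarbageCollectionReason::kTesting);
  }
}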
4687 4694
4688 4695
4689 TEST(DisableInlineAllocation) { 4696 TEST(DisableInlineAllocation) {
(...skipping 90 matching lines...)
4780 } 4787 }
4781 4788
4782 // TurboFan respects pretenuring feedback from allocation sites, Crankshaft 4789 // TurboFan respects pretenuring feedback from allocation sites, Crankshaft
4783 // does not. Either is fine for the purposes of this test. 4790 // does not. Either is fine for the purposes of this test.
4784 CHECK(dependency_group_count == 1 || dependency_group_count == 2); 4791 CHECK(dependency_group_count == 1 || dependency_group_count == 2);
4785 } 4792 }
4786 4793
4787 // Now make sure that a GC gets rid of the function, even though we 4794 // Now make sure that a GC gets rid of the function, even though we
4788 // still have the allocation site alive. 4795 // still have the allocation site alive.
4789 for (int i = 0; i < 4; i++) { 4796 for (int i = 0; i < 4; i++) {
4790 heap->CollectAllGarbage(); 4797 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
4791 } 4798 }
4792 4799
4793 // The site still exists because of our global handle, but the code is no 4800 // The site still exists because of our global handle, but the code is no
4794 // longer referred to by dependent_code(). 4801 // longer referred to by dependent_code().
4795 CHECK(site->dependent_code()->object_at(0)->IsWeakCell() && 4802 CHECK(site->dependent_code()->object_at(0)->IsWeakCell() &&
4796 WeakCell::cast(site->dependent_code()->object_at(0))->cleared()); 4803 WeakCell::cast(site->dependent_code()->object_at(0))->cleared());
4797 } 4804 }
4798 4805
4799 4806
4800 TEST(CellsInOptimizedCodeAreWeak) { 4807 TEST(CellsInOptimizedCodeAreWeak) {
(...skipping 27 matching lines...)
4828 4835
4829 Handle<JSFunction> bar = Handle<JSFunction>::cast(v8::Utils::OpenHandle( 4836 Handle<JSFunction> bar = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
4830 *v8::Local<v8::Function>::Cast(CcTest::global() 4837 *v8::Local<v8::Function>::Cast(CcTest::global()
4831 ->Get(context.local(), v8_str("bar")) 4838 ->Get(context.local(), v8_str("bar"))
4832 .ToLocalChecked()))); 4839 .ToLocalChecked())));
4833 code = scope.CloseAndEscape(Handle<Code>(bar->code())); 4840 code = scope.CloseAndEscape(Handle<Code>(bar->code()));
4834 } 4841 }
4835 4842
4836 // Now make sure that a GC gets rid of the function 4843 // Now make sure that a GC gets rid of the function
4837 for (int i = 0; i < 4; i++) { 4844 for (int i = 0; i < 4; i++) {
4838 heap->CollectAllGarbage(); 4845 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
4839 } 4846 }
4840 4847
4841 CHECK(code->marked_for_deoptimization()); 4848 CHECK(code->marked_for_deoptimization());
4842 } 4849 }
4843 4850
4844 4851
4845 TEST(ObjectsInOptimizedCodeAreWeak) { 4852 TEST(ObjectsInOptimizedCodeAreWeak) {
4846 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return; 4853 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
4847 i::FLAG_weak_embedded_objects_in_optimized_code = true; 4854 i::FLAG_weak_embedded_objects_in_optimized_code = true;
4848 i::FLAG_allow_natives_syntax = true; 4855 i::FLAG_allow_natives_syntax = true;
(...skipping 22 matching lines...)
4871 4878
4872 Handle<JSFunction> bar = Handle<JSFunction>::cast(v8::Utils::OpenHandle( 4879 Handle<JSFunction> bar = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
4873 *v8::Local<v8::Function>::Cast(CcTest::global() 4880 *v8::Local<v8::Function>::Cast(CcTest::global()
4874 ->Get(context.local(), v8_str("bar")) 4881 ->Get(context.local(), v8_str("bar"))
4875 .ToLocalChecked()))); 4882 .ToLocalChecked())));
4876 code = scope.CloseAndEscape(Handle<Code>(bar->code())); 4883 code = scope.CloseAndEscape(Handle<Code>(bar->code()));
4877 } 4884 }
4878 4885
4879 // Now make sure that a GC gets rid of the function 4886 // Now make sure that a GC gets rid of the function
4880 for (int i = 0; i < 4; i++) { 4887 for (int i = 0; i < 4; i++) {
4881 heap->CollectAllGarbage(); 4888 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
4882 } 4889 }
4883 4890
4884 CHECK(code->marked_for_deoptimization()); 4891 CHECK(code->marked_for_deoptimization());
4885 } 4892 }
4886 4893
4887 TEST(NewSpaceObjectsInOptimizedCode) { 4894 TEST(NewSpaceObjectsInOptimizedCode) {
4888 if (i::FLAG_always_opt || !i::FLAG_crankshaft || i::FLAG_turbo) return; 4895 if (i::FLAG_always_opt || !i::FLAG_crankshaft || i::FLAG_turbo) return;
4889 i::FLAG_weak_embedded_objects_in_optimized_code = true; 4896 i::FLAG_weak_embedded_objects_in_optimized_code = true;
4890 i::FLAG_allow_natives_syntax = true; 4897 i::FLAG_allow_natives_syntax = true;
4891 CcTest::InitializeVM(); 4898 CcTest::InitializeVM();
(...skipping 29 matching lines...)
4921 *v8::Local<v8::Function>::Cast(CcTest::global() 4928 *v8::Local<v8::Function>::Cast(CcTest::global()
4922 ->Get(context.local(), v8_str("bar")) 4929 ->Get(context.local(), v8_str("bar"))
4923 .ToLocalChecked()))); 4930 .ToLocalChecked())));
4924 4931
4925 Handle<JSFunction> foo = Handle<JSFunction>::cast(v8::Utils::OpenHandle( 4932 Handle<JSFunction> foo = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
4926 *v8::Local<v8::Function>::Cast(CcTest::global() 4933 *v8::Local<v8::Function>::Cast(CcTest::global()
4927 ->Get(context.local(), v8_str("foo")) 4934 ->Get(context.local(), v8_str("foo"))
4928 .ToLocalChecked()))); 4935 .ToLocalChecked())));
4929 4936
4930 CHECK(heap->InNewSpace(*foo)); 4937 CHECK(heap->InNewSpace(*foo));
4931 heap->CollectGarbage(NEW_SPACE); 4938 CcTest::CollectGarbage(NEW_SPACE);
4932 heap->CollectGarbage(NEW_SPACE); 4939 CcTest::CollectGarbage(NEW_SPACE);
4933 CHECK(!heap->InNewSpace(*foo)); 4940 CHECK(!heap->InNewSpace(*foo));
4934 #ifdef VERIFY_HEAP 4941 #ifdef VERIFY_HEAP
4935 heap->Verify(); 4942 heap->Verify();
4936 #endif 4943 #endif
4937 CHECK(!bar->code()->marked_for_deoptimization()); 4944 CHECK(!bar->code()->marked_for_deoptimization());
4938 code = scope.CloseAndEscape(Handle<Code>(bar->code())); 4945 code = scope.CloseAndEscape(Handle<Code>(bar->code()));
4939 } 4946 }
4940 4947
4941 // Now make sure that a GC gets rid of the function 4948 // Now make sure that a GC gets rid of the function
4942 for (int i = 0; i < 4; i++) { 4949 for (int i = 0; i < 4; i++) {
4943 heap->CollectAllGarbage(); 4950 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
4944 } 4951 }
4945 4952
4946 CHECK(code->marked_for_deoptimization()); 4953 CHECK(code->marked_for_deoptimization());
4947 } 4954 }
4948 4955
4949 TEST(NoWeakHashTableLeakWithIncrementalMarking) { 4956 TEST(NoWeakHashTableLeakWithIncrementalMarking) {
4950 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return; 4957 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
4951 if (!i::FLAG_incremental_marking) return; 4958 if (!i::FLAG_incremental_marking) return;
4952 i::FLAG_weak_embedded_objects_in_optimized_code = true; 4959 i::FLAG_weak_embedded_objects_in_optimized_code = true;
4953 i::FLAG_allow_natives_syntax = true; 4960 i::FLAG_allow_natives_syntax = true;
4954 i::FLAG_compilation_cache = false; 4961 i::FLAG_compilation_cache = false;
4955 i::FLAG_retain_maps_for_n_gc = 0; 4962 i::FLAG_retain_maps_for_n_gc = 0;
4956 CcTest::InitializeVM(); 4963 CcTest::InitializeVM();
4957 Isolate* isolate = CcTest::i_isolate(); 4964 Isolate* isolate = CcTest::i_isolate();
4958 4965
4959 // Do not run for no-snap builds. 4966 // Do not run for no-snap builds.
4960 if (!i::Snapshot::HasContextSnapshot(isolate, 0)) return; 4967 if (!i::Snapshot::HasContextSnapshot(isolate, 0)) return;
4961 4968
4962 v8::internal::Heap* heap = CcTest::heap(); 4969 v8::internal::Heap* heap = CcTest::heap();
4963 4970
4964 // Get a clean slate regarding optimized functions on the heap. 4971 // Get a clean slate regarding optimized functions on the heap.
4965 i::Deoptimizer::DeoptimizeAll(isolate); 4972 i::Deoptimizer::DeoptimizeAll(isolate);
4966 heap->CollectAllGarbage(); 4973 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
4967 4974
4968 if (!isolate->use_crankshaft()) return; 4975 if (!isolate->use_crankshaft()) return;
4969 HandleScope outer_scope(heap->isolate()); 4976 HandleScope outer_scope(heap->isolate());
4970 for (int i = 0; i < 3; i++) { 4977 for (int i = 0; i < 3; i++) {
4971 heap::SimulateIncrementalMarking(heap); 4978 heap::SimulateIncrementalMarking(heap);
4972 { 4979 {
4973 LocalContext context; 4980 LocalContext context;
4974 HandleScope scope(heap->isolate()); 4981 HandleScope scope(heap->isolate());
4975 EmbeddedVector<char, 256> source; 4982 EmbeddedVector<char, 256> source;
4976 SNPrintF(source, 4983 SNPrintF(source,
4977 "function bar%d() {" 4984 "function bar%d() {"
4978 " return foo%d(1);" 4985 " return foo%d(1);"
4979 "};" 4986 "};"
4980 "function foo%d(x) { with (x) { return 1 + x; } };" 4987 "function foo%d(x) { with (x) { return 1 + x; } };"
4981 "bar%d();" 4988 "bar%d();"
4982 "bar%d();" 4989 "bar%d();"
4983 "bar%d();" 4990 "bar%d();"
4984 "%%OptimizeFunctionOnNextCall(bar%d);" 4991 "%%OptimizeFunctionOnNextCall(bar%d);"
4985 "bar%d();", 4992 "bar%d();",
4986 i, i, i, i, i, i, i, i); 4993 i, i, i, i, i, i, i, i);
4987 CompileRun(source.start()); 4994 CompileRun(source.start());
4988 } 4995 }
4989 // We have to abort incremental marking here to abandon black pages. 4996 // We have to abort incremental marking here to abandon black pages.
4990 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask); 4997 CcTest::CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
4991 } 4998 }
4992 int elements = 0; 4999 int elements = 0;
4993 if (heap->weak_object_to_code_table()->IsHashTable()) { 5000 if (heap->weak_object_to_code_table()->IsHashTable()) {
4994 WeakHashTable* t = WeakHashTable::cast(heap->weak_object_to_code_table()); 5001 WeakHashTable* t = WeakHashTable::cast(heap->weak_object_to_code_table());
4995 elements = t->NumberOfElements(); 5002 elements = t->NumberOfElements();
4996 } 5003 }
4997 CHECK_EQ(0, elements); 5004 CHECK_EQ(0, elements);
4998 } 5005 }
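This test also shows that the helper is used with two different flag masks in this file: kFinalizeIncrementalMarkingMask when an in-progress marking cycle should be finished before collecting, and kAbortIncrementalMarkingMask when, as the comment above notes, marking progress has to be thrown away to abandon black pages. Both masks are defined on i::Heap and go through the same call:

//   CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
//   CcTest::CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);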
4999 5006
5000 5007
(...skipping 28 matching lines...)
5029 TEST(NextCodeLinkIsWeak) { 5036 TEST(NextCodeLinkIsWeak) {
5030 i::FLAG_always_opt = false; 5037 i::FLAG_always_opt = false;
5031 i::FLAG_allow_natives_syntax = true; 5038 i::FLAG_allow_natives_syntax = true;
5032 CcTest::InitializeVM(); 5039 CcTest::InitializeVM();
5033 Isolate* isolate = CcTest::i_isolate(); 5040 Isolate* isolate = CcTest::i_isolate();
5034 v8::internal::Heap* heap = CcTest::heap(); 5041 v8::internal::Heap* heap = CcTest::heap();
5035 5042
5036 if (!isolate->use_crankshaft()) return; 5043 if (!isolate->use_crankshaft()) return;
5037 HandleScope outer_scope(heap->isolate()); 5044 HandleScope outer_scope(heap->isolate());
5038 Handle<Code> code; 5045 Handle<Code> code;
5039 heap->CollectAllAvailableGarbage(); 5046 CcTest::CollectAllAvailableGarbage();
5040 int code_chain_length_before, code_chain_length_after; 5047 int code_chain_length_before, code_chain_length_after;
5041 { 5048 {
5042 HandleScope scope(heap->isolate()); 5049 HandleScope scope(heap->isolate());
5043 Handle<JSFunction> mortal = 5050 Handle<JSFunction> mortal =
5044 OptimizeDummyFunction(CcTest::isolate(), "mortal"); 5051 OptimizeDummyFunction(CcTest::isolate(), "mortal");
5045 Handle<JSFunction> immortal = 5052 Handle<JSFunction> immortal =
5046 OptimizeDummyFunction(CcTest::isolate(), "immortal"); 5053 OptimizeDummyFunction(CcTest::isolate(), "immortal");
5047 CHECK_EQ(immortal->code()->next_code_link(), mortal->code()); 5054 CHECK_EQ(immortal->code()->next_code_link(), mortal->code());
5048 code_chain_length_before = GetCodeChainLength(immortal->code()); 5055 code_chain_length_before = GetCodeChainLength(immortal->code());
5049 // Keep the immortal code and let the mortal code die. 5056 // Keep the immortal code and let the mortal code die.
5050 code = scope.CloseAndEscape(Handle<Code>(immortal->code())); 5057 code = scope.CloseAndEscape(Handle<Code>(immortal->code()));
5051 CompileRun("mortal = null; immortal = null;"); 5058 CompileRun("mortal = null; immortal = null;");
5052 } 5059 }
5053 heap->CollectAllAvailableGarbage(); 5060 CcTest::CollectAllAvailableGarbage();
5054 // Now mortal code should be dead. 5061 // Now mortal code should be dead.
5055 code_chain_length_after = GetCodeChainLength(*code); 5062 code_chain_length_after = GetCodeChainLength(*code);
5056 CHECK_EQ(code_chain_length_before - 1, code_chain_length_after); 5063 CHECK_EQ(code_chain_length_before - 1, code_chain_length_after);
5057 } 5064 }
5058 5065
5059 5066
5060 static Handle<Code> DummyOptimizedCode(Isolate* isolate) { 5067 static Handle<Code> DummyOptimizedCode(Isolate* isolate) {
5061 i::byte buffer[i::Assembler::kMinimalBufferSize]; 5068 i::byte buffer[i::Assembler::kMinimalBufferSize];
5062 MacroAssembler masm(isolate, buffer, sizeof(buffer), 5069 MacroAssembler masm(isolate, buffer, sizeof(buffer),
5063 v8::internal::CodeObjectRequired::kYes); 5070 v8::internal::CodeObjectRequired::kYes);
(...skipping 10 matching lines...)
5074 5081
5075 5082
5076 TEST(NextCodeLinkIsWeak2) { 5083 TEST(NextCodeLinkIsWeak2) {
5077 i::FLAG_allow_natives_syntax = true; 5084 i::FLAG_allow_natives_syntax = true;
5078 CcTest::InitializeVM(); 5085 CcTest::InitializeVM();
5079 Isolate* isolate = CcTest::i_isolate(); 5086 Isolate* isolate = CcTest::i_isolate();
5080 v8::internal::Heap* heap = CcTest::heap(); 5087 v8::internal::Heap* heap = CcTest::heap();
5081 5088
5082 if (!isolate->use_crankshaft()) return; 5089 if (!isolate->use_crankshaft()) return;
5083 HandleScope outer_scope(heap->isolate()); 5090 HandleScope outer_scope(heap->isolate());
5084 heap->CollectAllAvailableGarbage(); 5091 CcTest::CollectAllAvailableGarbage();
5085 Handle<Context> context(Context::cast(heap->native_contexts_list()), isolate); 5092 Handle<Context> context(Context::cast(heap->native_contexts_list()), isolate);
5086 Handle<Code> new_head; 5093 Handle<Code> new_head;
5087 Handle<Object> old_head(context->get(Context::OPTIMIZED_CODE_LIST), isolate); 5094 Handle<Object> old_head(context->get(Context::OPTIMIZED_CODE_LIST), isolate);
5088 { 5095 {
5089 HandleScope scope(heap->isolate()); 5096 HandleScope scope(heap->isolate());
5090 Handle<Code> immortal = DummyOptimizedCode(isolate); 5097 Handle<Code> immortal = DummyOptimizedCode(isolate);
5091 Handle<Code> mortal = DummyOptimizedCode(isolate); 5098 Handle<Code> mortal = DummyOptimizedCode(isolate);
5092 mortal->set_next_code_link(*old_head); 5099 mortal->set_next_code_link(*old_head);
5093 immortal->set_next_code_link(*mortal); 5100 immortal->set_next_code_link(*mortal);
5094 context->set(Context::OPTIMIZED_CODE_LIST, *immortal); 5101 context->set(Context::OPTIMIZED_CODE_LIST, *immortal);
5095 new_head = scope.CloseAndEscape(immortal); 5102 new_head = scope.CloseAndEscape(immortal);
5096 } 5103 }
5097 heap->CollectAllAvailableGarbage(); 5104 CcTest::CollectAllAvailableGarbage();
5098 // Now mortal code should be dead. 5105 // Now mortal code should be dead.
5099 CHECK_EQ(*old_head, new_head->next_code_link()); 5106 CHECK_EQ(*old_head, new_head->next_code_link());
5100 } 5107 }
5101 5108
5102 5109
5103 static bool weak_ic_cleared = false; 5110 static bool weak_ic_cleared = false;
5104 5111
5105 static void ClearWeakIC( 5112 static void ClearWeakIC(
5106 const v8::WeakCallbackInfo<v8::Persistent<v8::Object>>& data) { 5113 const v8::WeakCallbackInfo<v8::Persistent<v8::Object>>& data) {
5107 printf("clear weak is called\n"); 5114 printf("clear weak is called\n");
(...skipping 29 matching lines...)
5137 " createObj(hat);" 5144 " createObj(hat);"
5138 " return hat;" 5145 " return hat;"
5139 " })();"; 5146 " })();";
5140 garbage.Reset(isolate, CompileRun(env.local(), source) 5147 garbage.Reset(isolate, CompileRun(env.local(), source)
5141 .ToLocalChecked() 5148 .ToLocalChecked()
5142 ->ToObject(env.local()) 5149 ->ToObject(env.local())
5143 .ToLocalChecked()); 5150 .ToLocalChecked());
5144 } 5151 }
5145 weak_ic_cleared = false; 5152 weak_ic_cleared = false;
5146 garbage.SetWeak(&garbage, &ClearWeakIC, v8::WeakCallbackType::kParameter); 5153 garbage.SetWeak(&garbage, &ClearWeakIC, v8::WeakCallbackType::kParameter);
5147 Heap* heap = CcTest::i_isolate()->heap(); 5154 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
5148 heap->CollectAllGarbage();
5149 CHECK(weak_ic_cleared); 5155 CHECK(weak_ic_cleared);
5150 5156
5151 // We've determined the constructor in createObj has had its weak cell 5157 // We've determined the constructor in createObj has had its weak cell
5152 // cleared. Now, verify that one additional call with a new function 5158 // cleared. Now, verify that one additional call with a new function
5153 // allows monomorphicity. 5159 // allows monomorphicity.
5154 Handle<TypeFeedbackVector> feedback_vector = Handle<TypeFeedbackVector>( 5160 Handle<TypeFeedbackVector> feedback_vector = Handle<TypeFeedbackVector>(
5155 createObj->feedback_vector(), CcTest::i_isolate()); 5161 createObj->feedback_vector(), CcTest::i_isolate());
5156 for (int i = 0; i < 20; i++) { 5162 for (int i = 0; i < 20; i++) {
5157 Object* slot_value = feedback_vector->Get(FeedbackVectorSlot(0)); 5163 Object* slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
5158 CHECK(slot_value->IsWeakCell()); 5164 CHECK(slot_value->IsWeakCell());
5159 if (WeakCell::cast(slot_value)->cleared()) break; 5165 if (WeakCell::cast(slot_value)->cleared()) break;
5160 heap->CollectAllGarbage(); 5166 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
5161 } 5167 }
5162 5168
5163 Object* slot_value = feedback_vector->Get(FeedbackVectorSlot(0)); 5169 Object* slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
5164 CHECK(slot_value->IsWeakCell() && WeakCell::cast(slot_value)->cleared()); 5170 CHECK(slot_value->IsWeakCell() && WeakCell::cast(slot_value)->cleared());
5165 CompileRun( 5171 CompileRun(
5166 "function coat() { this.x = 6; }" 5172 "function coat() { this.x = 6; }"
5167 "createObj(coat);"); 5173 "createObj(coat);");
5168 slot_value = feedback_vector->Get(FeedbackVectorSlot(0)); 5174 slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
5169 CHECK(slot_value->IsWeakCell() && !WeakCell::cast(slot_value)->cleared()); 5175 CHECK(slot_value->IsWeakCell() && !WeakCell::cast(slot_value)->cleared());
5170 } 5176 }
5171 5177
5172 5178
5173 // Checks that the value returned by execution of the source is weak. 5179 // Checks that the value returned by execution of the source is weak.
5174 void CheckWeakness(const char* source) { 5180 void CheckWeakness(const char* source) {
5175 i::FLAG_stress_compaction = false; 5181 i::FLAG_stress_compaction = false;
5176 CcTest::InitializeVM(); 5182 CcTest::InitializeVM();
5177 v8::Isolate* isolate = CcTest::isolate(); 5183 v8::Isolate* isolate = CcTest::isolate();
5178 LocalContext env; 5184 LocalContext env;
5179 v8::HandleScope scope(isolate); 5185 v8::HandleScope scope(isolate);
5180 v8::Persistent<v8::Object> garbage; 5186 v8::Persistent<v8::Object> garbage;
5181 { 5187 {
5182 v8::HandleScope scope(isolate); 5188 v8::HandleScope scope(isolate);
5183 garbage.Reset(isolate, CompileRun(env.local(), source) 5189 garbage.Reset(isolate, CompileRun(env.local(), source)
5184 .ToLocalChecked() 5190 .ToLocalChecked()
5185 ->ToObject(env.local()) 5191 ->ToObject(env.local())
5186 .ToLocalChecked()); 5192 .ToLocalChecked());
5187 } 5193 }
5188 weak_ic_cleared = false; 5194 weak_ic_cleared = false;
5189 garbage.SetWeak(&garbage, &ClearWeakIC, v8::WeakCallbackType::kParameter); 5195 garbage.SetWeak(&garbage, &ClearWeakIC, v8::WeakCallbackType::kParameter);
5190 Heap* heap = CcTest::i_isolate()->heap(); 5196 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
5191 heap->CollectAllGarbage();
5192 CHECK(weak_ic_cleared); 5197 CHECK(weak_ic_cleared);
5193 } 5198 }
5194 5199
5195 5200
5196 // Each of the following "weak IC" tests creates an IC that embeds a map with 5201 // Each of the following "weak IC" tests creates an IC that embeds a map with
5197 // the prototype pointing to _proto_ and checks that the _proto_ dies on GC. 5202 // the prototype pointing to _proto_ and checks that the _proto_ dies on GC.
5198 TEST(WeakMapInMonomorphicLoadIC) { 5203 TEST(WeakMapInMonomorphicLoadIC) {
5199 CheckWeakness("function loadIC(obj) {" 5204 CheckWeakness("function loadIC(obj) {"
5200 " return obj.name;" 5205 " return obj.name;"
5201 "}" 5206 "}"
(...skipping 173 matching lines...)
5375 CHECK(!IC::ICUseVector(kind)); 5380 CHECK(!IC::ICUseVector(kind));
5376 CHECK_EQ(state, IC::StateFromCode(ic)); 5381 CHECK_EQ(state, IC::StateFromCode(ic));
5377 } 5382 }
5378 } 5383 }
5379 5384
5380 5385
5381 TEST(MonomorphicStaysMonomorphicAfterGC) { 5386 TEST(MonomorphicStaysMonomorphicAfterGC) {
5382 if (FLAG_always_opt) return; 5387 if (FLAG_always_opt) return;
5383 CcTest::InitializeVM(); 5388 CcTest::InitializeVM();
5384 Isolate* isolate = CcTest::i_isolate(); 5389 Isolate* isolate = CcTest::i_isolate();
5385 Heap* heap = isolate->heap();
5386 v8::HandleScope scope(CcTest::isolate()); 5390 v8::HandleScope scope(CcTest::isolate());
5387 CompileRun( 5391 CompileRun(
5388 "function loadIC(obj) {" 5392 "function loadIC(obj) {"
5389 " return obj.name;" 5393 " return obj.name;"
5390 "}" 5394 "}"
5391 "function testIC() {" 5395 "function testIC() {"
5392 " var proto = {'name' : 'weak'};" 5396 " var proto = {'name' : 'weak'};"
5393 " var obj = Object.create(proto);" 5397 " var obj = Object.create(proto);"
5394 " loadIC(obj);" 5398 " loadIC(obj);"
5395 " loadIC(obj);" 5399 " loadIC(obj);"
5396 " loadIC(obj);" 5400 " loadIC(obj);"
5397 " return proto;" 5401 " return proto;"
5398 "};"); 5402 "};");
5399 Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC"); 5403 Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC");
5400 { 5404 {
5401 v8::HandleScope scope(CcTest::isolate()); 5405 v8::HandleScope scope(CcTest::isolate());
5402 CompileRun("(testIC())"); 5406 CompileRun("(testIC())");
5403 } 5407 }
5404 heap->CollectAllGarbage(); 5408 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
5405 CheckIC(loadIC, Code::LOAD_IC, 0, MONOMORPHIC); 5409 CheckIC(loadIC, Code::LOAD_IC, 0, MONOMORPHIC);
5406 { 5410 {
5407 v8::HandleScope scope(CcTest::isolate()); 5411 v8::HandleScope scope(CcTest::isolate());
5408 CompileRun("(testIC())"); 5412 CompileRun("(testIC())");
5409 } 5413 }
5410 CheckIC(loadIC, Code::LOAD_IC, 0, MONOMORPHIC); 5414 CheckIC(loadIC, Code::LOAD_IC, 0, MONOMORPHIC);
5411 } 5415 }
5412 5416
5413 5417
5414 TEST(PolymorphicStaysPolymorphicAfterGC) { 5418 TEST(PolymorphicStaysPolymorphicAfterGC) {
5415 if (FLAG_always_opt) return; 5419 if (FLAG_always_opt) return;
5416 CcTest::InitializeVM(); 5420 CcTest::InitializeVM();
5417 Isolate* isolate = CcTest::i_isolate(); 5421 Isolate* isolate = CcTest::i_isolate();
5418 Heap* heap = isolate->heap();
5419 v8::HandleScope scope(CcTest::isolate()); 5422 v8::HandleScope scope(CcTest::isolate());
5420 CompileRun( 5423 CompileRun(
5421 "function loadIC(obj) {" 5424 "function loadIC(obj) {"
5422 " return obj.name;" 5425 " return obj.name;"
5423 "}" 5426 "}"
5424 "function testIC() {" 5427 "function testIC() {"
5425 " var proto = {'name' : 'weak'};" 5428 " var proto = {'name' : 'weak'};"
5426 " var obj = Object.create(proto);" 5429 " var obj = Object.create(proto);"
5427 " loadIC(obj);" 5430 " loadIC(obj);"
5428 " loadIC(obj);" 5431 " loadIC(obj);"
5429 " loadIC(obj);" 5432 " loadIC(obj);"
5430 " var poly = Object.create(proto);" 5433 " var poly = Object.create(proto);"
5431 " poly.x = true;" 5434 " poly.x = true;"
5432 " loadIC(poly);" 5435 " loadIC(poly);"
5433 " return proto;" 5436 " return proto;"
5434 "};"); 5437 "};");
5435 Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC"); 5438 Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC");
5436 { 5439 {
5437 v8::HandleScope scope(CcTest::isolate()); 5440 v8::HandleScope scope(CcTest::isolate());
5438 CompileRun("(testIC())"); 5441 CompileRun("(testIC())");
5439 } 5442 }
5440 heap->CollectAllGarbage(); 5443 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
5441 CheckIC(loadIC, Code::LOAD_IC, 0, POLYMORPHIC); 5444 CheckIC(loadIC, Code::LOAD_IC, 0, POLYMORPHIC);
5442 { 5445 {
5443 v8::HandleScope scope(CcTest::isolate()); 5446 v8::HandleScope scope(CcTest::isolate());
5444 CompileRun("(testIC())"); 5447 CompileRun("(testIC())");
5445 } 5448 }
5446 CheckIC(loadIC, Code::LOAD_IC, 0, POLYMORPHIC); 5449 CheckIC(loadIC, Code::LOAD_IC, 0, POLYMORPHIC);
5447 } 5450 }
5448 5451
5449 5452
5450 TEST(WeakCell) { 5453 TEST(WeakCell) {
5451 CcTest::InitializeVM(); 5454 CcTest::InitializeVM();
5452 Isolate* isolate = CcTest::i_isolate(); 5455 Isolate* isolate = CcTest::i_isolate();
5453 v8::internal::Heap* heap = CcTest::heap();
5454 v8::internal::Factory* factory = isolate->factory(); 5456 v8::internal::Factory* factory = isolate->factory();
5455 5457
5456 HandleScope outer_scope(isolate); 5458 HandleScope outer_scope(isolate);
5457 Handle<WeakCell> weak_cell1; 5459 Handle<WeakCell> weak_cell1;
5458 { 5460 {
5459 HandleScope inner_scope(isolate); 5461 HandleScope inner_scope(isolate);
5460 Handle<HeapObject> value = factory->NewFixedArray(1, NOT_TENURED); 5462 Handle<HeapObject> value = factory->NewFixedArray(1, NOT_TENURED);
5461 weak_cell1 = inner_scope.CloseAndEscape(factory->NewWeakCell(value)); 5463 weak_cell1 = inner_scope.CloseAndEscape(factory->NewWeakCell(value));
5462 } 5464 }
5463 5465
5464 Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED); 5466 Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED);
5465 Handle<WeakCell> weak_cell2; 5467 Handle<WeakCell> weak_cell2;
5466 { 5468 {
5467 HandleScope inner_scope(isolate); 5469 HandleScope inner_scope(isolate);
5468 weak_cell2 = inner_scope.CloseAndEscape(factory->NewWeakCell(survivor)); 5470 weak_cell2 = inner_scope.CloseAndEscape(factory->NewWeakCell(survivor));
5469 } 5471 }
5470 CHECK(weak_cell1->value()->IsFixedArray()); 5472 CHECK(weak_cell1->value()->IsFixedArray());
5471 CHECK_EQ(*survivor, weak_cell2->value()); 5473 CHECK_EQ(*survivor, weak_cell2->value());
5472 heap->CollectGarbage(NEW_SPACE); 5474 CcTest::CollectGarbage(NEW_SPACE);
5473 CHECK(weak_cell1->value()->IsFixedArray()); 5475 CHECK(weak_cell1->value()->IsFixedArray());
5474 CHECK_EQ(*survivor, weak_cell2->value()); 5476 CHECK_EQ(*survivor, weak_cell2->value());
5475 heap->CollectGarbage(NEW_SPACE); 5477 CcTest::CollectGarbage(NEW_SPACE);
5476 CHECK(weak_cell1->value()->IsFixedArray()); 5478 CHECK(weak_cell1->value()->IsFixedArray());
5477 CHECK_EQ(*survivor, weak_cell2->value()); 5479 CHECK_EQ(*survivor, weak_cell2->value());
5478 heap->CollectAllAvailableGarbage(); 5480 CcTest::CollectAllAvailableGarbage();
5479 CHECK(weak_cell1->cleared()); 5481 CHECK(weak_cell1->cleared());
5480 CHECK_EQ(*survivor, weak_cell2->value()); 5482 CHECK_EQ(*survivor, weak_cell2->value());
5481 } 5483 }
5482 5484
5483 5485
5484 TEST(WeakCellsWithIncrementalMarking) { 5486 TEST(WeakCellsWithIncrementalMarking) {
5485 CcTest::InitializeVM(); 5487 CcTest::InitializeVM();
5486 Isolate* isolate = CcTest::i_isolate(); 5488 Isolate* isolate = CcTest::i_isolate();
5487 v8::internal::Heap* heap = CcTest::heap(); 5489 v8::internal::Heap* heap = CcTest::heap();
5488 v8::internal::Factory* factory = isolate->factory(); 5490 v8::internal::Factory* factory = isolate->factory();
5489 5491
5490 const int N = 16; 5492 const int N = 16;
5491 HandleScope outer_scope(isolate); 5493 HandleScope outer_scope(isolate);
5492 Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED); 5494 Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED);
5493 Handle<WeakCell> weak_cells[N]; 5495 Handle<WeakCell> weak_cells[N];
5494 5496
5495 for (int i = 0; i < N; i++) { 5497 for (int i = 0; i < N; i++) {
5496 HandleScope inner_scope(isolate); 5498 HandleScope inner_scope(isolate);
5497 Handle<HeapObject> value = 5499 Handle<HeapObject> value =
5498 i == 0 ? survivor : factory->NewFixedArray(1, NOT_TENURED); 5500 i == 0 ? survivor : factory->NewFixedArray(1, NOT_TENURED);
5499 Handle<WeakCell> weak_cell = factory->NewWeakCell(value); 5501 Handle<WeakCell> weak_cell = factory->NewWeakCell(value);
5500 CHECK(weak_cell->value()->IsFixedArray()); 5502 CHECK(weak_cell->value()->IsFixedArray());
5501 IncrementalMarking* marking = heap->incremental_marking(); 5503 IncrementalMarking* marking = heap->incremental_marking();
5502 if (marking->IsStopped()) { 5504 if (marking->IsStopped()) {
5503 heap->StartIncrementalMarking(); 5505 heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
5506 i::GarbageCollectionReason::kTesting);
5504 } 5507 }
5505 marking->Step(128, IncrementalMarking::NO_GC_VIA_STACK_GUARD, 5508 marking->Step(128, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
5506 IncrementalMarking::FORCE_COMPLETION); 5509 IncrementalMarking::FORCE_COMPLETION);
5507 heap->CollectGarbage(NEW_SPACE); 5510 CcTest::CollectGarbage(NEW_SPACE);
5508 CHECK(weak_cell->value()->IsFixedArray()); 5511 CHECK(weak_cell->value()->IsFixedArray());
5509 weak_cells[i] = inner_scope.CloseAndEscape(weak_cell); 5512 weak_cells[i] = inner_scope.CloseAndEscape(weak_cell);
5510 } 5513 }
5511 // Call collect all twice to make sure that we also cleared 5514 // Call collect all twice to make sure that we also cleared
5512 // weak cells that were allocated on black pages. 5515 // weak cells that were allocated on black pages.
5513 heap->CollectAllGarbage(); 5516 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
5514 heap->CollectAllGarbage(); 5517 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
5515 CHECK_EQ(*survivor, weak_cells[0]->value()); 5518 CHECK_EQ(*survivor, weak_cells[0]->value());
5516 for (int i = 1; i < N; i++) { 5519 for (int i = 1; i < N; i++) {
5517 CHECK(weak_cells[i]->cleared()); 5520 CHECK(weak_cells[i]->cleared());
5518 } 5521 }
5519 } 5522 }
5520 5523
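Reviewer note: the change in the test above is the pattern applied throughout this file — raw heap->CollectGarbage() / CollectAllGarbage() / CollectAllAvailableGarbage() calls become CcTest helpers so that every test-triggered GC carries an explicit i::GarbageCollectionReason. A minimal sketch of how such helpers could forward to Heap, assuming the two-argument Heap::CollectGarbage(space, reason) call that appears later in this diff; the *ForTesting names below are made up for illustration and are not the actual CcTest declarations:

// Sketch only (assumption, not part of this patch): forward a test-triggered
// GC with an explicit reason. The real helpers are CcTest static methods
// declared elsewhere in the cctest harness.
static void CollectGarbageForTesting(i::Heap* heap, i::AllocationSpace space) {
  heap->CollectGarbage(space, i::GarbageCollectionReason::kTesting);
}
static void CollectAllGarbageForTesting(i::Heap* heap, int flags) {
  // Assumed overload: Heap::CollectAllGarbage(flags, reason).
  heap->CollectAllGarbage(flags, i::GarbageCollectionReason::kTesting);
}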
5521 5524
5522 #ifdef DEBUG 5525 #ifdef DEBUG
5523 TEST(AddInstructionChangesNewSpacePromotion) { 5526 TEST(AddInstructionChangesNewSpacePromotion) {
5524 i::FLAG_allow_natives_syntax = true; 5527 i::FLAG_allow_natives_syntax = true;
(...skipping 26 matching lines...)
5551 "%OptimizeFunctionOnNextCall(crash);" 5554 "%OptimizeFunctionOnNextCall(crash);"
5552 "crash(1);"); 5555 "crash(1);");
5553 5556
5554 v8::Local<v8::Object> global = CcTest::global(); 5557 v8::Local<v8::Object> global = CcTest::global();
5555 v8::Local<v8::Function> g = v8::Local<v8::Function>::Cast( 5558 v8::Local<v8::Function> g = v8::Local<v8::Function>::Cast(
5556 global->Get(env.local(), v8_str("crash")).ToLocalChecked()); 5559 global->Get(env.local(), v8_str("crash")).ToLocalChecked());
5557 v8::Local<v8::Value> args1[] = {v8_num(1)}; 5560 v8::Local<v8::Value> args1[] = {v8_num(1)};
5558 heap->DisableInlineAllocation(); 5561 heap->DisableInlineAllocation();
5559 heap->set_allocation_timeout(1); 5562 heap->set_allocation_timeout(1);
5560 g->Call(env.local(), global, 1, args1).ToLocalChecked(); 5563 g->Call(env.local(), global, 1, args1).ToLocalChecked();
5561 heap->CollectAllGarbage(); 5564 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
5562 } 5565 }
5563 5566
5564 5567
5565 void OnFatalErrorExpectOOM(const char* location, const char* message) { 5568 void OnFatalErrorExpectOOM(const char* location, const char* message) {
5566 // Exit with 0 if the location matches our expectation. 5569 // Exit with 0 if the location matches our expectation.
5567 exit(strcmp(location, "CALL_AND_RETRY_LAST")); 5570 exit(strcmp(location, "CALL_AND_RETRY_LAST"));
5568 } 5571 }
5569 5572
5570 5573
5571 TEST(CEntryStubOOM) { 5574 TEST(CEntryStubOOM) {
(...skipping 40 matching lines...)
5612 const int kFixedArrayLen = 512; 5615 const int kFixedArrayLen = 512;
5613 Handle<FixedArray> objects[kMaxObjects]; 5616 Handle<FixedArray> objects[kMaxObjects];
5614 for (int i = 0; (i < kMaxObjects) && 5617 for (int i = 0; (i < kMaxObjects) &&
5615 heap->CanExpandOldGeneration(old_space->AreaSize()); 5618 heap->CanExpandOldGeneration(old_space->AreaSize());
5616 i++) { 5619 i++) {
5617 objects[i] = i_isolate->factory()->NewFixedArray(kFixedArrayLen, TENURED); 5620 objects[i] = i_isolate->factory()->NewFixedArray(kFixedArrayLen, TENURED);
5618 Page::FromAddress(objects[i]->address()) 5621 Page::FromAddress(objects[i]->address())
5619 ->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING); 5622 ->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
5620 } 5623 }
5621 heap::SimulateFullSpace(old_space); 5624 heap::SimulateFullSpace(old_space);
5622 heap->CollectGarbage(OLD_SPACE); 5625 heap->CollectGarbage(OLD_SPACE, i::GarbageCollectionReason::kTesting);
5623 // If we get this far, we've successfully aborted compaction. Any further 5626 // If we get this far, we've successfully aborted compaction. Any further
5624 // allocations might trigger OOM. 5627 // allocations might trigger OOM.
5625 } 5628 }
5626 isolate->Exit(); 5629 isolate->Exit();
5627 isolate->Dispose(); 5630 isolate->Dispose();
5628 } 5631 }
5629 5632
5630 5633
5631 TEST(Regress357137) { 5634 TEST(Regress357137) {
5632 CcTest::InitializeVM(); 5635 CcTest::InitializeVM();
(...skipping 75 matching lines...)
5708 // semi-space page. The second allocation in (3) will not fit into the 5711 // semi-space page. The second allocation in (3) will not fit into the
5709 // first semi-space page, but it will overwrite the promotion queue which 5712 // first semi-space page, but it will overwrite the promotion queue which
5710 // are in the second semi-space page. If the right guards are in place, the 5713 // are in the second semi-space page. If the right guards are in place, the
5711 // promotion queue will be evacuated in that case. 5714 // promotion queue will be evacuated in that case.
5712 5715
5713 5716
5714 CHECK(new_space->IsAtMaximumCapacity()); 5717 CHECK(new_space->IsAtMaximumCapacity());
5715 CHECK(i::FLAG_min_semi_space_size * MB == new_space->TotalCapacity()); 5718 CHECK(i::FLAG_min_semi_space_size * MB == new_space->TotalCapacity());
5716 5719
5717 // Call the scavenger two times to get an empty new space 5720 // Call the scavenger two times to get an empty new space
5718 heap->CollectGarbage(NEW_SPACE); 5721 heap->CollectGarbage(NEW_SPACE, i::GarbageCollectionReason::kTesting);
5719 heap->CollectGarbage(NEW_SPACE); 5722 heap->CollectGarbage(NEW_SPACE, i::GarbageCollectionReason::kTesting);
5720 5723
5721 // First create a few objects which will survive a scavenge, and will get 5724 // First create a few objects which will survive a scavenge, and will get
5722 // promoted to the old generation later on. These objects will create 5725 // promoted to the old generation later on. These objects will create
5723 // promotion queue entries at the end of the second semi-space page. 5726 // promotion queue entries at the end of the second semi-space page.
5724 const int number_handles = 12; 5727 const int number_handles = 12;
5725 Handle<FixedArray> handles[number_handles]; 5728 Handle<FixedArray> handles[number_handles];
5726 for (int i = 0; i < number_handles; i++) { 5729 for (int i = 0; i < number_handles; i++) {
5727 handles[i] = i_isolate->factory()->NewFixedArray(1, NOT_TENURED); 5730 handles[i] = i_isolate->factory()->NewFixedArray(1, NOT_TENURED);
5728 } 5731 }
5729 5732
5730 heap->CollectGarbage(NEW_SPACE); 5733 heap->CollectGarbage(NEW_SPACE, i::GarbageCollectionReason::kTesting);
5731 CHECK(i::FLAG_min_semi_space_size * MB == new_space->TotalCapacity()); 5734 CHECK(i::FLAG_min_semi_space_size * MB == new_space->TotalCapacity());
5732 5735
5733 // Fill-up the first semi-space page. 5736 // Fill-up the first semi-space page.
5734 heap::FillUpOnePage(new_space); 5737 heap::FillUpOnePage(new_space);
5735 5738
5736 // Create a small object to initialize the bump pointer on the second 5739 // Create a small object to initialize the bump pointer on the second
5737 // semi-space page. 5740 // semi-space page.
5738 Handle<FixedArray> small = 5741 Handle<FixedArray> small =
5739 i_isolate->factory()->NewFixedArray(1, NOT_TENURED); 5742 i_isolate->factory()->NewFixedArray(1, NOT_TENURED);
5740 CHECK(heap->InNewSpace(*small)); 5743 CHECK(heap->InNewSpace(*small));
5741 5744
5742 // Fill-up the second semi-space page. 5745 // Fill-up the second semi-space page.
5743 heap::FillUpOnePage(new_space); 5746 heap::FillUpOnePage(new_space);
5744 5747
5745 // This scavenge will corrupt memory if the promotion queue is not 5748 // This scavenge will corrupt memory if the promotion queue is not
5746 // evacuated. 5749 // evacuated.
5747 heap->CollectGarbage(NEW_SPACE); 5750 heap->CollectGarbage(NEW_SPACE, i::GarbageCollectionReason::kTesting);
5748 } 5751 }
5749 isolate->Dispose(); 5752 isolate->Dispose();
5750 } 5753 }
5751 5754
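For reviewers reading this file in isolation: the only enumerator of the new i::GarbageCollectionReason enum that is visible in this diff is kTesting. A rough sketch of its shape, purely as an assumption; the real definition, with the full list of reasons, lives in the heap sources and not in this file:

// Assumed shape only; enumerators other than kTesting are not visible here.
enum class GarbageCollectionReason {
  kTesting,
  // ... further reasons (e.g. allocation failure, embedder requests) are
  // expected in the real definition ...
};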
5752 5755
5753 TEST(Regress388880) { 5756 TEST(Regress388880) {
5754 i::FLAG_expose_gc = true; 5757 i::FLAG_expose_gc = true;
5755 CcTest::InitializeVM(); 5758 CcTest::InitializeVM();
5756 v8::HandleScope scope(CcTest::isolate()); 5759 v8::HandleScope scope(CcTest::isolate());
5757 Isolate* isolate = CcTest::i_isolate(); 5760 Isolate* isolate = CcTest::i_isolate();
(...skipping 22 matching lines...)
5780 // Ensure that the object is allocated where we need it. 5783 // Ensure that the object is allocated where we need it.
5781 Page* page = Page::FromAddress(o->address()); 5784 Page* page = Page::FromAddress(o->address());
5782 CHECK_EQ(desired_offset, page->Offset(o->address())); 5785 CHECK_EQ(desired_offset, page->Offset(o->address()));
5783 5786
5784 // Now we have an object right at the end of the page. 5787 // Now we have an object right at the end of the page.
5785 5788
5786 // Enable incremental marking to trigger actions in Heap::AdjustLiveBytes() 5789 // Enable incremental marking to trigger actions in Heap::AdjustLiveBytes()
5787 // that would cause a crash. 5790 // that would cause a crash.
5788 IncrementalMarking* marking = CcTest::heap()->incremental_marking(); 5791 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
5789 marking->Stop(); 5792 marking->Stop();
5790 CcTest::heap()->StartIncrementalMarking(); 5793 CcTest::heap()->StartIncrementalMarking(i::Heap::kNoGCFlags,
5794 i::GarbageCollectionReason::kTesting);
5791 CHECK(marking->IsMarking()); 5795 CHECK(marking->IsMarking());
5792 5796
5793 // Now everything is set up for crashing in JSObject::MigrateFastToFast() 5797 // Now everything is set up for crashing in JSObject::MigrateFastToFast()
5794 // when it calls heap->AdjustLiveBytes(...). 5798 // when it calls heap->AdjustLiveBytes(...).
5795 JSObject::MigrateToMap(o, map2); 5799 JSObject::MigrateToMap(o, map2);
5796 } 5800 }
5797 5801
5798 5802
5799 TEST(Regress3631) { 5803 TEST(Regress3631) {
5800 i::FLAG_expose_gc = true; 5804 i::FLAG_expose_gc = true;
5801 CcTest::InitializeVM(); 5805 CcTest::InitializeVM();
5802 v8::HandleScope scope(CcTest::isolate()); 5806 v8::HandleScope scope(CcTest::isolate());
5803 Isolate* isolate = CcTest::i_isolate(); 5807 Isolate* isolate = CcTest::i_isolate();
5804 Heap* heap = isolate->heap(); 5808 Heap* heap = isolate->heap();
5805 IncrementalMarking* marking = CcTest::heap()->incremental_marking(); 5809 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
5806 v8::Local<v8::Value> result = CompileRun( 5810 v8::Local<v8::Value> result = CompileRun(
5807 "var weak_map = new WeakMap();" 5811 "var weak_map = new WeakMap();"
5808 "var future_keys = [];" 5812 "var future_keys = [];"
5809 "for (var i = 0; i < 50; i++) {" 5813 "for (var i = 0; i < 50; i++) {"
5810 " var key = {'k' : i + 0.1};" 5814 " var key = {'k' : i + 0.1};"
5811 " weak_map.set(key, 1);" 5815 " weak_map.set(key, 1);"
5812 " future_keys.push({'x' : i + 0.2});" 5816 " future_keys.push({'x' : i + 0.2});"
5813 "}" 5817 "}"
5814 "weak_map"); 5818 "weak_map");
5815 if (marking->IsStopped()) { 5819 if (marking->IsStopped()) {
5816 CcTest::heap()->StartIncrementalMarking(); 5820 CcTest::heap()->StartIncrementalMarking(
5821 i::Heap::kNoGCFlags, i::GarbageCollectionReason::kTesting);
5817 } 5822 }
5818 // Incrementally mark the backing store. 5823 // Incrementally mark the backing store.
5819 Handle<JSReceiver> obj = 5824 Handle<JSReceiver> obj =
5820 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result)); 5825 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
5821 Handle<JSWeakCollection> weak_map(reinterpret_cast<JSWeakCollection*>(*obj)); 5826 Handle<JSWeakCollection> weak_map(reinterpret_cast<JSWeakCollection*>(*obj));
5822 while (!Marking::IsBlack( 5827 while (!Marking::IsBlack(
5823 ObjectMarking::MarkBitFrom(HeapObject::cast(weak_map->table()))) && 5828 ObjectMarking::MarkBitFrom(HeapObject::cast(weak_map->table()))) &&
5824 !marking->IsStopped()) { 5829 !marking->IsStopped()) {
5825 marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD, 5830 marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
5826 IncrementalMarking::FORCE_COMPLETION); 5831 IncrementalMarking::FORCE_COMPLETION);
5827 } 5832 }
5828 // Stash the backing store in a handle. 5833 // Stash the backing store in a handle.
5829 Handle<Object> save(weak_map->table(), isolate); 5834 Handle<Object> save(weak_map->table(), isolate);
5830 // The following line will update the backing store. 5835 // The following line will update the backing store.
5831 CompileRun( 5836 CompileRun(
5832 "for (var i = 0; i < 50; i++) {" 5837 "for (var i = 0; i < 50; i++) {"
5833 " weak_map.set(future_keys[i], i);" 5838 " weak_map.set(future_keys[i], i);"
5834 "}"); 5839 "}");
5835 heap->incremental_marking()->set_should_hurry(true); 5840 heap->incremental_marking()->set_should_hurry(true);
5836 heap->CollectGarbage(OLD_SPACE); 5841 CcTest::CollectGarbage(OLD_SPACE);
5837 } 5842 }
5838 5843
5839 5844
5840 TEST(Regress442710) { 5845 TEST(Regress442710) {
5841 CcTest::InitializeVM(); 5846 CcTest::InitializeVM();
5842 Isolate* isolate = CcTest::i_isolate(); 5847 Isolate* isolate = CcTest::i_isolate();
5843 Heap* heap = isolate->heap();
5844 Factory* factory = isolate->factory(); 5848 Factory* factory = isolate->factory();
5845 5849
5846 HandleScope sc(isolate); 5850 HandleScope sc(isolate);
5847 Handle<JSGlobalObject> global( 5851 Handle<JSGlobalObject> global(
5848 CcTest::i_isolate()->context()->global_object()); 5852 CcTest::i_isolate()->context()->global_object());
5849 Handle<JSArray> array = factory->NewJSArray(2); 5853 Handle<JSArray> array = factory->NewJSArray(2);
5850 5854
5851 Handle<String> name = factory->InternalizeUtf8String("testArray"); 5855 Handle<String> name = factory->InternalizeUtf8String("testArray");
5852 JSReceiver::SetProperty(global, name, array, SLOPPY).Check(); 5856 JSReceiver::SetProperty(global, name, array, SLOPPY).Check();
5853 CompileRun("testArray[0] = 1; testArray[1] = 2; testArray.shift();"); 5857 CompileRun("testArray[0] = 1; testArray[1] = 2; testArray.shift();");
5854 heap->CollectGarbage(OLD_SPACE); 5858 CcTest::CollectGarbage(OLD_SPACE);
5855 } 5859 }
5856 5860
5857 5861
5858 HEAP_TEST(NumberStringCacheSize) { 5862 HEAP_TEST(NumberStringCacheSize) {
5859 // Test that the number-string cache has not been resized in the snapshot. 5863 // Test that the number-string cache has not been resized in the snapshot.
5860 CcTest::InitializeVM(); 5864 CcTest::InitializeVM();
5861 Isolate* isolate = CcTest::i_isolate(); 5865 Isolate* isolate = CcTest::i_isolate();
5862 if (!isolate->snapshot_available()) return; 5866 if (!isolate->snapshot_available()) return;
5863 Heap* heap = isolate->heap(); 5867 Heap* heap = isolate->heap();
5864 CHECK_EQ(Heap::kInitialNumberStringCacheSize * 2, 5868 CHECK_EQ(Heap::kInitialNumberStringCacheSize * 2,
5865 heap->number_string_cache()->length()); 5869 heap->number_string_cache()->length());
5866 } 5870 }
5867 5871
5868 5872
5869 TEST(Regress3877) { 5873 TEST(Regress3877) {
5870 CcTest::InitializeVM(); 5874 CcTest::InitializeVM();
5871 Isolate* isolate = CcTest::i_isolate(); 5875 Isolate* isolate = CcTest::i_isolate();
5872 Heap* heap = isolate->heap();
5873 Factory* factory = isolate->factory(); 5876 Factory* factory = isolate->factory();
5874 HandleScope scope(isolate); 5877 HandleScope scope(isolate);
5875 CompileRun("function cls() { this.x = 10; }"); 5878 CompileRun("function cls() { this.x = 10; }");
5876 Handle<WeakCell> weak_prototype; 5879 Handle<WeakCell> weak_prototype;
5877 { 5880 {
5878 HandleScope inner_scope(isolate); 5881 HandleScope inner_scope(isolate);
5879 v8::Local<v8::Value> result = CompileRun("cls.prototype"); 5882 v8::Local<v8::Value> result = CompileRun("cls.prototype");
5880 Handle<JSReceiver> proto = 5883 Handle<JSReceiver> proto =
5881 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result)); 5884 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
5882 weak_prototype = inner_scope.CloseAndEscape(factory->NewWeakCell(proto)); 5885 weak_prototype = inner_scope.CloseAndEscape(factory->NewWeakCell(proto));
5883 } 5886 }
5884 CHECK(!weak_prototype->cleared()); 5887 CHECK(!weak_prototype->cleared());
5885 CompileRun( 5888 CompileRun(
5886 "var a = { };" 5889 "var a = { };"
5887 "a.x = new cls();" 5890 "a.x = new cls();"
5888 "cls.prototype = null;"); 5891 "cls.prototype = null;");
5889 for (int i = 0; i < 4; i++) { 5892 for (int i = 0; i < 4; i++) {
5890 heap->CollectAllGarbage(); 5893 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
5891 } 5894 }
5892 // The map of a.x keeps prototype alive 5895 // The map of a.x keeps prototype alive
5893 CHECK(!weak_prototype->cleared()); 5896 CHECK(!weak_prototype->cleared());
5894 // Change the map of a.x and make the previous map garbage collectable. 5897 // Change the map of a.x and make the previous map garbage collectable.
5895 CompileRun("a.x.__proto__ = {};"); 5898 CompileRun("a.x.__proto__ = {};");
5896 for (int i = 0; i < 4; i++) { 5899 for (int i = 0; i < 4; i++) {
5897 heap->CollectAllGarbage(); 5900 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
5898 } 5901 }
5899 CHECK(weak_prototype->cleared()); 5902 CHECK(weak_prototype->cleared());
5900 } 5903 }
5901 5904
5902 5905
5903 Handle<WeakCell> AddRetainedMap(Isolate* isolate, Heap* heap) { 5906 Handle<WeakCell> AddRetainedMap(Isolate* isolate, Heap* heap) {
5904 HandleScope inner_scope(isolate); 5907 HandleScope inner_scope(isolate);
5905 Handle<Map> map = Map::Create(isolate, 1); 5908 Handle<Map> map = Map::Create(isolate, 1);
5906 v8::Local<v8::Value> result = 5909 v8::Local<v8::Value> result =
5907 CompileRun("(function () { return {x : 10}; })();"); 5910 CompileRun("(function () { return {x : 10}; })();");
5908 Handle<JSReceiver> proto = 5911 Handle<JSReceiver> proto =
5909 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result)); 5912 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
5910 Map::SetPrototype(map, proto); 5913 Map::SetPrototype(map, proto);
5911 heap->AddRetainedMap(map); 5914 heap->AddRetainedMap(map);
5912 return inner_scope.CloseAndEscape(Map::WeakCellForMap(map)); 5915 return inner_scope.CloseAndEscape(Map::WeakCellForMap(map));
5913 } 5916 }
5914 5917
5915 5918
5916 void CheckMapRetainingFor(int n) { 5919 void CheckMapRetainingFor(int n) {
5917 FLAG_retain_maps_for_n_gc = n; 5920 FLAG_retain_maps_for_n_gc = n;
5918 Isolate* isolate = CcTest::i_isolate(); 5921 Isolate* isolate = CcTest::i_isolate();
5919 Heap* heap = isolate->heap(); 5922 Heap* heap = isolate->heap();
5920 Handle<WeakCell> weak_cell = AddRetainedMap(isolate, heap); 5923 Handle<WeakCell> weak_cell = AddRetainedMap(isolate, heap);
5921 CHECK(!weak_cell->cleared()); 5924 CHECK(!weak_cell->cleared());
5922 for (int i = 0; i < n; i++) { 5925 for (int i = 0; i < n; i++) {
5923 heap::SimulateIncrementalMarking(heap); 5926 heap::SimulateIncrementalMarking(heap);
5924 heap->CollectGarbage(OLD_SPACE); 5927 CcTest::CollectGarbage(OLD_SPACE);
5925 } 5928 }
5926 CHECK(!weak_cell->cleared()); 5929 CHECK(!weak_cell->cleared());
5927 heap::SimulateIncrementalMarking(heap); 5930 heap::SimulateIncrementalMarking(heap);
5928 heap->CollectGarbage(OLD_SPACE); 5931 CcTest::CollectGarbage(OLD_SPACE);
5929 CHECK(weak_cell->cleared()); 5932 CHECK(weak_cell->cleared());
5930 } 5933 }
5931 5934
5932 5935
5933 TEST(MapRetaining) { 5936 TEST(MapRetaining) {
5934 CcTest::InitializeVM(); 5937 CcTest::InitializeVM();
5935 v8::HandleScope scope(CcTest::isolate()); 5938 v8::HandleScope scope(CcTest::isolate());
5936 CheckMapRetainingFor(FLAG_retain_maps_for_n_gc); 5939 CheckMapRetainingFor(FLAG_retain_maps_for_n_gc);
5937 CheckMapRetainingFor(0); 5940 CheckMapRetainingFor(0);
5938 CheckMapRetainingFor(1); 5941 CheckMapRetainingFor(1);
5939 CheckMapRetainingFor(7); 5942 CheckMapRetainingFor(7);
5940 } 5943 }
5941 5944
5942 5945
5943 TEST(RegressArrayListGC) { 5946 TEST(RegressArrayListGC) {
5944 FLAG_retain_maps_for_n_gc = 1; 5947 FLAG_retain_maps_for_n_gc = 1;
5945 FLAG_incremental_marking = 0; 5948 FLAG_incremental_marking = 0;
5946 FLAG_gc_global = true; 5949 FLAG_gc_global = true;
5947 CcTest::InitializeVM(); 5950 CcTest::InitializeVM();
5948 v8::HandleScope scope(CcTest::isolate()); 5951 v8::HandleScope scope(CcTest::isolate());
5949 Isolate* isolate = CcTest::i_isolate(); 5952 Isolate* isolate = CcTest::i_isolate();
5950 Heap* heap = isolate->heap(); 5953 Heap* heap = isolate->heap();
5951 AddRetainedMap(isolate, heap); 5954 AddRetainedMap(isolate, heap);
5952 Handle<Map> map = Map::Create(isolate, 1); 5955 Handle<Map> map = Map::Create(isolate, 1);
5953 heap->CollectGarbage(OLD_SPACE); 5956 CcTest::CollectGarbage(OLD_SPACE);
5954 // Force GC in old space on next addition of retained map. 5957 // Force GC in old space on next addition of retained map.
5955 Map::WeakCellForMap(map); 5958 Map::WeakCellForMap(map);
5956 heap::SimulateFullSpace(CcTest::heap()->new_space()); 5959 heap::SimulateFullSpace(CcTest::heap()->new_space());
5957 for (int i = 0; i < 10; i++) { 5960 for (int i = 0; i < 10; i++) {
5958 heap->AddRetainedMap(map); 5961 heap->AddRetainedMap(map);
5959 } 5962 }
5960 heap->CollectGarbage(OLD_SPACE); 5963 CcTest::CollectGarbage(OLD_SPACE);
5961 } 5964 }
5962 5965
5963 5966
5964 #ifdef DEBUG 5967 #ifdef DEBUG
5965 TEST(PathTracer) { 5968 TEST(PathTracer) {
5966 CcTest::InitializeVM(); 5969 CcTest::InitializeVM();
5967 v8::HandleScope scope(CcTest::isolate()); 5970 v8::HandleScope scope(CcTest::isolate());
5968 5971
5969 v8::Local<v8::Value> result = CompileRun("'abc'"); 5972 v8::Local<v8::Value> result = CompileRun("'abc'");
5970 Handle<Object> o = v8::Utils::OpenHandle(*result); 5973 Handle<Object> o = v8::Utils::OpenHandle(*result);
(...skipping 30 matching lines...)
6001 // Check that free space filler is at the right place and did not smash the 6004 // Check that free space filler is at the right place and did not smash the
6002 // array header. 6005 // array header.
6003 CHECK(array->IsFixedArrayBase()); 6006 CHECK(array->IsFixedArrayBase());
6004 CHECK_EQ(initial_length - elements_to_trim, array->length()); 6007 CHECK_EQ(initial_length - elements_to_trim, array->length());
6005 int new_size = array->size(); 6008 int new_size = array->size();
6006 if (new_size != old_size) { 6009 if (new_size != old_size) {
6007 // Free space filler should be created in this case. 6010 // Free space filler should be created in this case.
6008 Address next_obj_address = array->address() + array->size(); 6011 Address next_obj_address = array->address() + array->size();
6009 CHECK(HeapObject::FromAddress(next_obj_address)->IsFiller()); 6012 CHECK(HeapObject::FromAddress(next_obj_address)->IsFiller());
6010 } 6013 }
6011 heap->CollectAllAvailableGarbage(); 6014 CcTest::CollectAllAvailableGarbage();
6012 } 6015 }
6013 6016
6014 6017
6015 TEST(Regress472513) { 6018 TEST(Regress472513) {
6016 CcTest::InitializeVM(); 6019 CcTest::InitializeVM();
6017 v8::HandleScope scope(CcTest::isolate()); 6020 v8::HandleScope scope(CcTest::isolate());
6018 6021
6019 // The combination of type/initial_length/elements_to_trim triggered 6022 // The combination of type/initial_length/elements_to_trim triggered
6020 // typed array header smashing with free space filler (crbug/472513). 6023 // typed array header smashing with free space filler (crbug/472513).
6021 6024
(...skipping 36 matching lines...)
6058 CHECK(try_catch.HasCaught()); 6061 CHECK(try_catch.HasCaught());
6059 Isolate* isolate = CcTest::i_isolate(); 6062 Isolate* isolate = CcTest::i_isolate();
6060 Handle<Object> exception = v8::Utils::OpenHandle(*try_catch.Exception()); 6063 Handle<Object> exception = v8::Utils::OpenHandle(*try_catch.Exception());
6061 Handle<Name> key = isolate->factory()->stack_trace_symbol(); 6064 Handle<Name> key = isolate->factory()->stack_trace_symbol();
6062 Handle<Object> stack_trace = 6065 Handle<Object> stack_trace =
6063 Object::GetProperty(exception, key).ToHandleChecked(); 6066 Object::GetProperty(exception, key).ToHandleChecked();
6064 Handle<Object> code = 6067 Handle<Object> code =
6065 Object::GetElement(isolate, stack_trace, 3).ToHandleChecked(); 6068 Object::GetElement(isolate, stack_trace, 3).ToHandleChecked();
6066 CHECK(code->IsCode()); 6069 CHECK(code->IsCode());
6067 6070
6068 isolate->heap()->CollectAllAvailableGarbage("stack trace preprocessing"); 6071 CcTest::CollectAllAvailableGarbage();
6069 6072
6070 Handle<Object> pos = 6073 Handle<Object> pos =
6071 Object::GetElement(isolate, stack_trace, 3).ToHandleChecked(); 6074 Object::GetElement(isolate, stack_trace, 3).ToHandleChecked();
6072 CHECK(pos->IsSmi()); 6075 CHECK(pos->IsSmi());
6073 6076
6074 Handle<JSArray> stack_trace_array = Handle<JSArray>::cast(stack_trace); 6077 Handle<JSArray> stack_trace_array = Handle<JSArray>::cast(stack_trace);
6075 int array_length = Smi::cast(stack_trace_array->length())->value(); 6078 int array_length = Smi::cast(stack_trace_array->length())->value();
6076 for (int i = 0; i < array_length; i++) { 6079 for (int i = 0; i < array_length; i++) {
6077 Handle<Object> element = 6080 Handle<Object> element =
6078 Object::GetElement(isolate, stack_trace, i).ToHandleChecked(); 6081 Object::GetElement(isolate, stack_trace, i).ToHandleChecked();
(...skipping 32 matching lines...)
6111 ->Get(env.local(), name) 6114 ->Get(env.local(), name)
6112 .ToLocalChecked() 6115 .ToLocalChecked()
6113 ->ToObject(env.local()) 6116 ->ToObject(env.local())
6114 .ToLocalChecked()); 6117 .ToLocalChecked());
6115 CHECK(CcTest::global()->Delete(env.local(), name).FromJust()); 6118 CHECK(CcTest::global()->Delete(env.local(), name).FromJust());
6116 } 6119 }
6117 6120
6118 utils.SetWeak(&utils, UtilsHasBeenCollected, 6121 utils.SetWeak(&utils, UtilsHasBeenCollected,
6119 v8::WeakCallbackType::kParameter); 6122 v8::WeakCallbackType::kParameter);
6120 6123
6121 CcTest::heap()->CollectAllAvailableGarbage("fire weak callbacks"); 6124 CcTest::CollectAllAvailableGarbage();
6122 6125
6123 CHECK(utils_has_been_collected); 6126 CHECK(utils_has_been_collected);
6124 } 6127 }
6125 6128
6126 6129
6127 TEST(Regress1878) { 6130 TEST(Regress1878) {
6128 FLAG_allow_natives_syntax = true; 6131 FLAG_allow_natives_syntax = true;
6129 CcTest::InitializeVM(); 6132 CcTest::InitializeVM();
6130 v8::Isolate* isolate = CcTest::isolate(); 6133 v8::Isolate* isolate = CcTest::isolate();
6131 v8::HandleScope scope(isolate); 6134 v8::HandleScope scope(isolate);
(...skipping 37 matching lines...)
6169 CHECK_EQ(bytes, static_cast<size_t>(array->Size())); 6172 CHECK_EQ(bytes, static_cast<size_t>(array->Size()));
6170 } 6173 }
6171 6174
6172 6175
6173 TEST(NewSpaceAllocationCounter) { 6176 TEST(NewSpaceAllocationCounter) {
6174 CcTest::InitializeVM(); 6177 CcTest::InitializeVM();
6175 v8::HandleScope scope(CcTest::isolate()); 6178 v8::HandleScope scope(CcTest::isolate());
6176 Isolate* isolate = CcTest::i_isolate(); 6179 Isolate* isolate = CcTest::i_isolate();
6177 Heap* heap = isolate->heap(); 6180 Heap* heap = isolate->heap();
6178 size_t counter1 = heap->NewSpaceAllocationCounter(); 6181 size_t counter1 = heap->NewSpaceAllocationCounter();
6179 heap->CollectGarbage(NEW_SPACE); 6182 CcTest::CollectGarbage(NEW_SPACE);
6180 const size_t kSize = 1024; 6183 const size_t kSize = 1024;
6181 AllocateInSpace(isolate, kSize, NEW_SPACE); 6184 AllocateInSpace(isolate, kSize, NEW_SPACE);
6182 size_t counter2 = heap->NewSpaceAllocationCounter(); 6185 size_t counter2 = heap->NewSpaceAllocationCounter();
6183 CHECK_EQ(kSize, counter2 - counter1); 6186 CHECK_EQ(kSize, counter2 - counter1);
6184 heap->CollectGarbage(NEW_SPACE); 6187 CcTest::CollectGarbage(NEW_SPACE);
6185 size_t counter3 = heap->NewSpaceAllocationCounter(); 6188 size_t counter3 = heap->NewSpaceAllocationCounter();
6186 CHECK_EQ(0U, counter3 - counter2); 6189 CHECK_EQ(0U, counter3 - counter2);
6187 // Test counter overflow. 6190 // Test counter overflow.
6188 size_t max_counter = -1; 6191 size_t max_counter = -1;
6189 heap->set_new_space_allocation_counter(max_counter - 10 * kSize); 6192 heap->set_new_space_allocation_counter(max_counter - 10 * kSize);
6190 size_t start = heap->NewSpaceAllocationCounter(); 6193 size_t start = heap->NewSpaceAllocationCounter();
6191 for (int i = 0; i < 20; i++) { 6194 for (int i = 0; i < 20; i++) {
6192 AllocateInSpace(isolate, kSize, NEW_SPACE); 6195 AllocateInSpace(isolate, kSize, NEW_SPACE);
6193 size_t counter = heap->NewSpaceAllocationCounter(); 6196 size_t counter = heap->NewSpaceAllocationCounter();
6194 CHECK_EQ(kSize, counter - start); 6197 CHECK_EQ(kSize, counter - start);
6195 start = counter; 6198 start = counter;
6196 } 6199 }
6197 } 6200 }
6198 6201
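The counter-overflow part of the test above relies on unsigned wrap-around: size_t subtraction is defined modulo 2^N, so counter - start still equals the number of bytes allocated since the last sample even after the counter wraps past its maximum. A standalone illustration with assumed values, not tied to the test itself:

#include <cassert>
#include <cstddef>
#include <cstdint>

int main() {
  const size_t kSize = 1024;
  size_t start = SIZE_MAX - 10 * kSize;  // counter close to wrapping
  size_t counter = start + 20 * kSize;   // wraps past SIZE_MAX
  // Unsigned arithmetic is modulo 2^N, so the delta is still exact.
  assert(counter - start == 20 * kSize);
  return 0;
}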
6199 6202
6200 TEST(OldSpaceAllocationCounter) { 6203 TEST(OldSpaceAllocationCounter) {
6201 CcTest::InitializeVM(); 6204 CcTest::InitializeVM();
6202 v8::HandleScope scope(CcTest::isolate()); 6205 v8::HandleScope scope(CcTest::isolate());
6203 Isolate* isolate = CcTest::i_isolate(); 6206 Isolate* isolate = CcTest::i_isolate();
6204 Heap* heap = isolate->heap(); 6207 Heap* heap = isolate->heap();
6205 size_t counter1 = heap->OldGenerationAllocationCounter(); 6208 size_t counter1 = heap->OldGenerationAllocationCounter();
6206 heap->CollectGarbage(NEW_SPACE); 6209 CcTest::CollectGarbage(NEW_SPACE);
6207 heap->CollectGarbage(NEW_SPACE); 6210 CcTest::CollectGarbage(NEW_SPACE);
6208 const size_t kSize = 1024; 6211 const size_t kSize = 1024;
6209 AllocateInSpace(isolate, kSize, OLD_SPACE); 6212 AllocateInSpace(isolate, kSize, OLD_SPACE);
6210 size_t counter2 = heap->OldGenerationAllocationCounter(); 6213 size_t counter2 = heap->OldGenerationAllocationCounter();
6211 // TODO(ulan): replace all CHECK_LE with CHECK_EQ after v8:4148 is fixed. 6214 // TODO(ulan): replace all CHECK_LE with CHECK_EQ after v8:4148 is fixed.
6212 CHECK_LE(kSize, counter2 - counter1); 6215 CHECK_LE(kSize, counter2 - counter1);
6213 heap->CollectGarbage(NEW_SPACE); 6216 CcTest::CollectGarbage(NEW_SPACE);
6214 size_t counter3 = heap->OldGenerationAllocationCounter(); 6217 size_t counter3 = heap->OldGenerationAllocationCounter();
6215 CHECK_EQ(0u, counter3 - counter2); 6218 CHECK_EQ(0u, counter3 - counter2);
6216 AllocateInSpace(isolate, kSize, OLD_SPACE); 6219 AllocateInSpace(isolate, kSize, OLD_SPACE);
6217 heap->CollectGarbage(OLD_SPACE); 6220 CcTest::CollectGarbage(OLD_SPACE);
6218 size_t counter4 = heap->OldGenerationAllocationCounter(); 6221 size_t counter4 = heap->OldGenerationAllocationCounter();
6219 CHECK_LE(kSize, counter4 - counter3); 6222 CHECK_LE(kSize, counter4 - counter3);
6220 // Test counter overflow. 6223 // Test counter overflow.
6221 size_t max_counter = -1; 6224 size_t max_counter = -1;
6222 heap->set_old_generation_allocation_counter(max_counter - 10 * kSize); 6225 heap->set_old_generation_allocation_counter(max_counter - 10 * kSize);
6223 size_t start = heap->OldGenerationAllocationCounter(); 6226 size_t start = heap->OldGenerationAllocationCounter();
6224 for (int i = 0; i < 20; i++) { 6227 for (int i = 0; i < 20; i++) {
6225 AllocateInSpace(isolate, kSize, OLD_SPACE); 6228 AllocateInSpace(isolate, kSize, OLD_SPACE);
6226 size_t counter = heap->OldGenerationAllocationCounter(); 6229 size_t counter = heap->OldGenerationAllocationCounter();
6227 CHECK_LE(kSize, counter - start); 6230 CHECK_LE(kSize, counter - start);
(...skipping 54 matching lines...)
6282 } 6285 }
6283 6286
6284 6287
6285 static void RemoveCodeAndGC(const v8::FunctionCallbackInfo<v8::Value>& args) { 6288 static void RemoveCodeAndGC(const v8::FunctionCallbackInfo<v8::Value>& args) {
6286 Isolate* isolate = CcTest::i_isolate(); 6289 Isolate* isolate = CcTest::i_isolate();
6287 Handle<Object> obj = v8::Utils::OpenHandle(*args[0]); 6290 Handle<Object> obj = v8::Utils::OpenHandle(*args[0]);
6288 Handle<JSFunction> fun = Handle<JSFunction>::cast(obj); 6291 Handle<JSFunction> fun = Handle<JSFunction>::cast(obj);
6289 fun->ReplaceCode(*isolate->builtins()->CompileLazy()); 6292 fun->ReplaceCode(*isolate->builtins()->CompileLazy());
6290 fun->shared()->ReplaceCode(*isolate->builtins()->CompileLazy()); 6293 fun->shared()->ReplaceCode(*isolate->builtins()->CompileLazy());
6291 fun->shared()->ClearBytecodeArray(); // Bytecode is code too. 6294 fun->shared()->ClearBytecodeArray(); // Bytecode is code too.
6292 isolate->heap()->CollectAllAvailableGarbage("remove code and gc"); 6295 CcTest::CollectAllAvailableGarbage();
6293 } 6296 }
6294 6297
6295 6298
6296 TEST(CanonicalSharedFunctionInfo) { 6299 TEST(CanonicalSharedFunctionInfo) {
6297 CcTest::InitializeVM(); 6300 CcTest::InitializeVM();
6298 v8::Isolate* isolate = CcTest::isolate(); 6301 v8::Isolate* isolate = CcTest::isolate();
6299 v8::HandleScope scope(isolate); 6302 v8::HandleScope scope(isolate);
6300 v8::Local<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate); 6303 v8::Local<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
6301 global->Set(isolate, "check", v8::FunctionTemplate::New( 6304 global->Set(isolate, "check", v8::FunctionTemplate::New(
6302 isolate, CheckEqualSharedFunctionInfos)); 6305 isolate, CheckEqualSharedFunctionInfos));
(...skipping 66 matching lines...)
6369 } 6372 }
6370 6373
6371 6374
6372 TEST(ScriptIterator) { 6375 TEST(ScriptIterator) {
6373 CcTest::InitializeVM(); 6376 CcTest::InitializeVM();
6374 v8::HandleScope scope(CcTest::isolate()); 6377 v8::HandleScope scope(CcTest::isolate());
6375 Isolate* isolate = CcTest::i_isolate(); 6378 Isolate* isolate = CcTest::i_isolate();
6376 Heap* heap = CcTest::heap(); 6379 Heap* heap = CcTest::heap();
6377 LocalContext context; 6380 LocalContext context;
6378 6381
6379 heap->CollectAllGarbage(); 6382 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
6380 6383
6381 int script_count = 0; 6384 int script_count = 0;
6382 { 6385 {
6383 HeapIterator it(heap); 6386 HeapIterator it(heap);
6384 for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) { 6387 for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
6385 if (obj->IsScript()) script_count++; 6388 if (obj->IsScript()) script_count++;
6386 } 6389 }
6387 } 6390 }
6388 6391
6389 { 6392 {
6390 Script::Iterator iterator(isolate); 6393 Script::Iterator iterator(isolate);
6391 while (iterator.Next()) script_count--; 6394 while (iterator.Next()) script_count--;
6392 } 6395 }
6393 6396
6394 CHECK_EQ(0, script_count); 6397 CHECK_EQ(0, script_count);
6395 } 6398 }
6396 6399
6397 6400
6398 TEST(SharedFunctionInfoIterator) { 6401 TEST(SharedFunctionInfoIterator) {
6399 CcTest::InitializeVM(); 6402 CcTest::InitializeVM();
6400 v8::HandleScope scope(CcTest::isolate()); 6403 v8::HandleScope scope(CcTest::isolate());
6401 Isolate* isolate = CcTest::i_isolate(); 6404 Isolate* isolate = CcTest::i_isolate();
6402 Heap* heap = CcTest::heap(); 6405 Heap* heap = CcTest::heap();
6403 LocalContext context; 6406 LocalContext context;
6404 6407
6405 heap->CollectAllGarbage(); 6408 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
6406 heap->CollectAllGarbage(); 6409 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
6407 6410
6408 int sfi_count = 0; 6411 int sfi_count = 0;
6409 { 6412 {
6410 HeapIterator it(heap); 6413 HeapIterator it(heap);
6411 for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) { 6414 for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
6412 if (!obj->IsSharedFunctionInfo()) continue; 6415 if (!obj->IsSharedFunctionInfo()) continue;
6413 sfi_count++; 6416 sfi_count++;
6414 } 6417 }
6415 } 6418 }
6416 6419
(...skipping 19 matching lines...)
6436 Heap* heap = CcTest::heap(); 6439 Heap* heap = CcTest::heap();
6437 LocalContext context; 6440 LocalContext context;
6438 6441
6439 v8::Persistent<Value> parent; 6442 v8::Persistent<Value> parent;
6440 v8::Persistent<Value> child; 6443 v8::Persistent<Value> child;
6441 6444
6442 parent.Reset(isolate, v8::Object::New(isolate)); 6445 parent.Reset(isolate, v8::Object::New(isolate));
6443 child.Reset(isolate, v8::Object::New(isolate)); 6446 child.Reset(isolate, v8::Object::New(isolate));
6444 6447
6445 heap::SimulateFullSpace(heap->old_space()); 6448 heap::SimulateFullSpace(heap->old_space());
6446 heap->CollectGarbage(OLD_SPACE); 6449 CcTest::CollectGarbage(OLD_SPACE);
6447 { 6450 {
6448 UniqueId id = MakeUniqueId(parent); 6451 UniqueId id = MakeUniqueId(parent);
6449 isolate->SetObjectGroupId(parent, id); 6452 isolate->SetObjectGroupId(parent, id);
6450 isolate->SetReferenceFromGroup(id, child); 6453 isolate->SetReferenceFromGroup(id, child);
6451 } 6454 }
6452 // The CollectGarbage call above starts sweeper threads. 6455 // The CollectGarbage call above starts sweeper threads.
6453 // The crash will happen if the following two functions 6456 // The crash will happen if the following two functions
6454 // are called before sweeping finishes. 6457 // are called before sweeping finishes.
6455 heap->StartIncrementalMarking(); 6458 heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
6456 heap->FinalizeIncrementalMarkingIfComplete("test"); 6459 i::GarbageCollectionReason::kTesting);
6460 heap->FinalizeIncrementalMarkingIfComplete(
6461 i::GarbageCollectionReason::kTesting);
6457 } 6462 }
6458 6463
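Reviewer note: the IsStopped() check followed by StartIncrementalMarking(i::Heap::kNoGCFlags, i::GarbageCollectionReason::kTesting) now appears in several tests in this file. A small helper along these lines could cut the repetition; the name is hypothetical and this is only a suggestion built from calls already present in the diff:

// Hypothetical helper (not part of this patch): start incremental marking for
// a test if it is not already running.
static void EnsureIncrementalMarkingStarted(i::Heap* heap) {
  i::IncrementalMarking* marking = heap->incremental_marking();
  if (marking->IsStopped()) {
    heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
                                  i::GarbageCollectionReason::kTesting);
  }
  CHECK(marking->IsMarking());
}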
6459 6464
6460 HEAP_TEST(TestMemoryReducerSampleJsCalls) { 6465 HEAP_TEST(TestMemoryReducerSampleJsCalls) {
6461 CcTest::InitializeVM(); 6466 CcTest::InitializeVM();
6462 v8::HandleScope scope(CcTest::isolate()); 6467 v8::HandleScope scope(CcTest::isolate());
6463 Heap* heap = CcTest::heap(); 6468 Heap* heap = CcTest::heap();
6464 Isolate* isolate = CcTest::i_isolate(); 6469 Isolate* isolate = CcTest::i_isolate();
6465 MemoryReducer* memory_reducer = heap->memory_reducer_; 6470 MemoryReducer* memory_reducer = heap->memory_reducer_;
6466 memory_reducer->SampleAndGetJsCallsPerMs(0); 6471 memory_reducer->SampleAndGetJsCallsPerMs(0);
(...skipping 26 matching lines...)
6493 Factory* factory = isolate->factory(); 6498 Factory* factory = isolate->factory();
6494 const int N = 6499 const int N =
6495 (kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) / kPointerSize; 6500 (kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) / kPointerSize;
6496 Handle<FixedArray> array = factory->NewFixedArray(N, TENURED); 6501 Handle<FixedArray> array = factory->NewFixedArray(N, TENURED);
6497 CHECK(heap->old_space()->Contains(*array)); 6502 CHECK(heap->old_space()->Contains(*array));
6498 Handle<Object> number = factory->NewHeapNumber(1.0); 6503 Handle<Object> number = factory->NewHeapNumber(1.0);
6499 CHECK(heap->InNewSpace(*number)); 6504 CHECK(heap->InNewSpace(*number));
6500 for (int i = 0; i < N; i++) { 6505 for (int i = 0; i < N; i++) {
6501 array->set(i, *number); 6506 array->set(i, *number);
6502 } 6507 }
6503 heap->CollectGarbage(OLD_SPACE); 6508 CcTest::CollectGarbage(OLD_SPACE);
6504 heap::SimulateFullSpace(heap->old_space()); 6509 heap::SimulateFullSpace(heap->old_space());
6505 heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(*array, N - 1); 6510 heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(*array, N - 1);
6506 heap->mark_compact_collector()->EnsureSweepingCompleted(); 6511 heap->mark_compact_collector()->EnsureSweepingCompleted();
6507 ByteArray* byte_array; 6512 ByteArray* byte_array;
6508 const int M = 256; 6513 const int M = 256;
6509 // Don't allow old space expansion. The test works without this flag too, 6514 // Don't allow old space expansion. The test works without this flag too,
6510 // but becomes very slow. 6515 // but becomes very slow.
6511 heap->set_force_oom(true); 6516 heap->set_force_oom(true);
6512 while (heap->AllocateByteArray(M, TENURED).To(&byte_array)) { 6517 while (heap->AllocateByteArray(M, TENURED).To(&byte_array)) {
6513 for (int j = 0; j < M; j++) { 6518 for (int j = 0; j < M; j++) {
6514 byte_array->set(j, 0x31); 6519 byte_array->set(j, 0x31);
6515 } 6520 }
6516 } 6521 }
6517 // Re-enable old space expansion to avoid OOM crash. 6522 // Re-enable old space expansion to avoid OOM crash.
6518 heap->set_force_oom(false); 6523 heap->set_force_oom(false);
6519 heap->CollectGarbage(NEW_SPACE); 6524 CcTest::CollectGarbage(NEW_SPACE);
6520 } 6525 }
6521 6526
6522 HEAP_TEST(Regress589413) { 6527 HEAP_TEST(Regress589413) {
6523 FLAG_stress_compaction = true; 6528 FLAG_stress_compaction = true;
6524 FLAG_manual_evacuation_candidates_selection = true; 6529 FLAG_manual_evacuation_candidates_selection = true;
6525 FLAG_parallel_compaction = false; 6530 FLAG_parallel_compaction = false;
6526 FLAG_concurrent_sweeping = false; 6531 FLAG_concurrent_sweeping = false;
6527 CcTest::InitializeVM(); 6532 CcTest::InitializeVM();
6528 v8::HandleScope scope(CcTest::isolate()); 6533 v8::HandleScope scope(CcTest::isolate());
6529 Heap* heap = CcTest::heap(); 6534 Heap* heap = CcTest::heap();
6530 // Get the heap in clean state. 6535 // Get the heap in clean state.
6531 heap->CollectGarbage(OLD_SPACE); 6536 CcTest::CollectGarbage(OLD_SPACE);
6532 heap->CollectGarbage(OLD_SPACE); 6537 CcTest::CollectGarbage(OLD_SPACE);
6533 Isolate* isolate = CcTest::i_isolate(); 6538 Isolate* isolate = CcTest::i_isolate();
6534 Factory* factory = isolate->factory(); 6539 Factory* factory = isolate->factory();
6535 // Fill the new space with byte arrays with elements looking like pointers. 6540 // Fill the new space with byte arrays with elements looking like pointers.
6536 const int M = 256; 6541 const int M = 256;
6537 ByteArray* byte_array; 6542 ByteArray* byte_array;
6538 while (heap->AllocateByteArray(M).To(&byte_array)) { 6543 while (heap->AllocateByteArray(M).To(&byte_array)) {
6539 for (int j = 0; j < M; j++) { 6544 for (int j = 0; j < M; j++) {
6540 byte_array->set(j, 0x31); 6545 byte_array->set(j, 0x31);
6541 } 6546 }
6542 // Add the array in root set. 6547 // Add the array in root set.
6543 handle(byte_array); 6548 handle(byte_array);
6544 } 6549 }
6545 // Make sure the byte arrays will be promoted on the next GC. 6550 // Make sure the byte arrays will be promoted on the next GC.
6546 heap->CollectGarbage(NEW_SPACE); 6551 CcTest::CollectGarbage(NEW_SPACE);
6547 // This number is close to large free list category threshold. 6552 // This number is close to large free list category threshold.
6548 const int N = 0x3eee; 6553 const int N = 0x3eee;
6549 { 6554 {
6550 std::vector<FixedArray*> arrays; 6555 std::vector<FixedArray*> arrays;
6551 std::set<Page*> pages; 6556 std::set<Page*> pages;
6552 FixedArray* array; 6557 FixedArray* array;
6553 // Fill all pages with fixed arrays. 6558 // Fill all pages with fixed arrays.
6554 heap->set_force_oom(true); 6559 heap->set_force_oom(true);
6555 while (heap->AllocateFixedArray(N, TENURED).To(&array)) { 6560 while (heap->AllocateFixedArray(N, TENURED).To(&array)) {
6556 arrays.push_back(array); 6561 arrays.push_back(array);
(...skipping 27 matching lines...)
6584 } 6589 }
6585 } 6590 }
6586 } 6591 }
6587 heap::SimulateIncrementalMarking(heap); 6592 heap::SimulateIncrementalMarking(heap);
6588 for (size_t j = 0; j < arrays.size(); j++) { 6593 for (size_t j = 0; j < arrays.size(); j++) {
6589 heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(arrays[j], N - 1); 6594 heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(arrays[j], N - 1);
6590 } 6595 }
6591 } 6596 }
6592 // Force allocation from the free list. 6597 // Force allocation from the free list.
6593 heap->set_force_oom(true); 6598 heap->set_force_oom(true);
6594 heap->CollectGarbage(OLD_SPACE); 6599 CcTest::CollectGarbage(OLD_SPACE);
6595 } 6600 }
6596 6601
6597 TEST(Regress598319) { 6602 TEST(Regress598319) {
6598 // This test ensures that no white objects can cross the progress bar of large 6603 // This test ensures that no white objects can cross the progress bar of large
6599 // objects during incremental marking. It checks this by using Shift() during 6604 // objects during incremental marking. It checks this by using Shift() during
6600 // incremental marking. 6605 // incremental marking.
6601 CcTest::InitializeVM(); 6606 CcTest::InitializeVM();
6602 v8::HandleScope scope(CcTest::isolate()); 6607 v8::HandleScope scope(CcTest::isolate());
6603 Heap* heap = CcTest::heap(); 6608 Heap* heap = CcTest::heap();
6604 Isolate* isolate = heap->isolate(); 6609 Isolate* isolate = heap->isolate();
(...skipping 20 matching lines...)
6625 6630
6626 Handle<FixedArray> root; 6631 Handle<FixedArray> root;
6627 } arr(isolate, kNumberOfObjects); 6632 } arr(isolate, kNumberOfObjects);
6628 6633
6629 CHECK_EQ(arr.get()->length(), kNumberOfObjects); 6634 CHECK_EQ(arr.get()->length(), kNumberOfObjects);
6630 CHECK(heap->lo_space()->Contains(arr.get())); 6635 CHECK(heap->lo_space()->Contains(arr.get()));
6631 LargePage* page = heap->lo_space()->FindPage(arr.get()->address()); 6636 LargePage* page = heap->lo_space()->FindPage(arr.get()->address());
6632 CHECK_NOT_NULL(page); 6637 CHECK_NOT_NULL(page);
6633 6638
6634 // GC to cleanup state 6639 // GC to cleanup state
6635 heap->CollectGarbage(OLD_SPACE); 6640 CcTest::CollectGarbage(OLD_SPACE);
6636 MarkCompactCollector* collector = heap->mark_compact_collector(); 6641 MarkCompactCollector* collector = heap->mark_compact_collector();
6637 if (collector->sweeping_in_progress()) { 6642 if (collector->sweeping_in_progress()) {
6638 collector->EnsureSweepingCompleted(); 6643 collector->EnsureSweepingCompleted();
6639 } 6644 }
6640 6645
6641 CHECK(heap->lo_space()->Contains(arr.get())); 6646 CHECK(heap->lo_space()->Contains(arr.get()));
6642 CHECK(Marking::IsWhite(ObjectMarking::MarkBitFrom(arr.get()))); 6647 CHECK(Marking::IsWhite(ObjectMarking::MarkBitFrom(arr.get())));
6643 for (int i = 0; i < arr.get()->length(); i++) { 6648 for (int i = 0; i < arr.get()->length(); i++) {
6644 CHECK(Marking::IsWhite( 6649 CHECK(Marking::IsWhite(
6645 ObjectMarking::MarkBitFrom(HeapObject::cast(arr.get()->get(i))))); 6650 ObjectMarking::MarkBitFrom(HeapObject::cast(arr.get()->get(i)))));
6646 } 6651 }
6647 6652
6648 // Start incremental marking. 6653 // Start incremental marking.
6649 IncrementalMarking* marking = heap->incremental_marking(); 6654 IncrementalMarking* marking = heap->incremental_marking();
6650 CHECK(marking->IsMarking() || marking->IsStopped()); 6655 CHECK(marking->IsMarking() || marking->IsStopped());
6651 if (marking->IsStopped()) { 6656 if (marking->IsStopped()) {
6652 heap->StartIncrementalMarking(); 6657 heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
6658 i::GarbageCollectionReason::kTesting);
6653 } 6659 }
6654 CHECK(marking->IsMarking()); 6660 CHECK(marking->IsMarking());
6655 6661
6656 // Check that we have not marked the interesting array during root scanning. 6662 // Check that we have not marked the interesting array during root scanning.
6657 for (int i = 0; i < arr.get()->length(); i++) { 6663 for (int i = 0; i < arr.get()->length(); i++) {
6658 CHECK(Marking::IsWhite( 6664 CHECK(Marking::IsWhite(
6659 ObjectMarking::MarkBitFrom(HeapObject::cast(arr.get()->get(i))))); 6665 ObjectMarking::MarkBitFrom(HeapObject::cast(arr.get()->get(i)))));
6660 } 6666 }
6661 6667
6662 // Now we search for a state where we are in incremental marking and have 6668 // Now we search for a state where we are in incremental marking and have
(...skipping 45 matching lines...)
6708 intptr_t size_after = heap->SizeOfObjects(); 6714 intptr_t size_after = heap->SizeOfObjects();
6709 CHECK_EQ(size_after, size_before + array->Size()); 6715 CHECK_EQ(size_after, size_before + array->Size());
6710 } 6716 }
6711 6717
6712 TEST(Regress615489) { 6718 TEST(Regress615489) {
6713 FLAG_black_allocation = true; 6719 FLAG_black_allocation = true;
6714 CcTest::InitializeVM(); 6720 CcTest::InitializeVM();
6715 v8::HandleScope scope(CcTest::isolate()); 6721 v8::HandleScope scope(CcTest::isolate());
6716 Heap* heap = CcTest::heap(); 6722 Heap* heap = CcTest::heap();
6717 Isolate* isolate = heap->isolate(); 6723 Isolate* isolate = heap->isolate();
6718 heap->CollectAllGarbage(); 6724 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
6719 6725
6720 i::MarkCompactCollector* collector = heap->mark_compact_collector(); 6726 i::MarkCompactCollector* collector = heap->mark_compact_collector();
6721 i::IncrementalMarking* marking = heap->incremental_marking(); 6727 i::IncrementalMarking* marking = heap->incremental_marking();
6722 if (collector->sweeping_in_progress()) { 6728 if (collector->sweeping_in_progress()) {
6723 collector->EnsureSweepingCompleted(); 6729 collector->EnsureSweepingCompleted();
6724 } 6730 }
6725 CHECK(marking->IsMarking() || marking->IsStopped()); 6731 CHECK(marking->IsMarking() || marking->IsStopped());
6726 if (marking->IsStopped()) { 6732 if (marking->IsStopped()) {
6727 heap->StartIncrementalMarking(); 6733 heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
6734 i::GarbageCollectionReason::kTesting);
6728 } 6735 }
6729 CHECK(marking->IsMarking()); 6736 CHECK(marking->IsMarking());
6730 marking->StartBlackAllocationForTesting(); 6737 marking->StartBlackAllocationForTesting();
6731 { 6738 {
6732 AlwaysAllocateScope always_allocate(CcTest::i_isolate()); 6739 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
6733 v8::HandleScope inner(CcTest::isolate()); 6740 v8::HandleScope inner(CcTest::isolate());
6734 isolate->factory()->NewFixedArray(500, TENURED)->Size(); 6741 isolate->factory()->NewFixedArray(500, TENURED)->Size();
6735 } 6742 }
6736 while (!marking->IsComplete()) { 6743 while (!marking->IsComplete()) {
6737 marking->Step(i::MB, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD, 6744 marking->Step(i::MB, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
6738 IncrementalMarking::FORCE_COMPLETION); 6745 IncrementalMarking::FORCE_COMPLETION);
6739 if (marking->IsReadyToOverApproximateWeakClosure()) { 6746 if (marking->IsReadyToOverApproximateWeakClosure()) {
6740 marking->FinalizeIncrementally(); 6747 marking->FinalizeIncrementally();
6741 } 6748 }
6742 } 6749 }
6743 CHECK(marking->IsComplete()); 6750 CHECK(marking->IsComplete());
6744 intptr_t size_before = heap->SizeOfObjects(); 6751 intptr_t size_before = heap->SizeOfObjects();
6745 CcTest::heap()->CollectAllGarbage(); 6752 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
6746 intptr_t size_after = heap->SizeOfObjects(); 6753 intptr_t size_after = heap->SizeOfObjects();
6747 // Live size does not increase after garbage collection. 6754 // Live size does not increase after garbage collection.
6748 CHECK_LE(size_after, size_before); 6755 CHECK_LE(size_after, size_before);
6749 } 6756 }
6750 6757
6751 class StaticOneByteResource : public v8::String::ExternalOneByteStringResource { 6758 class StaticOneByteResource : public v8::String::ExternalOneByteStringResource {
6752 public: 6759 public:
6753 explicit StaticOneByteResource(const char* data) : data_(data) {} 6760 explicit StaticOneByteResource(const char* data) : data_(data) {}
6754 6761
6755 ~StaticOneByteResource() {} 6762 ~StaticOneByteResource() {}
6756 6763
6757 const char* data() const { return data_; } 6764 const char* data() const { return data_; }
6758 6765
6759 size_t length() const { return strlen(data_); } 6766 size_t length() const { return strlen(data_); }
6760 6767
6761 private: 6768 private:
6762 const char* data_; 6769 const char* data_;
6763 }; 6770 };
6764 6771
6765 TEST(Regress631969) { 6772 TEST(Regress631969) {
6766 FLAG_manual_evacuation_candidates_selection = true; 6773 FLAG_manual_evacuation_candidates_selection = true;
6767 FLAG_parallel_compaction = false; 6774 FLAG_parallel_compaction = false;
6768 FLAG_concurrent_sweeping = false; 6775 FLAG_concurrent_sweeping = false;
6769 CcTest::InitializeVM(); 6776 CcTest::InitializeVM();
6770 v8::HandleScope scope(CcTest::isolate()); 6777 v8::HandleScope scope(CcTest::isolate());
6771 Heap* heap = CcTest::heap(); 6778 Heap* heap = CcTest::heap();
6772 // Get the heap in clean state. 6779 // Get the heap in clean state.
6773 heap->CollectGarbage(OLD_SPACE); 6780 CcTest::CollectGarbage(OLD_SPACE);
6774 heap->CollectGarbage(OLD_SPACE); 6781 CcTest::CollectGarbage(OLD_SPACE);
6775 Isolate* isolate = CcTest::i_isolate(); 6782 Isolate* isolate = CcTest::i_isolate();
6776 Factory* factory = isolate->factory(); 6783 Factory* factory = isolate->factory();
6777 // Allocate two strings in a fresh page and mark the page as evacuation 6784 // Allocate two strings in a fresh page and mark the page as evacuation
6778 // candidate. 6785 // candidate.
6779 heap::SimulateFullSpace(heap->old_space()); 6786 heap::SimulateFullSpace(heap->old_space());
6780 Handle<String> s1 = factory->NewStringFromStaticChars("123456789", TENURED); 6787 Handle<String> s1 = factory->NewStringFromStaticChars("123456789", TENURED);
6781 Handle<String> s2 = factory->NewStringFromStaticChars("01234", TENURED); 6788 Handle<String> s2 = factory->NewStringFromStaticChars("01234", TENURED);
6782 Page::FromAddress(s1->address()) 6789 Page::FromAddress(s1->address())
6783 ->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING); 6790 ->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
6784 6791
6785 heap::SimulateIncrementalMarking(heap, false); 6792 heap::SimulateIncrementalMarking(heap, false);
6786 6793
6787 // Allocate a cons string and promote it to a fresh page in the old space. 6794 // Allocate a cons string and promote it to a fresh page in the old space.
6788 heap::SimulateFullSpace(heap->old_space()); 6795 heap::SimulateFullSpace(heap->old_space());
6789 Handle<String> s3; 6796 Handle<String> s3;
6790 factory->NewConsString(s1, s2).ToHandle(&s3); 6797 factory->NewConsString(s1, s2).ToHandle(&s3);
6791 heap->CollectGarbage(NEW_SPACE); 6798 CcTest::CollectGarbage(NEW_SPACE);
6792 heap->CollectGarbage(NEW_SPACE); 6799 CcTest::CollectGarbage(NEW_SPACE);
6793 6800
6794 // Finish incremental marking. 6801 // Finish incremental marking.
6795 IncrementalMarking* marking = heap->incremental_marking(); 6802 IncrementalMarking* marking = heap->incremental_marking();
6796 while (!marking->IsComplete()) { 6803 while (!marking->IsComplete()) {
6797 marking->Step(MB, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD, 6804 marking->Step(MB, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
6798 IncrementalMarking::FORCE_COMPLETION); 6805 IncrementalMarking::FORCE_COMPLETION);
6799 if (marking->IsReadyToOverApproximateWeakClosure()) { 6806 if (marking->IsReadyToOverApproximateWeakClosure()) {
6800 marking->FinalizeIncrementally(); 6807 marking->FinalizeIncrementally();
6801 } 6808 }
6802 } 6809 }
6803 6810
6804 { 6811 {
6805 StaticOneByteResource external_string("12345678901234"); 6812 StaticOneByteResource external_string("12345678901234");
6806 s3->MakeExternal(&external_string); 6813 s3->MakeExternal(&external_string);
6807 heap->CollectGarbage(OLD_SPACE); 6814 CcTest::CollectGarbage(OLD_SPACE);
6808 } 6815 }
6809 } 6816 }
6810 6817
6811 TEST(LeftTrimFixedArrayInBlackArea) { 6818 TEST(LeftTrimFixedArrayInBlackArea) {
6812 FLAG_black_allocation = true; 6819 FLAG_black_allocation = true;
6813 CcTest::InitializeVM(); 6820 CcTest::InitializeVM();
6814 v8::HandleScope scope(CcTest::isolate()); 6821 v8::HandleScope scope(CcTest::isolate());
6815 Heap* heap = CcTest::heap(); 6822 Heap* heap = CcTest::heap();
6816 Isolate* isolate = heap->isolate(); 6823 Isolate* isolate = heap->isolate();
6817 heap->CollectAllGarbage(); 6824 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
6818 6825
6819 i::MarkCompactCollector* collector = heap->mark_compact_collector(); 6826 i::MarkCompactCollector* collector = heap->mark_compact_collector();
6820 i::IncrementalMarking* marking = heap->incremental_marking(); 6827 i::IncrementalMarking* marking = heap->incremental_marking();
6821 if (collector->sweeping_in_progress()) { 6828 if (collector->sweeping_in_progress()) {
6822 collector->EnsureSweepingCompleted(); 6829 collector->EnsureSweepingCompleted();
6823 } 6830 }
6824 CHECK(marking->IsMarking() || marking->IsStopped()); 6831 CHECK(marking->IsMarking() || marking->IsStopped());
6825 if (marking->IsStopped()) { 6832 if (marking->IsStopped()) {
6826 heap->StartIncrementalMarking(); 6833 heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
6834 i::GarbageCollectionReason::kTesting);
6827 } 6835 }
6828 CHECK(marking->IsMarking()); 6836 CHECK(marking->IsMarking());
6829 marking->StartBlackAllocationForTesting(); 6837 marking->StartBlackAllocationForTesting();
6830 6838
6831 // Ensure that we allocate a new page, set up a bump pointer area, and 6839 // Ensure that we allocate a new page, set up a bump pointer area, and
6832 // perform the allocation in a black area. 6840 // perform the allocation in a black area.
6833 heap::SimulateFullSpace(heap->old_space()); 6841 heap::SimulateFullSpace(heap->old_space());
6834 isolate->factory()->NewFixedArray(4, TENURED); 6842 isolate->factory()->NewFixedArray(4, TENURED);
6835 Handle<FixedArray> array = isolate->factory()->NewFixedArray(50, TENURED); 6843 Handle<FixedArray> array = isolate->factory()->NewFixedArray(50, TENURED);
6836 CHECK(heap->old_space()->Contains(*array)); 6844 CHECK(heap->old_space()->Contains(*array));
6837 CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(*array))); 6845 CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(*array)));
6838 6846
6839 // Now left trim the allocated black area. A filler has to be installed 6847 // Now left trim the allocated black area. A filler has to be installed
6840 // for the trimmed area and all mark bits of the trimmed area have to be 6848 // for the trimmed area and all mark bits of the trimmed area have to be
6841 // cleared. 6849 // cleared.
6842 FixedArrayBase* trimmed = heap->LeftTrimFixedArray(*array, 10); 6850 FixedArrayBase* trimmed = heap->LeftTrimFixedArray(*array, 10);
6843 CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(trimmed))); 6851 CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(trimmed)));
6844 6852
6845 heap::GcAndSweep(heap, OLD_SPACE); 6853 heap::GcAndSweep(heap, OLD_SPACE);
6846 } 6854 }
6847 6855
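The black-area tests in this chunk all begin with the same preamble to get incremental marking into a known state before allocating. Purely as an illustration of that shared sequence (a hypothetical helper, not something this patch adds), it could be factored as:

// Hypothetical helper, not part of this patch: finish pending sweeping,
// ensure incremental marking is running, then enable black allocation.
static void StartMarkingWithBlackAllocation(i::Heap* heap) {
  i::MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->sweeping_in_progress()) {
    collector->EnsureSweepingCompleted();
  }
  i::IncrementalMarking* marking = heap->incremental_marking();
  if (marking->IsStopped()) {
    heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
                                  i::GarbageCollectionReason::kTesting);
  }
  CHECK(marking->IsMarking());
  marking->StartBlackAllocationForTesting();
}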
6848 TEST(ContinuousLeftTrimFixedArrayInBlackArea) { 6856 TEST(ContinuousLeftTrimFixedArrayInBlackArea) {
6849 FLAG_black_allocation = true; 6857 FLAG_black_allocation = true;
6850 CcTest::InitializeVM(); 6858 CcTest::InitializeVM();
6851 v8::HandleScope scope(CcTest::isolate()); 6859 v8::HandleScope scope(CcTest::isolate());
6852 Heap* heap = CcTest::heap(); 6860 Heap* heap = CcTest::heap();
6853 Isolate* isolate = heap->isolate(); 6861 Isolate* isolate = heap->isolate();
6854 heap->CollectAllGarbage(); 6862 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
6855 6863
6856 i::MarkCompactCollector* collector = heap->mark_compact_collector(); 6864 i::MarkCompactCollector* collector = heap->mark_compact_collector();
6857 i::IncrementalMarking* marking = heap->incremental_marking(); 6865 i::IncrementalMarking* marking = heap->incremental_marking();
6858 if (collector->sweeping_in_progress()) { 6866 if (collector->sweeping_in_progress()) {
6859 collector->EnsureSweepingCompleted(); 6867 collector->EnsureSweepingCompleted();
6860 } 6868 }
6861 CHECK(marking->IsMarking() || marking->IsStopped()); 6869 CHECK(marking->IsMarking() || marking->IsStopped());
6862 if (marking->IsStopped()) { 6870 if (marking->IsStopped()) {
6863 heap->StartIncrementalMarking(); 6871 heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
6872 i::GarbageCollectionReason::kTesting);
6864 } 6873 }
6865 CHECK(marking->IsMarking()); 6874 CHECK(marking->IsMarking());
6866 marking->StartBlackAllocationForTesting(); 6875 marking->StartBlackAllocationForTesting();
6867 6876
6868 // Ensure that we allocate a new page, set up a bump pointer area, and 6877 // Ensure that we allocate a new page, set up a bump pointer area, and
6869 // perform the allocation in a black area. 6878 // perform the allocation in a black area.
6870 heap::SimulateFullSpace(heap->old_space()); 6879 heap::SimulateFullSpace(heap->old_space());
6871 isolate->factory()->NewFixedArray(10, TENURED); 6880 isolate->factory()->NewFixedArray(10, TENURED);
6872 6881
6873 // Allocate the fixed array that will be trimmed later. 6882 // Allocate the fixed array that will be trimmed later.
(...skipping 34 matching lines...)
6908 6917
6909 heap::GcAndSweep(heap, OLD_SPACE); 6918 heap::GcAndSweep(heap, OLD_SPACE);
6910 } 6919 }
6911 6920
6912 TEST(ContinuousRightTrimFixedArrayInBlackArea) { 6921 TEST(ContinuousRightTrimFixedArrayInBlackArea) {
6913 FLAG_black_allocation = true; 6922 FLAG_black_allocation = true;
6914 CcTest::InitializeVM(); 6923 CcTest::InitializeVM();
6915 v8::HandleScope scope(CcTest::isolate()); 6924 v8::HandleScope scope(CcTest::isolate());
6916 Heap* heap = CcTest::heap(); 6925 Heap* heap = CcTest::heap();
6917 Isolate* isolate = heap->isolate(); 6926 Isolate* isolate = heap->isolate();
6918 heap->CollectAllGarbage(); 6927 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
6919 6928
6920 i::MarkCompactCollector* collector = heap->mark_compact_collector(); 6929 i::MarkCompactCollector* collector = heap->mark_compact_collector();
6921 i::IncrementalMarking* marking = heap->incremental_marking(); 6930 i::IncrementalMarking* marking = heap->incremental_marking();
6922 if (collector->sweeping_in_progress()) { 6931 if (collector->sweeping_in_progress()) {
6923 collector->EnsureSweepingCompleted(); 6932 collector->EnsureSweepingCompleted();
6924 } 6933 }
6925 CHECK(marking->IsMarking() || marking->IsStopped()); 6934 CHECK(marking->IsMarking() || marking->IsStopped());
6926 if (marking->IsStopped()) { 6935 if (marking->IsStopped()) {
6927 heap->StartIncrementalMarking(); 6936 heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
6937 i::GarbageCollectionReason::kTesting);
6928 } 6938 }
6929 CHECK(marking->IsMarking()); 6939 CHECK(marking->IsMarking());
6930 marking->StartBlackAllocationForTesting(); 6940 marking->StartBlackAllocationForTesting();
6931 6941
6932 // Ensure that we allocate a new page, set up a bump pointer area, and 6942 // Ensure that we allocate a new page, set up a bump pointer area, and
6933 // perform the allocation in a black area. 6943 // perform the allocation in a black area.
6934 heap::SimulateFullSpace(heap->old_space()); 6944 heap::SimulateFullSpace(heap->old_space());
6935 isolate->factory()->NewFixedArray(10, TENURED); 6945 isolate->factory()->NewFixedArray(10, TENURED);
6936 6946
6937 // Allocate the fixed array that will be trimmed later. 6947 // Allocate the fixed array that will be trimmed later.
(...skipping 28 matching lines...)
6966 heap::GcAndSweep(heap, OLD_SPACE); 6976 heap::GcAndSweep(heap, OLD_SPACE);
6967 } 6977 }
6968 6978
6969 TEST(SlotFilteringAfterBlackAreas) { 6979 TEST(SlotFilteringAfterBlackAreas) {
6970 FLAG_black_allocation = true; 6980 FLAG_black_allocation = true;
6971 CcTest::InitializeVM(); 6981 CcTest::InitializeVM();
6972 v8::HandleScope scope(CcTest::isolate()); 6982 v8::HandleScope scope(CcTest::isolate());
6973 Heap* heap = CcTest::heap(); 6983 Heap* heap = CcTest::heap();
6974 Isolate* isolate = heap->isolate(); 6984 Isolate* isolate = heap->isolate();
6975 MarkCompactCollector* mark_compact_collector = heap->mark_compact_collector(); 6985 MarkCompactCollector* mark_compact_collector = heap->mark_compact_collector();
6976 heap->CollectAllGarbage(); 6986 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
6977 6987
6978 i::MarkCompactCollector* collector = heap->mark_compact_collector(); 6988 i::MarkCompactCollector* collector = heap->mark_compact_collector();
6979 i::IncrementalMarking* marking = heap->incremental_marking(); 6989 i::IncrementalMarking* marking = heap->incremental_marking();
6980 if (collector->sweeping_in_progress()) { 6990 if (collector->sweeping_in_progress()) {
6981 collector->EnsureSweepingCompleted(); 6991 collector->EnsureSweepingCompleted();
6982 } 6992 }
6983 CHECK(marking->IsMarking() || marking->IsStopped()); 6993 CHECK(marking->IsMarking() || marking->IsStopped());
6984 if (marking->IsStopped()) { 6994 if (marking->IsStopped()) {
6985 heap->StartIncrementalMarking(); 6995 heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
6996 i::GarbageCollectionReason::kTesting);
6986 } 6997 }
6987 CHECK(marking->IsMarking()); 6998 CHECK(marking->IsMarking());
6988 marking->StartBlackAllocationForTesting(); 6999 marking->StartBlackAllocationForTesting();
6989 7000
6990 // Ensure that we allocate a new page, set up a bump pointer area, and 7001 // Ensure that we allocate a new page, set up a bump pointer area, and
6991 // perform the allocation in a black area. 7002 // perform the allocation in a black area.
6992 heap::SimulateFullSpace(heap->old_space()); 7003 heap::SimulateFullSpace(heap->old_space());
6993 Handle<FixedArray> array = isolate->factory()->NewFixedArray(10, TENURED); 7004 Handle<FixedArray> array = isolate->factory()->NewFixedArray(10, TENURED);
6994 Page* page = Page::FromAddress(array->address()); 7005 Page* page = Page::FromAddress(array->address());
6995 7006
(...skipping 39 matching lines...)
7035 Handle<FixedArray> array = isolate->factory()->NewFixedArray(200000); 7046 Handle<FixedArray> array = isolate->factory()->NewFixedArray(200000);
7036 MemoryChunk* chunk = MemoryChunk::FromAddress(array->address()); 7047 MemoryChunk* chunk = MemoryChunk::FromAddress(array->address());
7037 CHECK(chunk->owner()->identity() == LO_SPACE); 7048 CHECK(chunk->owner()->identity() == LO_SPACE);
7038 7049
7039 intptr_t size_before = array->Size(); 7050 intptr_t size_before = array->Size();
7040 size_t committed_memory_before = chunk->CommittedPhysicalMemory(); 7051 size_t committed_memory_before = chunk->CommittedPhysicalMemory();
7041 7052
7042 array->Shrink(1); 7053 array->Shrink(1);
7043 CHECK(array->Size() < size_before); 7054 CHECK(array->Size() < size_before);
7044 7055
7045 CcTest::heap()->CollectAllGarbage(); 7056 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
7046 CHECK(chunk->CommittedPhysicalMemory() < committed_memory_before); 7057 CHECK(chunk->CommittedPhysicalMemory() < committed_memory_before);
7047 size_t shrinked_size = 7058 size_t shrinked_size =
7048 RoundUp((array->address() - chunk->address()) + array->Size(), 7059 RoundUp((array->address() - chunk->address()) + array->Size(),
7049 base::OS::CommitPageSize()); 7060 base::OS::CommitPageSize());
7050 CHECK_EQ(shrinked_size, chunk->CommittedPhysicalMemory()); 7061 CHECK_EQ(shrinked_size, chunk->CommittedPhysicalMemory());
7051 } 7062 }
7052 7063
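For the committed-memory check in the test above, a quick worked example helps (the numbers are assumptions; actual values depend on the allocator): if the shrunk array starts 0x2000 bytes into its chunk, its remaining size is 0x40 bytes, and the commit page size is 4 KB, then the chunk should keep RoundUp(0x2040, 0x1000) = 0x3000 bytes committed. A stand-alone check of that arithmetic:

// Self-contained illustration of the rounding used by the test; the
// constants are assumed example values, not values read from the V8 heap.
#include <cstdint>

constexpr uint64_t RoundUpTo(uint64_t value, uint64_t alignment) {
  return ((value + alignment - 1) / alignment) * alignment;
}
static_assert(RoundUpTo(0x2040, 0x1000) == 0x3000,
              "a shrunk 0x2040-byte prefix keeps three 4 KB pages committed");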
7053 TEST(RememberedSetRemoveRange) { 7064 TEST(RememberedSetRemoveRange) {
7054 CcTest::InitializeVM(); 7065 CcTest::InitializeVM();
7055 v8::HandleScope scope(CcTest::isolate()); 7066 v8::HandleScope scope(CcTest::isolate());
(...skipping 51 matching lines...)
7107 chunk, chunk->area_end() - kPointerSize, chunk->area_end()); 7118 chunk, chunk->area_end() - kPointerSize, chunk->area_end());
7108 slots[chunk->area_end() - kPointerSize] = false; 7119 slots[chunk->area_end() - kPointerSize] = false;
7109 RememberedSet<OLD_TO_NEW>::Iterate(chunk, [&slots](Address addr) { 7120 RememberedSet<OLD_TO_NEW>::Iterate(chunk, [&slots](Address addr) {
7110 CHECK(slots[addr]); 7121 CHECK(slots[addr]);
7111 return KEEP_SLOT; 7122 return KEEP_SLOT;
7112 }); 7123 });
7113 } 7124 }
7114 7125
7115 } // namespace internal 7126 } // namespace internal
7116 } // namespace v8 7127 } // namespace v8