Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(144)

Side by Side Diff: test/cctest/heap/test-heap.cc

Issue 2310143002: [heap] Introduce enum of garbage collection reasons. (Closed)
Patch Set: fix win Created 4 years, 3 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 120 matching lines...) Expand 10 before | Expand all | Expand 10 after
131 CodeDesc desc; 131 CodeDesc desc;
132 masm.GetCode(&desc); 132 masm.GetCode(&desc);
133 Handle<Code> code = isolate->factory()->NewCode( 133 Handle<Code> code = isolate->factory()->NewCode(
134 desc, Code::ComputeFlags(Code::STUB), Handle<Code>()); 134 desc, Code::ComputeFlags(Code::STUB), Handle<Code>());
135 135
136 Code* tmp = nullptr; 136 Code* tmp = nullptr;
137 heap->CopyCode(*code).To(&tmp); 137 heap->CopyCode(*code).To(&tmp);
138 Handle<Code> copy(tmp); 138 Handle<Code> copy(tmp);
139 139
140 CheckEmbeddedObjectsAreEqual(code, copy); 140 CheckEmbeddedObjectsAreEqual(code, copy);
141 heap->CollectAllAvailableGarbage(); 141 CcTest::CollectAllAvailableGarbage();
142 CheckEmbeddedObjectsAreEqual(code, copy); 142 CheckEmbeddedObjectsAreEqual(code, copy);
143 } 143 }
144 144
145 static void CheckFindCodeObject(Isolate* isolate) { 145 static void CheckFindCodeObject(Isolate* isolate) {
146 // Test FindCodeObject 146 // Test FindCodeObject
147 #define __ assm. 147 #define __ assm.
148 148
149 Assembler assm(isolate, NULL, 0); 149 Assembler assm(isolate, NULL, 0);
150 150
151 __ nop(); // supported on all architectures 151 __ nop(); // supported on all architectures
(...skipping 319 matching lines...) Expand 10 before | Expand all | Expand 10 after
471 CHECK_EQ(request, static_cast<int>(OBJECT_POINTER_ALIGN(request))); 471 CHECK_EQ(request, static_cast<int>(OBJECT_POINTER_ALIGN(request)));
472 CHECK(Smi::FromInt(42)->IsSmi()); 472 CHECK(Smi::FromInt(42)->IsSmi());
473 CHECK(Smi::FromInt(Smi::kMinValue)->IsSmi()); 473 CHECK(Smi::FromInt(Smi::kMinValue)->IsSmi());
474 CHECK(Smi::FromInt(Smi::kMaxValue)->IsSmi()); 474 CHECK(Smi::FromInt(Smi::kMaxValue)->IsSmi());
475 } 475 }
476 476
477 477
478 TEST(GarbageCollection) { 478 TEST(GarbageCollection) {
479 CcTest::InitializeVM(); 479 CcTest::InitializeVM();
480 Isolate* isolate = CcTest::i_isolate(); 480 Isolate* isolate = CcTest::i_isolate();
481 Heap* heap = isolate->heap();
482 Factory* factory = isolate->factory(); 481 Factory* factory = isolate->factory();
483 482
484 HandleScope sc(isolate); 483 HandleScope sc(isolate);
485 // Check GC. 484 // Check GC.
486 heap->CollectGarbage(NEW_SPACE); 485 CcTest::CollectGarbage(NEW_SPACE);
487 486
488 Handle<JSGlobalObject> global( 487 Handle<JSGlobalObject> global(
489 CcTest::i_isolate()->context()->global_object()); 488 CcTest::i_isolate()->context()->global_object());
490 Handle<String> name = factory->InternalizeUtf8String("theFunction"); 489 Handle<String> name = factory->InternalizeUtf8String("theFunction");
491 Handle<String> prop_name = factory->InternalizeUtf8String("theSlot"); 490 Handle<String> prop_name = factory->InternalizeUtf8String("theSlot");
492 Handle<String> prop_namex = factory->InternalizeUtf8String("theSlotx"); 491 Handle<String> prop_namex = factory->InternalizeUtf8String("theSlotx");
493 Handle<String> obj_name = factory->InternalizeUtf8String("theObject"); 492 Handle<String> obj_name = factory->InternalizeUtf8String("theObject");
494 Handle<Smi> twenty_three(Smi::FromInt(23), isolate); 493 Handle<Smi> twenty_three(Smi::FromInt(23), isolate);
495 Handle<Smi> twenty_four(Smi::FromInt(24), isolate); 494 Handle<Smi> twenty_four(Smi::FromInt(24), isolate);
496 495
497 { 496 {
498 HandleScope inner_scope(isolate); 497 HandleScope inner_scope(isolate);
499 // Allocate a function and keep it in global object's property. 498 // Allocate a function and keep it in global object's property.
500 Handle<JSFunction> function = factory->NewFunction(name); 499 Handle<JSFunction> function = factory->NewFunction(name);
501 JSReceiver::SetProperty(global, name, function, SLOPPY).Check(); 500 JSReceiver::SetProperty(global, name, function, SLOPPY).Check();
502 // Allocate an object. Unrooted after leaving the scope. 501 // Allocate an object. Unrooted after leaving the scope.
503 Handle<JSObject> obj = factory->NewJSObject(function); 502 Handle<JSObject> obj = factory->NewJSObject(function);
504 JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check(); 503 JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
505 JSReceiver::SetProperty(obj, prop_namex, twenty_four, SLOPPY).Check(); 504 JSReceiver::SetProperty(obj, prop_namex, twenty_four, SLOPPY).Check();
506 505
507 CHECK_EQ(Smi::FromInt(23), 506 CHECK_EQ(Smi::FromInt(23),
508 *Object::GetProperty(obj, prop_name).ToHandleChecked()); 507 *Object::GetProperty(obj, prop_name).ToHandleChecked());
509 CHECK_EQ(Smi::FromInt(24), 508 CHECK_EQ(Smi::FromInt(24),
510 *Object::GetProperty(obj, prop_namex).ToHandleChecked()); 509 *Object::GetProperty(obj, prop_namex).ToHandleChecked());
511 } 510 }
512 511
513 heap->CollectGarbage(NEW_SPACE); 512 CcTest::CollectGarbage(NEW_SPACE);
514 513
515 // Function should be alive. 514 // Function should be alive.
516 CHECK(Just(true) == JSReceiver::HasOwnProperty(global, name)); 515 CHECK(Just(true) == JSReceiver::HasOwnProperty(global, name));
517 // Check function is retained. 516 // Check function is retained.
518 Handle<Object> func_value = 517 Handle<Object> func_value =
519 Object::GetProperty(global, name).ToHandleChecked(); 518 Object::GetProperty(global, name).ToHandleChecked();
520 CHECK(func_value->IsJSFunction()); 519 CHECK(func_value->IsJSFunction());
521 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value); 520 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
522 521
523 { 522 {
524 HandleScope inner_scope(isolate); 523 HandleScope inner_scope(isolate);
525 // Allocate another object, make it reachable from global. 524 // Allocate another object, make it reachable from global.
526 Handle<JSObject> obj = factory->NewJSObject(function); 525 Handle<JSObject> obj = factory->NewJSObject(function);
527 JSReceiver::SetProperty(global, obj_name, obj, SLOPPY).Check(); 526 JSReceiver::SetProperty(global, obj_name, obj, SLOPPY).Check();
528 JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check(); 527 JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
529 } 528 }
530 529
531 // After gc, it should survive. 530 // After gc, it should survive.
532 heap->CollectGarbage(NEW_SPACE); 531 CcTest::CollectGarbage(NEW_SPACE);
533 532
534 CHECK(Just(true) == JSReceiver::HasOwnProperty(global, obj_name)); 533 CHECK(Just(true) == JSReceiver::HasOwnProperty(global, obj_name));
535 Handle<Object> obj = 534 Handle<Object> obj =
536 Object::GetProperty(global, obj_name).ToHandleChecked(); 535 Object::GetProperty(global, obj_name).ToHandleChecked();
537 CHECK(obj->IsJSObject()); 536 CHECK(obj->IsJSObject());
538 CHECK_EQ(Smi::FromInt(23), 537 CHECK_EQ(Smi::FromInt(23),
539 *Object::GetProperty(obj, prop_name).ToHandleChecked()); 538 *Object::GetProperty(obj, prop_name).ToHandleChecked());
540 } 539 }
541 540
542 541
(...skipping 28 matching lines...) Expand all
571 v8::HandleScope scope(CcTest::isolate()); 570 v8::HandleScope scope(CcTest::isolate());
572 const char* name = "Kasper the spunky"; 571 const char* name = "Kasper the spunky";
573 Handle<String> string = factory->NewStringFromAsciiChecked(name); 572 Handle<String> string = factory->NewStringFromAsciiChecked(name);
574 CHECK_EQ(StrLength(name), string->length()); 573 CHECK_EQ(StrLength(name), string->length());
575 } 574 }
576 575
577 576
578 TEST(GlobalHandles) { 577 TEST(GlobalHandles) {
579 CcTest::InitializeVM(); 578 CcTest::InitializeVM();
580 Isolate* isolate = CcTest::i_isolate(); 579 Isolate* isolate = CcTest::i_isolate();
581 Heap* heap = isolate->heap();
582 Factory* factory = isolate->factory(); 580 Factory* factory = isolate->factory();
583 GlobalHandles* global_handles = isolate->global_handles(); 581 GlobalHandles* global_handles = isolate->global_handles();
584 582
585 Handle<Object> h1; 583 Handle<Object> h1;
586 Handle<Object> h2; 584 Handle<Object> h2;
587 Handle<Object> h3; 585 Handle<Object> h3;
588 Handle<Object> h4; 586 Handle<Object> h4;
589 587
590 { 588 {
591 HandleScope scope(isolate); 589 HandleScope scope(isolate);
592 590
593 Handle<Object> i = factory->NewStringFromStaticChars("fisk"); 591 Handle<Object> i = factory->NewStringFromStaticChars("fisk");
594 Handle<Object> u = factory->NewNumber(1.12344); 592 Handle<Object> u = factory->NewNumber(1.12344);
595 593
596 h1 = global_handles->Create(*i); 594 h1 = global_handles->Create(*i);
597 h2 = global_handles->Create(*u); 595 h2 = global_handles->Create(*u);
598 h3 = global_handles->Create(*i); 596 h3 = global_handles->Create(*i);
599 h4 = global_handles->Create(*u); 597 h4 = global_handles->Create(*u);
600 } 598 }
601 599
602 // after gc, it should survive 600 // after gc, it should survive
603 heap->CollectGarbage(NEW_SPACE); 601 CcTest::CollectGarbage(NEW_SPACE);
604 602
605 CHECK((*h1)->IsString()); 603 CHECK((*h1)->IsString());
606 CHECK((*h2)->IsHeapNumber()); 604 CHECK((*h2)->IsHeapNumber());
607 CHECK((*h3)->IsString()); 605 CHECK((*h3)->IsString());
608 CHECK((*h4)->IsHeapNumber()); 606 CHECK((*h4)->IsHeapNumber());
609 607
610 CHECK_EQ(*h3, *h1); 608 CHECK_EQ(*h3, *h1);
611 GlobalHandles::Destroy(h1.location()); 609 GlobalHandles::Destroy(h1.location());
612 GlobalHandles::Destroy(h3.location()); 610 GlobalHandles::Destroy(h3.location());
613 611
(...skipping 12 matching lines...) Expand all
626 data.GetParameter()); 624 data.GetParameter());
627 if (p->second == 1234) WeakPointerCleared = true; 625 if (p->second == 1234) WeakPointerCleared = true;
628 p->first->Reset(); 626 p->first->Reset();
629 } 627 }
630 628
631 629
632 TEST(WeakGlobalHandlesScavenge) { 630 TEST(WeakGlobalHandlesScavenge) {
633 i::FLAG_stress_compaction = false; 631 i::FLAG_stress_compaction = false;
634 CcTest::InitializeVM(); 632 CcTest::InitializeVM();
635 Isolate* isolate = CcTest::i_isolate(); 633 Isolate* isolate = CcTest::i_isolate();
636 Heap* heap = isolate->heap();
637 Factory* factory = isolate->factory(); 634 Factory* factory = isolate->factory();
638 GlobalHandles* global_handles = isolate->global_handles(); 635 GlobalHandles* global_handles = isolate->global_handles();
639 636
640 WeakPointerCleared = false; 637 WeakPointerCleared = false;
641 638
642 Handle<Object> h1; 639 Handle<Object> h1;
643 Handle<Object> h2; 640 Handle<Object> h2;
644 641
645 { 642 {
646 HandleScope scope(isolate); 643 HandleScope scope(isolate);
647 644
648 Handle<Object> i = factory->NewStringFromStaticChars("fisk"); 645 Handle<Object> i = factory->NewStringFromStaticChars("fisk");
649 Handle<Object> u = factory->NewNumber(1.12344); 646 Handle<Object> u = factory->NewNumber(1.12344);
650 647
651 h1 = global_handles->Create(*i); 648 h1 = global_handles->Create(*i);
652 h2 = global_handles->Create(*u); 649 h2 = global_handles->Create(*u);
653 } 650 }
654 651
655 std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234); 652 std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
656 GlobalHandles::MakeWeak( 653 GlobalHandles::MakeWeak(
657 h2.location(), reinterpret_cast<void*>(&handle_and_id), 654 h2.location(), reinterpret_cast<void*>(&handle_and_id),
658 &TestWeakGlobalHandleCallback, v8::WeakCallbackType::kParameter); 655 &TestWeakGlobalHandleCallback, v8::WeakCallbackType::kParameter);
659 656
660 // Scavenge treats weak pointers as normal roots. 657 // Scavenge treats weak pointers as normal roots.
661 heap->CollectGarbage(NEW_SPACE); 658 CcTest::CollectGarbage(NEW_SPACE);
662 659
663 CHECK((*h1)->IsString()); 660 CHECK((*h1)->IsString());
664 CHECK((*h2)->IsHeapNumber()); 661 CHECK((*h2)->IsHeapNumber());
665 662
666 CHECK(!WeakPointerCleared); 663 CHECK(!WeakPointerCleared);
667 CHECK(!global_handles->IsNearDeath(h2.location())); 664 CHECK(!global_handles->IsNearDeath(h2.location()));
668 CHECK(!global_handles->IsNearDeath(h1.location())); 665 CHECK(!global_handles->IsNearDeath(h1.location()));
669 666
670 GlobalHandles::Destroy(h1.location()); 667 GlobalHandles::Destroy(h1.location());
671 GlobalHandles::Destroy(h2.location()); 668 GlobalHandles::Destroy(h2.location());
(...skipping 16 matching lines...) Expand all
688 HandleScope scope(isolate); 685 HandleScope scope(isolate);
689 686
690 Handle<Object> i = factory->NewStringFromStaticChars("fisk"); 687 Handle<Object> i = factory->NewStringFromStaticChars("fisk");
691 Handle<Object> u = factory->NewNumber(1.12344); 688 Handle<Object> u = factory->NewNumber(1.12344);
692 689
693 h1 = global_handles->Create(*i); 690 h1 = global_handles->Create(*i);
694 h2 = global_handles->Create(*u); 691 h2 = global_handles->Create(*u);
695 } 692 }
696 693
697 // Make sure the objects are promoted. 694 // Make sure the objects are promoted.
698 heap->CollectGarbage(OLD_SPACE); 695 CcTest::CollectGarbage(OLD_SPACE);
699 heap->CollectGarbage(NEW_SPACE); 696 CcTest::CollectGarbage(NEW_SPACE);
700 CHECK(!heap->InNewSpace(*h1) && !heap->InNewSpace(*h2)); 697 CHECK(!heap->InNewSpace(*h1) && !heap->InNewSpace(*h2));
701 698
702 std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234); 699 std::pair<Handle<Object>*, int> handle_and_id(&h2, 1234);
703 GlobalHandles::MakeWeak( 700 GlobalHandles::MakeWeak(
704 h2.location(), reinterpret_cast<void*>(&handle_and_id), 701 h2.location(), reinterpret_cast<void*>(&handle_and_id),
705 &TestWeakGlobalHandleCallback, v8::WeakCallbackType::kParameter); 702 &TestWeakGlobalHandleCallback, v8::WeakCallbackType::kParameter);
706 CHECK(!GlobalHandles::IsNearDeath(h1.location())); 703 CHECK(!GlobalHandles::IsNearDeath(h1.location()));
707 CHECK(!GlobalHandles::IsNearDeath(h2.location())); 704 CHECK(!GlobalHandles::IsNearDeath(h2.location()));
708 705
709 // Incremental marking potentially marked handles before they turned weak. 706 // Incremental marking potentially marked handles before they turned weak.
710 heap->CollectAllGarbage(); 707 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
711 708
712 CHECK((*h1)->IsString()); 709 CHECK((*h1)->IsString());
713 710
714 CHECK(WeakPointerCleared); 711 CHECK(WeakPointerCleared);
715 CHECK(!GlobalHandles::IsNearDeath(h1.location())); 712 CHECK(!GlobalHandles::IsNearDeath(h1.location()));
716 713
717 GlobalHandles::Destroy(h1.location()); 714 GlobalHandles::Destroy(h1.location());
718 } 715 }
719 716
720 717
721 TEST(DeleteWeakGlobalHandle) { 718 TEST(DeleteWeakGlobalHandle) {
722 i::FLAG_stress_compaction = false; 719 i::FLAG_stress_compaction = false;
723 CcTest::InitializeVM(); 720 CcTest::InitializeVM();
724 Isolate* isolate = CcTest::i_isolate(); 721 Isolate* isolate = CcTest::i_isolate();
725 Heap* heap = isolate->heap();
726 Factory* factory = isolate->factory(); 722 Factory* factory = isolate->factory();
727 GlobalHandles* global_handles = isolate->global_handles(); 723 GlobalHandles* global_handles = isolate->global_handles();
728 724
729 WeakPointerCleared = false; 725 WeakPointerCleared = false;
730 726
731 Handle<Object> h; 727 Handle<Object> h;
732 728
733 { 729 {
734 HandleScope scope(isolate); 730 HandleScope scope(isolate);
735 731
736 Handle<Object> i = factory->NewStringFromStaticChars("fisk"); 732 Handle<Object> i = factory->NewStringFromStaticChars("fisk");
737 h = global_handles->Create(*i); 733 h = global_handles->Create(*i);
738 } 734 }
739 735
740 std::pair<Handle<Object>*, int> handle_and_id(&h, 1234); 736 std::pair<Handle<Object>*, int> handle_and_id(&h, 1234);
741 GlobalHandles::MakeWeak(h.location(), reinterpret_cast<void*>(&handle_and_id), 737 GlobalHandles::MakeWeak(h.location(), reinterpret_cast<void*>(&handle_and_id),
742 &TestWeakGlobalHandleCallback, 738 &TestWeakGlobalHandleCallback,
743 v8::WeakCallbackType::kParameter); 739 v8::WeakCallbackType::kParameter);
744 740
 745 // Scavenge does not recognize weak reference. 741 // Scavenge does not recognize weak reference.
746 heap->CollectGarbage(NEW_SPACE); 742 CcTest::CollectGarbage(NEW_SPACE);
747 743
748 CHECK(!WeakPointerCleared); 744 CHECK(!WeakPointerCleared);
749 745
750 // Mark-compact treats weak reference properly. 746 // Mark-compact treats weak reference properly.
751 heap->CollectGarbage(OLD_SPACE); 747 CcTest::CollectGarbage(OLD_SPACE);
752 748
753 CHECK(WeakPointerCleared); 749 CHECK(WeakPointerCleared);
754 } 750 }
755 751
756 TEST(DoNotPromoteWhiteObjectsOnScavenge) { 752 TEST(DoNotPromoteWhiteObjectsOnScavenge) {
757 CcTest::InitializeVM(); 753 CcTest::InitializeVM();
758 Isolate* isolate = CcTest::i_isolate(); 754 Isolate* isolate = CcTest::i_isolate();
759 Heap* heap = isolate->heap(); 755 Heap* heap = isolate->heap();
760 Factory* factory = isolate->factory(); 756 Factory* factory = isolate->factory();
761 757
762 HandleScope scope(isolate); 758 HandleScope scope(isolate);
763 Handle<Object> white = factory->NewStringFromStaticChars("white"); 759 Handle<Object> white = factory->NewStringFromStaticChars("white");
764 760
765 CHECK(Marking::IsWhite(ObjectMarking::MarkBitFrom(HeapObject::cast(*white)))); 761 CHECK(Marking::IsWhite(ObjectMarking::MarkBitFrom(HeapObject::cast(*white))));
766 762
767 heap->CollectGarbage(NEW_SPACE); 763 CcTest::CollectGarbage(NEW_SPACE);
768 764
769 CHECK(heap->InNewSpace(*white)); 765 CHECK(heap->InNewSpace(*white));
770 } 766 }
771 767
772 TEST(PromoteGreyOrBlackObjectsOnScavenge) { 768 TEST(PromoteGreyOrBlackObjectsOnScavenge) {
773 CcTest::InitializeVM(); 769 CcTest::InitializeVM();
774 Isolate* isolate = CcTest::i_isolate(); 770 Isolate* isolate = CcTest::i_isolate();
775 Heap* heap = isolate->heap(); 771 Heap* heap = isolate->heap();
776 Factory* factory = isolate->factory(); 772 Factory* factory = isolate->factory();
777 773
778 HandleScope scope(isolate); 774 HandleScope scope(isolate);
779 Handle<Object> marked = factory->NewStringFromStaticChars("marked"); 775 Handle<Object> marked = factory->NewStringFromStaticChars("marked");
780 776
781 IncrementalMarking* marking = heap->incremental_marking(); 777 IncrementalMarking* marking = heap->incremental_marking();
782 marking->Stop(); 778 marking->Stop();
783 heap->StartIncrementalMarking(); 779 heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
780 i::GarbageCollectionReason::kTesting);
784 while ( 781 while (
785 Marking::IsWhite(ObjectMarking::MarkBitFrom(HeapObject::cast(*marked)))) { 782 Marking::IsWhite(ObjectMarking::MarkBitFrom(HeapObject::cast(*marked)))) {
786 marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD, 783 marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
787 IncrementalMarking::DO_NOT_FORCE_COMPLETION); 784 IncrementalMarking::DO_NOT_FORCE_COMPLETION);
788 } 785 }
789 786
790 heap->CollectGarbage(NEW_SPACE); 787 CcTest::CollectGarbage(NEW_SPACE);
791 788
792 CHECK(!heap->InNewSpace(*marked)); 789 CHECK(!heap->InNewSpace(*marked));
793 } 790 }
794 791
795 TEST(BytecodeArray) { 792 TEST(BytecodeArray) {
796 static const uint8_t kRawBytes[] = {0xc3, 0x7e, 0xa5, 0x5a}; 793 static const uint8_t kRawBytes[] = {0xc3, 0x7e, 0xa5, 0x5a};
797 static const int kRawBytesSize = sizeof(kRawBytes); 794 static const int kRawBytesSize = sizeof(kRawBytes);
798 static const int kFrameSize = 32; 795 static const int kFrameSize = 32;
799 static const int kParameterCount = 2; 796 static const int kParameterCount = 2;
800 797
(...skipping 27 matching lines...) Expand all
828 CHECK_EQ(array->GetFirstBytecodeAddress()[i], kRawBytes[i]); 825 CHECK_EQ(array->GetFirstBytecodeAddress()[i], kRawBytes[i]);
829 CHECK_EQ(array->get(i), kRawBytes[i]); 826 CHECK_EQ(array->get(i), kRawBytes[i]);
830 } 827 }
831 828
832 FixedArray* old_constant_pool_address = *constant_pool; 829 FixedArray* old_constant_pool_address = *constant_pool;
833 830
834 // Perform a full garbage collection and force the constant pool to be on an 831 // Perform a full garbage collection and force the constant pool to be on an
835 // evacuation candidate. 832 // evacuation candidate.
836 Page* evac_page = Page::FromAddress(constant_pool->address()); 833 Page* evac_page = Page::FromAddress(constant_pool->address());
837 evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING); 834 evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
838 heap->CollectAllGarbage(); 835 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
839 836
840 // BytecodeArray should survive. 837 // BytecodeArray should survive.
841 CHECK_EQ(array->length(), kRawBytesSize); 838 CHECK_EQ(array->length(), kRawBytesSize);
842 CHECK_EQ(array->frame_size(), kFrameSize); 839 CHECK_EQ(array->frame_size(), kFrameSize);
843 for (int i = 0; i < kRawBytesSize; i++) { 840 for (int i = 0; i < kRawBytesSize; i++) {
844 CHECK_EQ(array->get(i), kRawBytes[i]); 841 CHECK_EQ(array->get(i), kRawBytes[i]);
845 CHECK_EQ(array->GetFirstBytecodeAddress()[i], kRawBytes[i]); 842 CHECK_EQ(array->GetFirstBytecodeAddress()[i], kRawBytes[i]);
846 } 843 }
847 844
848 // Constant pool should have been migrated. 845 // Constant pool should have been migrated.
(...skipping 449 matching lines...) Expand 10 before | Expand all | Expand 10 after
1298 } 1295 }
1299 1296
1300 // Check function is compiled. 1297 // Check function is compiled.
1301 Handle<Object> func_value = Object::GetProperty(i_isolate->global_object(), 1298 Handle<Object> func_value = Object::GetProperty(i_isolate->global_object(),
1302 foo_name).ToHandleChecked(); 1299 foo_name).ToHandleChecked();
1303 CHECK(func_value->IsJSFunction()); 1300 CHECK(func_value->IsJSFunction());
1304 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value); 1301 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1305 CHECK(function->shared()->is_compiled()); 1302 CHECK(function->shared()->is_compiled());
1306 1303
1307 // The code will survive at least two GCs. 1304 // The code will survive at least two GCs.
1308 i_isolate->heap()->CollectAllGarbage(); 1305 i_isolate->heap()->CollectAllGarbage(
1309 i_isolate->heap()->CollectAllGarbage(); 1306 i::Heap::kFinalizeIncrementalMarkingMask,
1307 i::GarbageCollectionReason::kTesting);
1308 i_isolate->heap()->CollectAllGarbage(
1309 i::Heap::kFinalizeIncrementalMarkingMask,
1310 i::GarbageCollectionReason::kTesting);
1310 CHECK(function->shared()->is_compiled()); 1311 CHECK(function->shared()->is_compiled());
1311 1312
1312 // Simulate several GCs that use full marking. 1313 // Simulate several GCs that use full marking.
1313 const int kAgingThreshold = 6; 1314 const int kAgingThreshold = 6;
1314 for (int i = 0; i < kAgingThreshold; i++) { 1315 for (int i = 0; i < kAgingThreshold; i++) {
1315 i_isolate->heap()->CollectAllGarbage(); 1316 i_isolate->heap()->CollectAllGarbage(
1317 i::Heap::kFinalizeIncrementalMarkingMask,
1318 i::GarbageCollectionReason::kTesting);
1316 } 1319 }
1317 1320
1318 // foo should no longer be in the compilation cache 1321 // foo should no longer be in the compilation cache
1319 CHECK(!function->shared()->is_compiled() || function->IsOptimized()); 1322 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1320 CHECK(!function->is_compiled() || function->IsOptimized()); 1323 CHECK(!function->is_compiled() || function->IsOptimized());
1321 // Call foo to get it recompiled. 1324 // Call foo to get it recompiled.
1322 CompileRun("foo()"); 1325 CompileRun("foo()");
1323 CHECK(function->shared()->is_compiled()); 1326 CHECK(function->shared()->is_compiled());
1324 CHECK(function->is_compiled()); 1327 CHECK(function->is_compiled());
1325 } 1328 }
(...skipping 25 matching lines...) Expand all
1351 } 1354 }
1352 1355
1353 // Check function is compiled. 1356 // Check function is compiled.
1354 Handle<Object> func_value = 1357 Handle<Object> func_value =
1355 Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked(); 1358 Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
1356 CHECK(func_value->IsJSFunction()); 1359 CHECK(func_value->IsJSFunction());
1357 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value); 1360 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1358 CHECK(function->shared()->is_compiled()); 1361 CHECK(function->shared()->is_compiled());
1359 1362
1360 // The code has been run so will survive at least one GC. 1363 // The code has been run so will survive at least one GC.
1361 CcTest::heap()->CollectAllGarbage(); 1364 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1362 CHECK(function->shared()->is_compiled()); 1365 CHECK(function->shared()->is_compiled());
1363 1366
1364 // The code was only run once, so it should be pre-aged and collected on the 1367 // The code was only run once, so it should be pre-aged and collected on the
1365 // next GC. 1368 // next GC.
1366 CcTest::heap()->CollectAllGarbage(); 1369 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1367 CHECK(!function->shared()->is_compiled() || function->IsOptimized()); 1370 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1368 1371
1369 // Execute the function again twice, and ensure it is reset to the young age. 1372 // Execute the function again twice, and ensure it is reset to the young age.
1370 { v8::HandleScope scope(CcTest::isolate()); 1373 { v8::HandleScope scope(CcTest::isolate());
1371 CompileRun("foo();" 1374 CompileRun("foo();"
1372 "foo();"); 1375 "foo();");
1373 } 1376 }
1374 1377
1375 // The code will survive at least two GC now that it is young again. 1378 // The code will survive at least two GC now that it is young again.
1376 CcTest::heap()->CollectAllGarbage(); 1379 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1377 CcTest::heap()->CollectAllGarbage(); 1380 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1378 CHECK(function->shared()->is_compiled()); 1381 CHECK(function->shared()->is_compiled());
1379 1382
1380 // Simulate several GCs that use full marking. 1383 // Simulate several GCs that use full marking.
1381 const int kAgingThreshold = 6; 1384 const int kAgingThreshold = 6;
1382 for (int i = 0; i < kAgingThreshold; i++) { 1385 for (int i = 0; i < kAgingThreshold; i++) {
1383 CcTest::heap()->CollectAllGarbage(); 1386 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1384 } 1387 }
1385 1388
1386 // foo should no longer be in the compilation cache 1389 // foo should no longer be in the compilation cache
1387 CHECK(!function->shared()->is_compiled() || function->IsOptimized()); 1390 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1388 CHECK(!function->is_compiled() || function->IsOptimized()); 1391 CHECK(!function->is_compiled() || function->IsOptimized());
1389 // Call foo to get it recompiled. 1392 // Call foo to get it recompiled.
1390 CompileRun("foo()"); 1393 CompileRun("foo()");
1391 CHECK(function->shared()->is_compiled()); 1394 CHECK(function->shared()->is_compiled());
1392 CHECK(function->is_compiled()); 1395 CHECK(function->is_compiled());
1393 } 1396 }
(...skipping 22 matching lines...) Expand all
1416 } 1419 }
1417 1420
1418 // Check function is compiled. 1421 // Check function is compiled.
1419 Handle<Object> func_value = 1422 Handle<Object> func_value =
1420 Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked(); 1423 Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
1421 CHECK(func_value->IsJSFunction()); 1424 CHECK(func_value->IsJSFunction());
1422 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value); 1425 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1423 CHECK(function->shared()->is_compiled()); 1426 CHECK(function->shared()->is_compiled());
1424 1427
1425 // The code will survive at least two GCs. 1428 // The code will survive at least two GCs.
1426 CcTest::heap()->CollectAllGarbage(); 1429 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1427 CcTest::heap()->CollectAllGarbage(); 1430 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1428 CHECK(function->shared()->is_compiled()); 1431 CHECK(function->shared()->is_compiled());
1429 1432
1430 // Simulate several GCs that use incremental marking. 1433 // Simulate several GCs that use incremental marking.
1431 const int kAgingThreshold = 6; 1434 const int kAgingThreshold = 6;
1432 for (int i = 0; i < kAgingThreshold; i++) { 1435 for (int i = 0; i < kAgingThreshold; i++) {
1433 heap::SimulateIncrementalMarking(CcTest::heap()); 1436 heap::SimulateIncrementalMarking(CcTest::heap());
1434 CcTest::heap()->CollectAllGarbage(); 1437 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1435 } 1438 }
1436 CHECK(!function->shared()->is_compiled() || function->IsOptimized()); 1439 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1437 CHECK(!function->is_compiled() || function->IsOptimized()); 1440 CHECK(!function->is_compiled() || function->IsOptimized());
1438 1441
1439 // This compile will compile the function again. 1442 // This compile will compile the function again.
1440 { v8::HandleScope scope(CcTest::isolate()); 1443 { v8::HandleScope scope(CcTest::isolate());
1441 CompileRun("foo();"); 1444 CompileRun("foo();");
1442 } 1445 }
1443 1446
1444 // Simulate several GCs that use incremental marking but make sure 1447 // Simulate several GCs that use incremental marking but make sure
1445 // the loop breaks once the function is enqueued as a candidate. 1448 // the loop breaks once the function is enqueued as a candidate.
1446 for (int i = 0; i < kAgingThreshold; i++) { 1449 for (int i = 0; i < kAgingThreshold; i++) {
1447 heap::SimulateIncrementalMarking(CcTest::heap()); 1450 heap::SimulateIncrementalMarking(CcTest::heap());
1448 if (!function->next_function_link()->IsUndefined(CcTest::i_isolate())) 1451 if (!function->next_function_link()->IsUndefined(CcTest::i_isolate()))
1449 break; 1452 break;
1450 CcTest::heap()->CollectAllGarbage(); 1453 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1451 } 1454 }
1452 1455
1453 // Force optimization while incremental marking is active and while 1456 // Force optimization while incremental marking is active and while
1454 // the function is enqueued as a candidate. 1457 // the function is enqueued as a candidate.
1455 { v8::HandleScope scope(CcTest::isolate()); 1458 { v8::HandleScope scope(CcTest::isolate());
1456 CompileRun("%OptimizeFunctionOnNextCall(foo); foo();"); 1459 CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
1457 } 1460 }
1458 1461
1459 // Simulate one final GC to make sure the candidate queue is sane. 1462 // Simulate one final GC to make sure the candidate queue is sane.
1460 CcTest::heap()->CollectAllGarbage(); 1463 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1461 CHECK(function->shared()->is_compiled() || !function->IsOptimized()); 1464 CHECK(function->shared()->is_compiled() || !function->IsOptimized());
1462 CHECK(function->is_compiled() || !function->IsOptimized()); 1465 CHECK(function->is_compiled() || !function->IsOptimized());
1463 } 1466 }
1464 1467
1465 1468
1466 TEST(TestCodeFlushingIncrementalScavenge) { 1469 TEST(TestCodeFlushingIncrementalScavenge) {
1467 // If we do not flush code this test is invalid. 1470 // If we do not flush code this test is invalid.
1468 if (!FLAG_flush_code) return; 1471 if (!FLAG_flush_code) return;
1469 i::FLAG_allow_natives_syntax = true; 1472 i::FLAG_allow_natives_syntax = true;
1470 i::FLAG_optimize_for_size = false; 1473 i::FLAG_optimize_for_size = false;
1471 CcTest::InitializeVM(); 1474 CcTest::InitializeVM();
1472 Isolate* isolate = CcTest::i_isolate(); 1475 Isolate* isolate = CcTest::i_isolate();
1473 Factory* factory = isolate->factory(); 1476 Factory* factory = isolate->factory();
1474 v8::HandleScope scope(CcTest::isolate()); 1477 v8::HandleScope scope(CcTest::isolate());
1475 const char* source = "var foo = function() {" 1478 const char* source = "var foo = function() {"
1476 " var x = 42;" 1479 " var x = 42;"
1477 " var y = 42;" 1480 " var y = 42;"
1478 " var z = x + y;" 1481 " var z = x + y;"
1479 "};" 1482 "};"
1480 "foo();" 1483 "foo();"
1481 "var bar = function() {" 1484 "var bar = function() {"
1482 " var x = 23;" 1485 " var x = 23;"
1483 "};" 1486 "};"
1484 "bar();"; 1487 "bar();";
1485 Handle<String> foo_name = factory->InternalizeUtf8String("foo"); 1488 Handle<String> foo_name = factory->InternalizeUtf8String("foo");
1486 Handle<String> bar_name = factory->InternalizeUtf8String("bar"); 1489 Handle<String> bar_name = factory->InternalizeUtf8String("bar");
1487 1490
1488 // Perfrom one initial GC to enable code flushing. 1491 // Perfrom one initial GC to enable code flushing.
1489 CcTest::heap()->CollectAllGarbage(); 1492 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1490 1493
1491 // This compile will add the code to the compilation cache. 1494 // This compile will add the code to the compilation cache.
1492 { v8::HandleScope scope(CcTest::isolate()); 1495 { v8::HandleScope scope(CcTest::isolate());
1493 CompileRun(source); 1496 CompileRun(source);
1494 } 1497 }
1495 1498
1496 // Check functions are compiled. 1499 // Check functions are compiled.
1497 Handle<Object> func_value = 1500 Handle<Object> func_value =
1498 Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked(); 1501 Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
1499 CHECK(func_value->IsJSFunction()); 1502 CHECK(func_value->IsJSFunction());
(...skipping 16 matching lines...) Expand all
1516 for (int i = 0; i < kAgingThreshold; i++) { 1519 for (int i = 0; i < kAgingThreshold; i++) {
1517 function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2)); 1520 function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
1518 function2->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2)); 1521 function2->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
1519 } 1522 }
1520 1523
1521 // Simulate incremental marking so that the functions are enqueued as 1524 // Simulate incremental marking so that the functions are enqueued as
1522 // code flushing candidates. Then kill one of the functions. Finally 1525 // code flushing candidates. Then kill one of the functions. Finally
1523 // perform a scavenge while incremental marking is still running. 1526 // perform a scavenge while incremental marking is still running.
1524 heap::SimulateIncrementalMarking(CcTest::heap(), false); 1527 heap::SimulateIncrementalMarking(CcTest::heap(), false);
1525 *function2.location() = NULL; 1528 *function2.location() = NULL;
1526 CcTest::heap()->CollectGarbage(NEW_SPACE, "test scavenge while marking"); 1529 CcTest::CollectGarbage(NEW_SPACE);
1527 1530
1528 // Simulate one final GC to make sure the candidate queue is sane. 1531 // Simulate one final GC to make sure the candidate queue is sane.
1529 CcTest::heap()->CollectAllGarbage(); 1532 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1530 CHECK(!function->shared()->is_compiled() || function->IsOptimized()); 1533 CHECK(!function->shared()->is_compiled() || function->IsOptimized());
1531 CHECK(!function->is_compiled() || function->IsOptimized()); 1534 CHECK(!function->is_compiled() || function->IsOptimized());
1532 } 1535 }
1533 1536
1534 1537
1535 TEST(TestCodeFlushingIncrementalAbort) { 1538 TEST(TestCodeFlushingIncrementalAbort) {
1536 // If we do not flush code this test is invalid. 1539 // If we do not flush code this test is invalid.
1537 if (!FLAG_flush_code) return; 1540 if (!FLAG_flush_code) return;
1538 i::FLAG_allow_natives_syntax = true; 1541 i::FLAG_allow_natives_syntax = true;
1539 i::FLAG_optimize_for_size = false; 1542 i::FLAG_optimize_for_size = false;
(...skipping 16 matching lines...) Expand all
1556 } 1559 }
1557 1560
1558 // Check function is compiled. 1561 // Check function is compiled.
1559 Handle<Object> func_value = 1562 Handle<Object> func_value =
1560 Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked(); 1563 Object::GetProperty(isolate->global_object(), foo_name).ToHandleChecked();
1561 CHECK(func_value->IsJSFunction()); 1564 CHECK(func_value->IsJSFunction());
1562 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value); 1565 Handle<JSFunction> function = Handle<JSFunction>::cast(func_value);
1563 CHECK(function->shared()->is_compiled()); 1566 CHECK(function->shared()->is_compiled());
1564 1567
1565 // The code will survive at least two GCs. 1568 // The code will survive at least two GCs.
1566 heap->CollectAllGarbage(); 1569 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1567 heap->CollectAllGarbage(); 1570 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1568 CHECK(function->shared()->is_compiled()); 1571 CHECK(function->shared()->is_compiled());
1569 1572
1570 // Bump the code age so that flushing is triggered. 1573 // Bump the code age so that flushing is triggered.
1571 const int kAgingThreshold = 6; 1574 const int kAgingThreshold = 6;
1572 for (int i = 0; i < kAgingThreshold; i++) { 1575 for (int i = 0; i < kAgingThreshold; i++) {
1573 function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2)); 1576 function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
1574 } 1577 }
1575 1578
1576 // Simulate incremental marking so that the function is enqueued as 1579 // Simulate incremental marking so that the function is enqueued as
1577 // code flushing candidate. 1580 // code flushing candidate.
1578 heap::SimulateIncrementalMarking(heap); 1581 heap::SimulateIncrementalMarking(heap);
1579 1582
1580 // Enable the debugger and add a breakpoint while incremental marking 1583 // Enable the debugger and add a breakpoint while incremental marking
1581 // is running so that incremental marking aborts and code flushing is 1584 // is running so that incremental marking aborts and code flushing is
1582 // disabled. 1585 // disabled.
1583 int position = function->shared()->start_position(); 1586 int position = function->shared()->start_position();
1584 Handle<Object> breakpoint_object(Smi::FromInt(0), isolate); 1587 Handle<Object> breakpoint_object(Smi::FromInt(0), isolate);
1585 EnableDebugger(CcTest::isolate()); 1588 EnableDebugger(CcTest::isolate());
1586 isolate->debug()->SetBreakPoint(function, breakpoint_object, &position); 1589 isolate->debug()->SetBreakPoint(function, breakpoint_object, &position);
1587 isolate->debug()->ClearBreakPoint(breakpoint_object); 1590 isolate->debug()->ClearBreakPoint(breakpoint_object);
1588 DisableDebugger(CcTest::isolate()); 1591 DisableDebugger(CcTest::isolate());
1589 1592
1590 // Force optimization now that code flushing is disabled. 1593 // Force optimization now that code flushing is disabled.
1591 { v8::HandleScope scope(CcTest::isolate()); 1594 { v8::HandleScope scope(CcTest::isolate());
1592 CompileRun("%OptimizeFunctionOnNextCall(foo); foo();"); 1595 CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
1593 } 1596 }
1594 1597
1595 // Simulate one final GC to make sure the candidate queue is sane. 1598 // Simulate one final GC to make sure the candidate queue is sane.
1596 heap->CollectAllGarbage(); 1599 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1597 CHECK(function->shared()->is_compiled() || !function->IsOptimized()); 1600 CHECK(function->shared()->is_compiled() || !function->IsOptimized());
1598 CHECK(function->is_compiled() || !function->IsOptimized()); 1601 CHECK(function->is_compiled() || !function->IsOptimized());
1599 } 1602 }
1600 1603
1601 TEST(TestUseOfIncrementalBarrierOnCompileLazy) { 1604 TEST(TestUseOfIncrementalBarrierOnCompileLazy) {
1602 // Turn off always_opt because it interferes with running the built-in for 1605 // Turn off always_opt because it interferes with running the built-in for
1603 // the last call to g(). 1606 // the last call to g().
1604 i::FLAG_always_opt = false; 1607 i::FLAG_always_opt = false;
1605 i::FLAG_allow_natives_syntax = true; 1608 i::FLAG_allow_natives_syntax = true;
1606 CcTest::InitializeVM(); 1609 CcTest::InitializeVM();
(...skipping 35 matching lines...) Expand 10 before | Expand all | Expand 10 after
1642 1645
1643 TEST(CompilationCacheCachingBehavior) { 1646 TEST(CompilationCacheCachingBehavior) {
1644 // If we do not flush code, or have the compilation cache turned off, this 1647 // If we do not flush code, or have the compilation cache turned off, this
1645 // test is invalid. 1648 // test is invalid.
1646 if (!FLAG_flush_code || !FLAG_compilation_cache) { 1649 if (!FLAG_flush_code || !FLAG_compilation_cache) {
1647 return; 1650 return;
1648 } 1651 }
1649 CcTest::InitializeVM(); 1652 CcTest::InitializeVM();
1650 Isolate* isolate = CcTest::i_isolate(); 1653 Isolate* isolate = CcTest::i_isolate();
1651 Factory* factory = isolate->factory(); 1654 Factory* factory = isolate->factory();
1652 Heap* heap = isolate->heap();
1653 CompilationCache* compilation_cache = isolate->compilation_cache(); 1655 CompilationCache* compilation_cache = isolate->compilation_cache();
1654 LanguageMode language_mode = construct_language_mode(FLAG_use_strict); 1656 LanguageMode language_mode = construct_language_mode(FLAG_use_strict);
1655 1657
1656 v8::HandleScope scope(CcTest::isolate()); 1658 v8::HandleScope scope(CcTest::isolate());
1657 const char* raw_source = 1659 const char* raw_source =
1658 "function foo() {" 1660 "function foo() {"
1659 " var x = 42;" 1661 " var x = 42;"
1660 " var y = 42;" 1662 " var y = 42;"
1661 " var z = x + y;" 1663 " var z = x + y;"
1662 "};" 1664 "};"
(...skipping 10 matching lines...) Expand all
1673 MaybeHandle<SharedFunctionInfo> info = compilation_cache->LookupScript( 1675 MaybeHandle<SharedFunctionInfo> info = compilation_cache->LookupScript(
1674 source, Handle<Object>(), 0, 0, 1676 source, Handle<Object>(), 0, 0,
1675 v8::ScriptOriginOptions(false, true, false), native_context, 1677 v8::ScriptOriginOptions(false, true, false), native_context,
1676 language_mode); 1678 language_mode);
1677 CHECK(!info.is_null()); 1679 CHECK(!info.is_null());
1678 1680
1679 // Check that the code cache entry survives at least on GC. 1681 // Check that the code cache entry survives at least on GC.
1680 // (Unless --optimize-for-size, in which case it might get collected 1682 // (Unless --optimize-for-size, in which case it might get collected
1681 // immediately.) 1683 // immediately.)
1682 if (!FLAG_optimize_for_size) { 1684 if (!FLAG_optimize_for_size) {
1683 heap->CollectAllGarbage(); 1685 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1684 info = compilation_cache->LookupScript( 1686 info = compilation_cache->LookupScript(
1685 source, Handle<Object>(), 0, 0, 1687 source, Handle<Object>(), 0, 0,
1686 v8::ScriptOriginOptions(false, true, false), native_context, 1688 v8::ScriptOriginOptions(false, true, false), native_context,
1687 language_mode); 1689 language_mode);
1688 CHECK(!info.is_null()); 1690 CHECK(!info.is_null());
1689 } 1691 }
1690 1692
1691 // Progress code age until it's old and ready for GC. 1693 // Progress code age until it's old and ready for GC.
1692 while (!info.ToHandleChecked()->code()->IsOld()) { 1694 while (!info.ToHandleChecked()->code()->IsOld()) {
1693 // To guarantee progress, we have to MakeOlder with different parities. 1695 // To guarantee progress, we have to MakeOlder with different parities.
1694 // We can't just use NO_MARKING_PARITY, since e.g. kExecutedOnceCodeAge is 1696 // We can't just use NO_MARKING_PARITY, since e.g. kExecutedOnceCodeAge is
1695 // always NO_MARKING_PARITY and the code age only progresses if the parity 1697 // always NO_MARKING_PARITY and the code age only progresses if the parity
1696 // is different. 1698 // is different.
1697 info.ToHandleChecked()->code()->MakeOlder(ODD_MARKING_PARITY); 1699 info.ToHandleChecked()->code()->MakeOlder(ODD_MARKING_PARITY);
1698 info.ToHandleChecked()->code()->MakeOlder(EVEN_MARKING_PARITY); 1700 info.ToHandleChecked()->code()->MakeOlder(EVEN_MARKING_PARITY);
1699 } 1701 }
1700 1702
1701 heap->CollectAllGarbage(); 1703 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1702 // Ensure code aging cleared the entry from the cache. 1704 // Ensure code aging cleared the entry from the cache.
1703 info = compilation_cache->LookupScript( 1705 info = compilation_cache->LookupScript(
1704 source, Handle<Object>(), 0, 0, 1706 source, Handle<Object>(), 0, 0,
1705 v8::ScriptOriginOptions(false, true, false), native_context, 1707 v8::ScriptOriginOptions(false, true, false), native_context,
1706 language_mode); 1708 language_mode);
1707 CHECK(info.is_null()); 1709 CHECK(info.is_null());
1708 } 1710 }
1709 1711
1710 1712
1711 static void OptimizeEmptyFunction(const char* name) { 1713 static void OptimizeEmptyFunction(const char* name) {
(...skipping 42 matching lines...) Expand 10 before | Expand all | Expand 10 after
1754 v8::V8::Initialize(); 1756 v8::V8::Initialize();
1755 1757
1756 // Some flags turn Scavenge collections into Mark-sweep collections 1758 // Some flags turn Scavenge collections into Mark-sweep collections
1757 // and hence are incompatible with this test case. 1759 // and hence are incompatible with this test case.
1758 if (FLAG_gc_global || FLAG_stress_compaction) return; 1760 if (FLAG_gc_global || FLAG_stress_compaction) return;
1759 FLAG_retain_maps_for_n_gc = 0; 1761 FLAG_retain_maps_for_n_gc = 0;
1760 1762
1761 static const int kNumTestContexts = 10; 1763 static const int kNumTestContexts = 10;
1762 1764
1763 Isolate* isolate = CcTest::i_isolate(); 1765 Isolate* isolate = CcTest::i_isolate();
1764 Heap* heap = isolate->heap();
1765 HandleScope scope(isolate); 1766 HandleScope scope(isolate);
1766 v8::Local<v8::Context> ctx[kNumTestContexts]; 1767 v8::Local<v8::Context> ctx[kNumTestContexts];
1767 if (!isolate->use_crankshaft()) return; 1768 if (!isolate->use_crankshaft()) return;
1768 1769
1769 CHECK_EQ(0, CountNativeContexts()); 1770 CHECK_EQ(0, CountNativeContexts());
1770 1771
1771 // Create a number of global contests which gets linked together. 1772 // Create a number of global contests which gets linked together.
1772 for (int i = 0; i < kNumTestContexts; i++) { 1773 for (int i = 0; i < kNumTestContexts; i++) {
1773 ctx[i] = v8::Context::New(CcTest::isolate()); 1774 ctx[i] = v8::Context::New(CcTest::isolate());
1774 1775
1775 // Collect garbage that might have been created by one of the 1776 // Collect garbage that might have been created by one of the
1776 // installed extensions. 1777 // installed extensions.
1777 isolate->compilation_cache()->Clear(); 1778 isolate->compilation_cache()->Clear();
1778 heap->CollectAllGarbage(); 1779 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1779 1780
1780 CHECK_EQ(i + 1, CountNativeContexts()); 1781 CHECK_EQ(i + 1, CountNativeContexts());
1781 1782
1782 ctx[i]->Enter(); 1783 ctx[i]->Enter();
1783 1784
1784 // Create a handle scope so no function objects get stuck in the outer 1785 // Create a handle scope so no function objects get stuck in the outer
1785 // handle scope. 1786 // handle scope.
1786 HandleScope scope(isolate); 1787 HandleScope scope(isolate);
1787 CHECK_EQ(0, CountOptimizedUserFunctions(ctx[i])); 1788 CHECK_EQ(0, CountOptimizedUserFunctions(ctx[i]));
1788 OptimizeEmptyFunction("f1"); 1789 OptimizeEmptyFunction("f1");
1789 CHECK_EQ(1, CountOptimizedUserFunctions(ctx[i])); 1790 CHECK_EQ(1, CountOptimizedUserFunctions(ctx[i]));
1790 OptimizeEmptyFunction("f2"); 1791 OptimizeEmptyFunction("f2");
1791 CHECK_EQ(2, CountOptimizedUserFunctions(ctx[i])); 1792 CHECK_EQ(2, CountOptimizedUserFunctions(ctx[i]));
1792 OptimizeEmptyFunction("f3"); 1793 OptimizeEmptyFunction("f3");
1793 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i])); 1794 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
1794 OptimizeEmptyFunction("f4"); 1795 OptimizeEmptyFunction("f4");
1795 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i])); 1796 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
1796 OptimizeEmptyFunction("f5"); 1797 OptimizeEmptyFunction("f5");
1797 CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i])); 1798 CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i]));
1798 1799
1799 // Remove function f1, and 1800 // Remove function f1, and
1800 CompileRun("f1=null"); 1801 CompileRun("f1=null");
1801 1802
1802 // Scavenge treats these references as strong. 1803 // Scavenge treats these references as strong.
1803 for (int j = 0; j < 10; j++) { 1804 for (int j = 0; j < 10; j++) {
1804 CcTest::heap()->CollectGarbage(NEW_SPACE); 1805 CcTest::CollectGarbage(NEW_SPACE);
1805 CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i])); 1806 CHECK_EQ(5, CountOptimizedUserFunctions(ctx[i]));
1806 } 1807 }
1807 1808
1808 // Mark compact handles the weak references. 1809 // Mark compact handles the weak references.
1809 isolate->compilation_cache()->Clear(); 1810 isolate->compilation_cache()->Clear();
1810 heap->CollectAllGarbage(); 1811 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1811 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i])); 1812 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
1812 1813
1813 // Get rid of f3 and f5 in the same way. 1814 // Get rid of f3 and f5 in the same way.
1814 CompileRun("f3=null"); 1815 CompileRun("f3=null");
1815 for (int j = 0; j < 10; j++) { 1816 for (int j = 0; j < 10; j++) {
1816 CcTest::heap()->CollectGarbage(NEW_SPACE); 1817 CcTest::CollectGarbage(NEW_SPACE);
1817 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i])); 1818 CHECK_EQ(4, CountOptimizedUserFunctions(ctx[i]));
1818 } 1819 }
1819 CcTest::heap()->CollectAllGarbage(); 1820 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1820 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i])); 1821 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
1821 CompileRun("f5=null"); 1822 CompileRun("f5=null");
1822 for (int j = 0; j < 10; j++) { 1823 for (int j = 0; j < 10; j++) {
1823 CcTest::heap()->CollectGarbage(NEW_SPACE); 1824 CcTest::CollectGarbage(NEW_SPACE);
1824 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i])); 1825 CHECK_EQ(3, CountOptimizedUserFunctions(ctx[i]));
1825 } 1826 }
1826 CcTest::heap()->CollectAllGarbage(); 1827 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1827 CHECK_EQ(2, CountOptimizedUserFunctions(ctx[i])); 1828 CHECK_EQ(2, CountOptimizedUserFunctions(ctx[i]));
1828 1829
1829 ctx[i]->Exit(); 1830 ctx[i]->Exit();
1830 } 1831 }
1831 1832
1832 // Force compilation cache cleanup. 1833 // Force compilation cache cleanup.
1833 CcTest::heap()->NotifyContextDisposed(true); 1834 CcTest::heap()->NotifyContextDisposed(true);
1834 CcTest::heap()->CollectAllGarbage(); 1835 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1835 1836
1836 // Dispose the native contexts one by one. 1837 // Dispose the native contexts one by one.
1837 for (int i = 0; i < kNumTestContexts; i++) { 1838 for (int i = 0; i < kNumTestContexts; i++) {
1838 // TODO(dcarney): is there a better way to do this? 1839 // TODO(dcarney): is there a better way to do this?
1839 i::Object** unsafe = reinterpret_cast<i::Object**>(*ctx[i]); 1840 i::Object** unsafe = reinterpret_cast<i::Object**>(*ctx[i]);
1840 *unsafe = CcTest::heap()->undefined_value(); 1841 *unsafe = CcTest::heap()->undefined_value();
1841 ctx[i].Clear(); 1842 ctx[i].Clear();
1842 1843
1843 // Scavenge treats these references as strong. 1844 // Scavenge treats these references as strong.
1844 for (int j = 0; j < 10; j++) { 1845 for (int j = 0; j < 10; j++) {
1845 CcTest::heap()->CollectGarbage(i::NEW_SPACE); 1846 CcTest::CollectGarbage(i::NEW_SPACE);
1846 CHECK_EQ(kNumTestContexts - i, CountNativeContexts()); 1847 CHECK_EQ(kNumTestContexts - i, CountNativeContexts());
1847 } 1848 }
1848 1849
1849 // Mark compact handles the weak references. 1850 // Mark compact handles the weak references.
1850 CcTest::heap()->CollectAllGarbage(); 1851 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1851 CHECK_EQ(kNumTestContexts - i - 1, CountNativeContexts()); 1852 CHECK_EQ(kNumTestContexts - i - 1, CountNativeContexts());
1852 } 1853 }
1853 1854
1854 CHECK_EQ(0, CountNativeContexts()); 1855 CHECK_EQ(0, CountNativeContexts());
1855 } 1856 }
1856 1857
1857 1858
1858 // Count the number of native contexts in the weak list of native contexts 1859 // Count the number of native contexts in the weak list of native contexts
1859 // causing a GC after the specified number of elements. 1860 // causing a GC after the specified number of elements.
1860 static int CountNativeContextsWithGC(Isolate* isolate, int n) { 1861 static int CountNativeContextsWithGC(Isolate* isolate, int n) {
1861 Heap* heap = isolate->heap(); 1862 Heap* heap = isolate->heap();
1862 int count = 0; 1863 int count = 0;
1863 Handle<Object> object(heap->native_contexts_list(), isolate); 1864 Handle<Object> object(heap->native_contexts_list(), isolate);
1864 while (!object->IsUndefined(isolate)) { 1865 while (!object->IsUndefined(isolate)) {
1865 count++; 1866 count++;
1866 if (count == n) heap->CollectAllGarbage(); 1867 if (count == n)
1868 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1867 object = 1869 object =
1868 Handle<Object>(Context::cast(*object)->next_context_link(), isolate); 1870 Handle<Object>(Context::cast(*object)->next_context_link(), isolate);
1869 } 1871 }
1870 return count; 1872 return count;
1871 } 1873 }
1872 1874
1873 1875
1874 // Count the number of user functions in the weak list of optimized 1876 // Count the number of user functions in the weak list of optimized
1875 // functions attached to a native context causing a GC after the 1877 // functions attached to a native context causing a GC after the
1876 // specified number of elements. 1878 // specified number of elements.
1877 static int CountOptimizedUserFunctionsWithGC(v8::Local<v8::Context> context, 1879 static int CountOptimizedUserFunctionsWithGC(v8::Local<v8::Context> context,
1878 int n) { 1880 int n) {
1879 int count = 0; 1881 int count = 0;
1880 Handle<Context> icontext = v8::Utils::OpenHandle(*context); 1882 Handle<Context> icontext = v8::Utils::OpenHandle(*context);
1881 Isolate* isolate = icontext->GetIsolate(); 1883 Isolate* isolate = icontext->GetIsolate();
1882 Handle<Object> object(icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST), 1884 Handle<Object> object(icontext->get(Context::OPTIMIZED_FUNCTIONS_LIST),
1883 isolate); 1885 isolate);
1884 while (object->IsJSFunction() && 1886 while (object->IsJSFunction() &&
1885 !Handle<JSFunction>::cast(object)->shared()->IsBuiltin()) { 1887 !Handle<JSFunction>::cast(object)->shared()->IsBuiltin()) {
1886 count++; 1888 count++;
1887 if (count == n) isolate->heap()->CollectAllGarbage(); 1889 if (count == n)
1890 isolate->heap()->CollectAllGarbage(
1891 i::Heap::kFinalizeIncrementalMarkingMask,
1892 i::GarbageCollectionReason::kTesting);
1888 object = Handle<Object>( 1893 object = Handle<Object>(
1889 Object::cast(JSFunction::cast(*object)->next_function_link()), 1894 Object::cast(JSFunction::cast(*object)->next_function_link()),
1890 isolate); 1895 isolate);
1891 } 1896 }
1892 return count; 1897 return count;
1893 } 1898 }
1894 1899
1895 1900
1896 TEST(TestInternalWeakListsTraverseWithGC) { 1901 TEST(TestInternalWeakListsTraverseWithGC) {
1897 FLAG_always_opt = false; 1902 FLAG_always_opt = false;
(...skipping 62 matching lines...) Expand 10 before | Expand all | Expand 10 after
1960 "var half_size_reg_exp;" 1965 "var half_size_reg_exp;"
1961 "while (reg_exp_source.length < 20 * 1024) {" 1966 "while (reg_exp_source.length < 20 * 1024) {"
1962 " half_size_reg_exp = reg_exp_source;" 1967 " half_size_reg_exp = reg_exp_source;"
1963 " reg_exp_source = reg_exp_source + reg_exp_source;" 1968 " reg_exp_source = reg_exp_source + reg_exp_source;"
1964 "}" 1969 "}"
1965 // Flatten string. 1970 // Flatten string.
1966 "reg_exp_source.match(/f/);"); 1971 "reg_exp_source.match(/f/);");
1967 1972
1968 // Get initial heap size after several full GCs, which will stabilize 1973 // Get initial heap size after several full GCs, which will stabilize
1969 // the heap size and return with sweeping finished completely. 1974 // the heap size and return with sweeping finished completely.
1970 CcTest::heap()->CollectAllAvailableGarbage("initial cleanup"); 1975 CcTest::CollectAllAvailableGarbage();
1971 MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector(); 1976 MarkCompactCollector* collector = CcTest::heap()->mark_compact_collector();
1972 if (collector->sweeping_in_progress()) { 1977 if (collector->sweeping_in_progress()) {
1973 collector->EnsureSweepingCompleted(); 1978 collector->EnsureSweepingCompleted();
1974 } 1979 }
1975 int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects()); 1980 int initial_size = static_cast<int>(CcTest::heap()->SizeOfObjects());
1976 1981
1977 CompileRun("'foo'.match(reg_exp_source);"); 1982 CompileRun("'foo'.match(reg_exp_source);");
1978 CcTest::heap()->CollectAllGarbage(); 1983 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1979 int size_with_regexp = static_cast<int>(CcTest::heap()->SizeOfObjects()); 1984 int size_with_regexp = static_cast<int>(CcTest::heap()->SizeOfObjects());
1980 1985
1981 CompileRun("'foo'.match(half_size_reg_exp);"); 1986 CompileRun("'foo'.match(half_size_reg_exp);");
1982 CcTest::heap()->CollectAllGarbage(); 1987 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
1983 int size_with_optimized_regexp = 1988 int size_with_optimized_regexp =
1984 static_cast<int>(CcTest::heap()->SizeOfObjects()); 1989 static_cast<int>(CcTest::heap()->SizeOfObjects());
1985 1990
1986 int size_of_regexp_code = size_with_regexp - initial_size; 1991 int size_of_regexp_code = size_with_regexp - initial_size;
1987 1992
1988 // On some platforms the debug-code flag causes huge amounts of regexp code 1993 // On some platforms the debug-code flag causes huge amounts of regexp code
1989 // to be emitted, breaking this test. 1994 // to be emitted, breaking this test.
1990 if (!FLAG_debug_code) { 1995 if (!FLAG_debug_code) {
1991 CHECK_LE(size_of_regexp_code, 1 * MB); 1996 CHECK_LE(size_of_regexp_code, 1 * MB);
1992 } 1997 }
1993 1998
1994 // Small regexp is half the size, but compiles to more than twice the code 1999 // Small regexp is half the size, but compiles to more than twice the code
1995 // due to the optimization steps. 2000 // due to the optimization steps.
1996 CHECK_GE(size_with_optimized_regexp, 2001 CHECK_GE(size_with_optimized_regexp,
1997 size_with_regexp + size_of_regexp_code * 2); 2002 size_with_regexp + size_of_regexp_code * 2);
1998 } 2003 }
1999 2004
2000 2005
2001 HEAP_TEST(TestSizeOfObjects) { 2006 HEAP_TEST(TestSizeOfObjects) {
2002 v8::V8::Initialize(); 2007 v8::V8::Initialize();
2003 Heap* heap = CcTest::heap(); 2008 Heap* heap = CcTest::heap();
2004 MarkCompactCollector* collector = heap->mark_compact_collector(); 2009 MarkCompactCollector* collector = heap->mark_compact_collector();
2005 2010
2006 // Get initial heap size after several full GCs, which will stabilize 2011 // Get initial heap size after several full GCs, which will stabilize
2007 // the heap size and return with sweeping finished completely. 2012 // the heap size and return with sweeping finished completely.
2008 heap->CollectAllAvailableGarbage("initial cleanup"); 2013 CcTest::CollectAllAvailableGarbage();
2009 if (collector->sweeping_in_progress()) { 2014 if (collector->sweeping_in_progress()) {
2010 collector->EnsureSweepingCompleted(); 2015 collector->EnsureSweepingCompleted();
2011 } 2016 }
2012 int initial_size = static_cast<int>(heap->SizeOfObjects()); 2017 int initial_size = static_cast<int>(heap->SizeOfObjects());
2013 2018
2014 { 2019 {
2015 // Allocate objects on several different old-space pages so that 2020 // Allocate objects on several different old-space pages so that
2016 // concurrent sweeper threads will be busy sweeping the old space on 2021 // concurrent sweeper threads will be busy sweeping the old space on
2017 // subsequent GC runs. 2022 // subsequent GC runs.
2018 AlwaysAllocateScope always_allocate(CcTest::i_isolate()); 2023 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
2019 int filler_size = static_cast<int>(FixedArray::SizeFor(8192)); 2024 int filler_size = static_cast<int>(FixedArray::SizeFor(8192));
2020 for (int i = 1; i <= 100; i++) { 2025 for (int i = 1; i <= 100; i++) {
2021 heap->AllocateFixedArray(8192, TENURED).ToObjectChecked(); 2026 heap->AllocateFixedArray(8192, TENURED).ToObjectChecked();
2022 CHECK_EQ(initial_size + i * filler_size, 2027 CHECK_EQ(initial_size + i * filler_size,
2023 static_cast<int>(heap->SizeOfObjects())); 2028 static_cast<int>(heap->SizeOfObjects()));
2024 } 2029 }
2025 } 2030 }
2026 2031
2027 // The heap size should go back to initial size after a full GC, even 2032 // The heap size should go back to initial size after a full GC, even
2028 // though sweeping didn't finish yet. 2033 // though sweeping didn't finish yet.
2029 heap->CollectAllGarbage(); 2034 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
2030 // Normally sweeping would not be complete here, but no guarantees. 2035 // Normally sweeping would not be complete here, but no guarantees.
2031 CHECK_EQ(initial_size, static_cast<int>(heap->SizeOfObjects())); 2036 CHECK_EQ(initial_size, static_cast<int>(heap->SizeOfObjects()));
2032 // Waiting for sweeper threads should not change heap size. 2037 // Waiting for sweeper threads should not change heap size.
2033 if (collector->sweeping_in_progress()) { 2038 if (collector->sweeping_in_progress()) {
2034 collector->EnsureSweepingCompleted(); 2039 collector->EnsureSweepingCompleted();
2035 } 2040 }
2036 CHECK_EQ(initial_size, static_cast<int>(heap->SizeOfObjects())); 2041 CHECK_EQ(initial_size, static_cast<int>(heap->SizeOfObjects()));
2037 } 2042 }
2038 2043
2039 2044
(...skipping 349 matching lines...) Expand 10 before | Expand all | Expand 10 after
2389 new_capacity = new_space->TotalCapacity(); 2394 new_capacity = new_space->TotalCapacity();
2390 CHECK(old_capacity == new_capacity); 2395 CHECK(old_capacity == new_capacity);
2391 2396
2392 // Explicitly shrinking should not affect space capacity. 2397 // Explicitly shrinking should not affect space capacity.
2393 old_capacity = new_space->TotalCapacity(); 2398 old_capacity = new_space->TotalCapacity();
2394 new_space->Shrink(); 2399 new_space->Shrink();
2395 new_capacity = new_space->TotalCapacity(); 2400 new_capacity = new_space->TotalCapacity();
2396 CHECK(old_capacity == new_capacity); 2401 CHECK(old_capacity == new_capacity);
2397 2402
2398 // Let the scavenger empty the new space. 2403 // Let the scavenger empty the new space.
2399 heap->CollectGarbage(NEW_SPACE); 2404 CcTest::CollectGarbage(NEW_SPACE);
2400 CHECK_LE(new_space->Size(), old_capacity); 2405 CHECK_LE(new_space->Size(), old_capacity);
2401 2406
2402 // Explicitly shrinking should halve the space capacity. 2407 // Explicitly shrinking should halve the space capacity.
2403 old_capacity = new_space->TotalCapacity(); 2408 old_capacity = new_space->TotalCapacity();
2404 new_space->Shrink(); 2409 new_space->Shrink();
2405 new_capacity = new_space->TotalCapacity(); 2410 new_capacity = new_space->TotalCapacity();
2406 CHECK(old_capacity == 2 * new_capacity); 2411 CHECK(old_capacity == 2 * new_capacity);
2407 2412
2408 // Consecutive shrinking should not affect space capacity. 2413 // Consecutive shrinking should not affect space capacity.
2409 old_capacity = new_space->TotalCapacity(); 2414 old_capacity = new_space->TotalCapacity();
(...skipping 13 matching lines...) Expand all
2423 } 2428 }
2424 2429
2425 v8::HandleScope scope(CcTest::isolate()); 2430 v8::HandleScope scope(CcTest::isolate());
2426 NewSpace* new_space = heap->new_space(); 2431 NewSpace* new_space = heap->new_space();
2427 intptr_t old_capacity, new_capacity; 2432 intptr_t old_capacity, new_capacity;
2428 old_capacity = new_space->TotalCapacity(); 2433 old_capacity = new_space->TotalCapacity();
2429 new_space->Grow(); 2434 new_space->Grow();
2430 new_capacity = new_space->TotalCapacity(); 2435 new_capacity = new_space->TotalCapacity();
2431 CHECK(2 * old_capacity == new_capacity); 2436 CHECK(2 * old_capacity == new_capacity);
2432 FillUpNewSpace(new_space); 2437 FillUpNewSpace(new_space);
2433 heap->CollectAllAvailableGarbage(); 2438 CcTest::CollectAllAvailableGarbage();
2434 new_capacity = new_space->TotalCapacity(); 2439 new_capacity = new_space->TotalCapacity();
2435 CHECK(old_capacity == new_capacity); 2440 CHECK(old_capacity == new_capacity);
2436 } 2441 }
2437 2442
2438 2443
2439 static int NumberOfGlobalObjects() { 2444 static int NumberOfGlobalObjects() {
2440 int count = 0; 2445 int count = 0;
2441 HeapIterator iterator(CcTest::heap()); 2446 HeapIterator iterator(CcTest::heap());
2442 for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) { 2447 for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
2443 if (obj->IsJSGlobalObject()) count++; 2448 if (obj->IsJSGlobalObject()) count++;
(...skipping 10 matching lines...) Expand all
2454 v8::HandleScope outer_scope(isolate); 2459 v8::HandleScope outer_scope(isolate);
2455 v8::Persistent<v8::Context> ctx1p; 2460 v8::Persistent<v8::Context> ctx1p;
2456 v8::Persistent<v8::Context> ctx2p; 2461 v8::Persistent<v8::Context> ctx2p;
2457 { 2462 {
2458 v8::HandleScope scope(isolate); 2463 v8::HandleScope scope(isolate);
2459 ctx1p.Reset(isolate, v8::Context::New(isolate)); 2464 ctx1p.Reset(isolate, v8::Context::New(isolate));
2460 ctx2p.Reset(isolate, v8::Context::New(isolate)); 2465 ctx2p.Reset(isolate, v8::Context::New(isolate));
2461 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter(); 2466 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2462 } 2467 }
2463 2468
2464 CcTest::heap()->CollectAllAvailableGarbage(); 2469 CcTest::CollectAllAvailableGarbage();
2465 CHECK_EQ(2, NumberOfGlobalObjects()); 2470 CHECK_EQ(2, NumberOfGlobalObjects());
2466 2471
2467 { 2472 {
2468 v8::HandleScope inner_scope(isolate); 2473 v8::HandleScope inner_scope(isolate);
2469 CompileRun("var v = {x: 42}"); 2474 CompileRun("var v = {x: 42}");
2470 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p); 2475 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2471 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p); 2476 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2472 v8::Local<v8::Value> v = 2477 v8::Local<v8::Value> v =
2473 ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked(); 2478 ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
2474 ctx2->Enter(); 2479 ctx2->Enter();
2475 CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust()); 2480 CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
2476 v8::Local<v8::Value> res = CompileRun( 2481 v8::Local<v8::Value> res = CompileRun(
2477 "function f() { return o.x; }" 2482 "function f() { return o.x; }"
2478 "for (var i = 0; i < 10; ++i) f();" 2483 "for (var i = 0; i < 10; ++i) f();"
2479 "%OptimizeFunctionOnNextCall(f);" 2484 "%OptimizeFunctionOnNextCall(f);"
2480 "f();"); 2485 "f();");
2481 CHECK_EQ(42, res->Int32Value(ctx2).FromJust()); 2486 CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
2482 CHECK(ctx2->Global() 2487 CHECK(ctx2->Global()
2483 ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0)) 2488 ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
2484 .FromJust()); 2489 .FromJust());
2485 ctx2->Exit(); 2490 ctx2->Exit();
2486 v8::Local<v8::Context>::New(isolate, ctx1)->Exit(); 2491 v8::Local<v8::Context>::New(isolate, ctx1)->Exit();
2487 ctx1p.Reset(); 2492 ctx1p.Reset();
2488 isolate->ContextDisposedNotification(); 2493 isolate->ContextDisposedNotification();
2489 } 2494 }
2490 CcTest::heap()->CollectAllAvailableGarbage(); 2495 CcTest::CollectAllAvailableGarbage();
2491 CHECK_EQ(1, NumberOfGlobalObjects()); 2496 CHECK_EQ(1, NumberOfGlobalObjects());
2492 ctx2p.Reset(); 2497 ctx2p.Reset();
2493 CcTest::heap()->CollectAllAvailableGarbage(); 2498 CcTest::CollectAllAvailableGarbage();
2494 CHECK_EQ(0, NumberOfGlobalObjects()); 2499 CHECK_EQ(0, NumberOfGlobalObjects());
2495 } 2500 }
2496 2501
2497 2502
2498 // Test that we don't embed functions from foreign contexts into 2503 // Test that we don't embed functions from foreign contexts into
2499 // optimized code. 2504 // optimized code.
2500 TEST(LeakNativeContextViaFunction) { 2505 TEST(LeakNativeContextViaFunction) {
2501 i::FLAG_allow_natives_syntax = true; 2506 i::FLAG_allow_natives_syntax = true;
2502 v8::Isolate* isolate = CcTest::isolate(); 2507 v8::Isolate* isolate = CcTest::isolate();
2503 v8::HandleScope outer_scope(isolate); 2508 v8::HandleScope outer_scope(isolate);
2504 v8::Persistent<v8::Context> ctx1p; 2509 v8::Persistent<v8::Context> ctx1p;
2505 v8::Persistent<v8::Context> ctx2p; 2510 v8::Persistent<v8::Context> ctx2p;
2506 { 2511 {
2507 v8::HandleScope scope(isolate); 2512 v8::HandleScope scope(isolate);
2508 ctx1p.Reset(isolate, v8::Context::New(isolate)); 2513 ctx1p.Reset(isolate, v8::Context::New(isolate));
2509 ctx2p.Reset(isolate, v8::Context::New(isolate)); 2514 ctx2p.Reset(isolate, v8::Context::New(isolate));
2510 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter(); 2515 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2511 } 2516 }
2512 2517
2513 CcTest::heap()->CollectAllAvailableGarbage(); 2518 CcTest::CollectAllAvailableGarbage();
2514 CHECK_EQ(2, NumberOfGlobalObjects()); 2519 CHECK_EQ(2, NumberOfGlobalObjects());
2515 2520
2516 { 2521 {
2517 v8::HandleScope inner_scope(isolate); 2522 v8::HandleScope inner_scope(isolate);
2518 CompileRun("var v = function() { return 42; }"); 2523 CompileRun("var v = function() { return 42; }");
2519 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p); 2524 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2520 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p); 2525 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2521 v8::Local<v8::Value> v = 2526 v8::Local<v8::Value> v =
2522 ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked(); 2527 ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
2523 ctx2->Enter(); 2528 ctx2->Enter();
2524 CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust()); 2529 CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
2525 v8::Local<v8::Value> res = CompileRun( 2530 v8::Local<v8::Value> res = CompileRun(
2526 "function f(x) { return x(); }" 2531 "function f(x) { return x(); }"
2527 "for (var i = 0; i < 10; ++i) f(o);" 2532 "for (var i = 0; i < 10; ++i) f(o);"
2528 "%OptimizeFunctionOnNextCall(f);" 2533 "%OptimizeFunctionOnNextCall(f);"
2529 "f(o);"); 2534 "f(o);");
2530 CHECK_EQ(42, res->Int32Value(ctx2).FromJust()); 2535 CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
2531 CHECK(ctx2->Global() 2536 CHECK(ctx2->Global()
2532 ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0)) 2537 ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
2533 .FromJust()); 2538 .FromJust());
2534 ctx2->Exit(); 2539 ctx2->Exit();
2535 ctx1->Exit(); 2540 ctx1->Exit();
2536 ctx1p.Reset(); 2541 ctx1p.Reset();
2537 isolate->ContextDisposedNotification(); 2542 isolate->ContextDisposedNotification();
2538 } 2543 }
2539 CcTest::heap()->CollectAllAvailableGarbage(); 2544 CcTest::CollectAllAvailableGarbage();
2540 CHECK_EQ(1, NumberOfGlobalObjects()); 2545 CHECK_EQ(1, NumberOfGlobalObjects());
2541 ctx2p.Reset(); 2546 ctx2p.Reset();
2542 CcTest::heap()->CollectAllAvailableGarbage(); 2547 CcTest::CollectAllAvailableGarbage();
2543 CHECK_EQ(0, NumberOfGlobalObjects()); 2548 CHECK_EQ(0, NumberOfGlobalObjects());
2544 } 2549 }
2545 2550
2546 2551
2547 TEST(LeakNativeContextViaMapKeyed) { 2552 TEST(LeakNativeContextViaMapKeyed) {
2548 i::FLAG_allow_natives_syntax = true; 2553 i::FLAG_allow_natives_syntax = true;
2549 v8::Isolate* isolate = CcTest::isolate(); 2554 v8::Isolate* isolate = CcTest::isolate();
2550 v8::HandleScope outer_scope(isolate); 2555 v8::HandleScope outer_scope(isolate);
2551 v8::Persistent<v8::Context> ctx1p; 2556 v8::Persistent<v8::Context> ctx1p;
2552 v8::Persistent<v8::Context> ctx2p; 2557 v8::Persistent<v8::Context> ctx2p;
2553 { 2558 {
2554 v8::HandleScope scope(isolate); 2559 v8::HandleScope scope(isolate);
2555 ctx1p.Reset(isolate, v8::Context::New(isolate)); 2560 ctx1p.Reset(isolate, v8::Context::New(isolate));
2556 ctx2p.Reset(isolate, v8::Context::New(isolate)); 2561 ctx2p.Reset(isolate, v8::Context::New(isolate));
2557 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter(); 2562 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2558 } 2563 }
2559 2564
2560 CcTest::heap()->CollectAllAvailableGarbage(); 2565 CcTest::CollectAllAvailableGarbage();
2561 CHECK_EQ(2, NumberOfGlobalObjects()); 2566 CHECK_EQ(2, NumberOfGlobalObjects());
2562 2567
2563 { 2568 {
2564 v8::HandleScope inner_scope(isolate); 2569 v8::HandleScope inner_scope(isolate);
2565 CompileRun("var v = [42, 43]"); 2570 CompileRun("var v = [42, 43]");
2566 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p); 2571 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2567 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p); 2572 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2568 v8::Local<v8::Value> v = 2573 v8::Local<v8::Value> v =
2569 ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked(); 2574 ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
2570 ctx2->Enter(); 2575 ctx2->Enter();
2571 CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust()); 2576 CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
2572 v8::Local<v8::Value> res = CompileRun( 2577 v8::Local<v8::Value> res = CompileRun(
2573 "function f() { return o[0]; }" 2578 "function f() { return o[0]; }"
2574 "for (var i = 0; i < 10; ++i) f();" 2579 "for (var i = 0; i < 10; ++i) f();"
2575 "%OptimizeFunctionOnNextCall(f);" 2580 "%OptimizeFunctionOnNextCall(f);"
2576 "f();"); 2581 "f();");
2577 CHECK_EQ(42, res->Int32Value(ctx2).FromJust()); 2582 CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
2578 CHECK(ctx2->Global() 2583 CHECK(ctx2->Global()
2579 ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0)) 2584 ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
2580 .FromJust()); 2585 .FromJust());
2581 ctx2->Exit(); 2586 ctx2->Exit();
2582 ctx1->Exit(); 2587 ctx1->Exit();
2583 ctx1p.Reset(); 2588 ctx1p.Reset();
2584 isolate->ContextDisposedNotification(); 2589 isolate->ContextDisposedNotification();
2585 } 2590 }
2586 CcTest::heap()->CollectAllAvailableGarbage(); 2591 CcTest::CollectAllAvailableGarbage();
2587 CHECK_EQ(1, NumberOfGlobalObjects()); 2592 CHECK_EQ(1, NumberOfGlobalObjects());
2588 ctx2p.Reset(); 2593 ctx2p.Reset();
2589 CcTest::heap()->CollectAllAvailableGarbage(); 2594 CcTest::CollectAllAvailableGarbage();
2590 CHECK_EQ(0, NumberOfGlobalObjects()); 2595 CHECK_EQ(0, NumberOfGlobalObjects());
2591 } 2596 }
2592 2597
2593 2598
2594 TEST(LeakNativeContextViaMapProto) { 2599 TEST(LeakNativeContextViaMapProto) {
2595 i::FLAG_allow_natives_syntax = true; 2600 i::FLAG_allow_natives_syntax = true;
2596 v8::Isolate* isolate = CcTest::isolate(); 2601 v8::Isolate* isolate = CcTest::isolate();
2597 v8::HandleScope outer_scope(isolate); 2602 v8::HandleScope outer_scope(isolate);
2598 v8::Persistent<v8::Context> ctx1p; 2603 v8::Persistent<v8::Context> ctx1p;
2599 v8::Persistent<v8::Context> ctx2p; 2604 v8::Persistent<v8::Context> ctx2p;
2600 { 2605 {
2601 v8::HandleScope scope(isolate); 2606 v8::HandleScope scope(isolate);
2602 ctx1p.Reset(isolate, v8::Context::New(isolate)); 2607 ctx1p.Reset(isolate, v8::Context::New(isolate));
2603 ctx2p.Reset(isolate, v8::Context::New(isolate)); 2608 ctx2p.Reset(isolate, v8::Context::New(isolate));
2604 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter(); 2609 v8::Local<v8::Context>::New(isolate, ctx1p)->Enter();
2605 } 2610 }
2606 2611
2607 CcTest::heap()->CollectAllAvailableGarbage(); 2612 CcTest::CollectAllAvailableGarbage();
2608 CHECK_EQ(2, NumberOfGlobalObjects()); 2613 CHECK_EQ(2, NumberOfGlobalObjects());
2609 2614
2610 { 2615 {
2611 v8::HandleScope inner_scope(isolate); 2616 v8::HandleScope inner_scope(isolate);
2612 CompileRun("var v = { y: 42}"); 2617 CompileRun("var v = { y: 42}");
2613 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p); 2618 v8::Local<v8::Context> ctx1 = v8::Local<v8::Context>::New(isolate, ctx1p);
2614 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p); 2619 v8::Local<v8::Context> ctx2 = v8::Local<v8::Context>::New(isolate, ctx2p);
2615 v8::Local<v8::Value> v = 2620 v8::Local<v8::Value> v =
2616 ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked(); 2621 ctx1->Global()->Get(ctx1, v8_str("v")).ToLocalChecked();
2617 ctx2->Enter(); 2622 ctx2->Enter();
2618 CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust()); 2623 CHECK(ctx2->Global()->Set(ctx2, v8_str("o"), v).FromJust());
2619 v8::Local<v8::Value> res = CompileRun( 2624 v8::Local<v8::Value> res = CompileRun(
2620 "function f() {" 2625 "function f() {"
2621 " var p = {x: 42};" 2626 " var p = {x: 42};"
2622 " p.__proto__ = o;" 2627 " p.__proto__ = o;"
2623 " return p.x;" 2628 " return p.x;"
2624 "}" 2629 "}"
2625 "for (var i = 0; i < 10; ++i) f();" 2630 "for (var i = 0; i < 10; ++i) f();"
2626 "%OptimizeFunctionOnNextCall(f);" 2631 "%OptimizeFunctionOnNextCall(f);"
2627 "f();"); 2632 "f();");
2628 CHECK_EQ(42, res->Int32Value(ctx2).FromJust()); 2633 CHECK_EQ(42, res->Int32Value(ctx2).FromJust());
2629 CHECK(ctx2->Global() 2634 CHECK(ctx2->Global()
2630 ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0)) 2635 ->Set(ctx2, v8_str("o"), v8::Int32::New(isolate, 0))
2631 .FromJust()); 2636 .FromJust());
2632 ctx2->Exit(); 2637 ctx2->Exit();
2633 ctx1->Exit(); 2638 ctx1->Exit();
2634 ctx1p.Reset(); 2639 ctx1p.Reset();
2635 isolate->ContextDisposedNotification(); 2640 isolate->ContextDisposedNotification();
2636 } 2641 }
2637 CcTest::heap()->CollectAllAvailableGarbage(); 2642 CcTest::CollectAllAvailableGarbage();
2638 CHECK_EQ(1, NumberOfGlobalObjects()); 2643 CHECK_EQ(1, NumberOfGlobalObjects());
2639 ctx2p.Reset(); 2644 ctx2p.Reset();
2640 CcTest::heap()->CollectAllAvailableGarbage(); 2645 CcTest::CollectAllAvailableGarbage();
2641 CHECK_EQ(0, NumberOfGlobalObjects()); 2646 CHECK_EQ(0, NumberOfGlobalObjects());
2642 } 2647 }
2643 2648
2644 2649
2645 TEST(InstanceOfStubWriteBarrier) { 2650 TEST(InstanceOfStubWriteBarrier) {
2646 i::FLAG_allow_natives_syntax = true; 2651 i::FLAG_allow_natives_syntax = true;
2647 #ifdef VERIFY_HEAP 2652 #ifdef VERIFY_HEAP
2648 i::FLAG_verify_heap = true; 2653 i::FLAG_verify_heap = true;
2649 #endif 2654 #endif
2650 2655
(...skipping 10 matching lines...) Expand all
2661 "function mkbar () { return new (new Function(\"\")) (); }" 2666 "function mkbar () { return new (new Function(\"\")) (); }"
2662 "function f (x) { return (x instanceof foo); }" 2667 "function f (x) { return (x instanceof foo); }"
2663 "function g () { f(mkbar()); }" 2668 "function g () { f(mkbar()); }"
2664 "f(new foo()); f(new foo());" 2669 "f(new foo()); f(new foo());"
2665 "%OptimizeFunctionOnNextCall(f);" 2670 "%OptimizeFunctionOnNextCall(f);"
2666 "f(new foo()); g();"); 2671 "f(new foo()); g();");
2667 } 2672 }
2668 2673
2669 IncrementalMarking* marking = CcTest::heap()->incremental_marking(); 2674 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
2670 marking->Stop(); 2675 marking->Stop();
2671 CcTest::heap()->StartIncrementalMarking(); 2676 CcTest::heap()->StartIncrementalMarking(i::Heap::kNoGCFlags,
2677 i::GarbageCollectionReason::kTesting);
2672 2678
2673 i::Handle<JSFunction> f = i::Handle<JSFunction>::cast( 2679 i::Handle<JSFunction> f = i::Handle<JSFunction>::cast(
2674 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast( 2680 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
2675 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked()))); 2681 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
2676 2682
2677 CHECK(f->IsOptimized()); 2683 CHECK(f->IsOptimized());
2678 2684
2679 while (!Marking::IsBlack(ObjectMarking::MarkBitFrom(f->code())) && 2685 while (!Marking::IsBlack(ObjectMarking::MarkBitFrom(f->code())) &&
2680 !marking->IsStopped()) { 2686 !marking->IsStopped()) {
2681 // Discard any pending GC requests otherwise we will get GC when we enter 2687 // Discard any pending GC requests otherwise we will get GC when we enter
2682 // code below. 2688 // code below.
2683 marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD, 2689 marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
2684 IncrementalMarking::FORCE_COMPLETION); 2690 IncrementalMarking::FORCE_COMPLETION);
2685 } 2691 }
2686 2692
2687 CHECK(marking->IsMarking()); 2693 CHECK(marking->IsMarking());
2688 2694
2689 { 2695 {
2690 v8::HandleScope scope(CcTest::isolate()); 2696 v8::HandleScope scope(CcTest::isolate());
2691 v8::Local<v8::Object> global = CcTest::global(); 2697 v8::Local<v8::Object> global = CcTest::global();
2692 v8::Local<v8::Function> g = v8::Local<v8::Function>::Cast( 2698 v8::Local<v8::Function> g = v8::Local<v8::Function>::Cast(
2693 global->Get(ctx, v8_str("g")).ToLocalChecked()); 2699 global->Get(ctx, v8_str("g")).ToLocalChecked());
2694 g->Call(ctx, global, 0, nullptr).ToLocalChecked(); 2700 g->Call(ctx, global, 0, nullptr).ToLocalChecked();
2695 } 2701 }
2696 2702
2697 CcTest::heap()->incremental_marking()->set_should_hurry(true); 2703 CcTest::heap()->incremental_marking()->set_should_hurry(true);
2698 CcTest::heap()->CollectGarbage(OLD_SPACE); 2704 CcTest::CollectGarbage(OLD_SPACE);
2699 } 2705 }
2700 2706
2701 namespace { 2707 namespace {
2702 2708
2703 int GetProfilerTicks(SharedFunctionInfo* shared) { 2709 int GetProfilerTicks(SharedFunctionInfo* shared) {
2704 return FLAG_ignition ? shared->profiler_ticks() 2710 return FLAG_ignition ? shared->profiler_ticks()
2705 : shared->code()->profiler_ticks(); 2711 : shared->code()->profiler_ticks();
2706 } 2712 }
2707 2713
2708 } // namespace 2714 } // namespace
(...skipping 25 matching lines...) Expand all
2734 "f();"); 2740 "f();");
2735 } 2741 }
2736 i::Handle<JSFunction> f = i::Handle<JSFunction>::cast( 2742 i::Handle<JSFunction> f = i::Handle<JSFunction>::cast(
2737 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast( 2743 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
2738 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked()))); 2744 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
2739 CHECK(f->IsOptimized()); 2745 CHECK(f->IsOptimized());
2740 2746
2741 // Make sure incremental marking it not running. 2747 // Make sure incremental marking it not running.
2742 CcTest::heap()->incremental_marking()->Stop(); 2748 CcTest::heap()->incremental_marking()->Stop();
2743 2749
2744 CcTest::heap()->StartIncrementalMarking(); 2750 CcTest::heap()->StartIncrementalMarking(i::Heap::kNoGCFlags,
2751 i::GarbageCollectionReason::kTesting);
2745 // The following calls will increment CcTest::heap()->global_ic_age(). 2752 // The following calls will increment CcTest::heap()->global_ic_age().
2746 CcTest::isolate()->ContextDisposedNotification(); 2753 CcTest::isolate()->ContextDisposedNotification();
2747 heap::SimulateIncrementalMarking(CcTest::heap()); 2754 heap::SimulateIncrementalMarking(CcTest::heap());
2748 CcTest::heap()->CollectAllGarbage(); 2755 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
2749 2756
2750 CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age()); 2757 CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
2751 CHECK_EQ(0, f->shared()->opt_count()); 2758 CHECK_EQ(0, f->shared()->opt_count());
2752 CHECK_EQ(0, GetProfilerTicks(f->shared())); 2759 CHECK_EQ(0, GetProfilerTicks(f->shared()));
2753 } 2760 }
2754 2761
2755 2762
2756 TEST(ResetSharedFunctionInfoCountersDuringMarkSweep) { 2763 TEST(ResetSharedFunctionInfoCountersDuringMarkSweep) {
2757 i::FLAG_stress_compaction = false; 2764 i::FLAG_stress_compaction = false;
2758 i::FLAG_allow_natives_syntax = true; 2765 i::FLAG_allow_natives_syntax = true;
(...skipping 23 matching lines...) Expand all
2782 i::Handle<JSFunction> f = i::Handle<JSFunction>::cast( 2789 i::Handle<JSFunction> f = i::Handle<JSFunction>::cast(
2783 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast( 2790 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
2784 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked()))); 2791 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
2785 CHECK(f->IsOptimized()); 2792 CHECK(f->IsOptimized());
2786 2793
2787 // Make sure incremental marking it not running. 2794 // Make sure incremental marking it not running.
2788 CcTest::heap()->incremental_marking()->Stop(); 2795 CcTest::heap()->incremental_marking()->Stop();
2789 2796
2790 // The following two calls will increment CcTest::heap()->global_ic_age(). 2797 // The following two calls will increment CcTest::heap()->global_ic_age().
2791 CcTest::isolate()->ContextDisposedNotification(); 2798 CcTest::isolate()->ContextDisposedNotification();
2792 CcTest::heap()->CollectAllGarbage(); 2799 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
2793 2800
2794 CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age()); 2801 CHECK_EQ(CcTest::heap()->global_ic_age(), f->shared()->ic_age());
2795 CHECK_EQ(0, f->shared()->opt_count()); 2802 CHECK_EQ(0, f->shared()->opt_count());
2796 CHECK_EQ(0, GetProfilerTicks(f->shared())); 2803 CHECK_EQ(0, GetProfilerTicks(f->shared()));
2797 } 2804 }
2798 2805
2799 2806
2800 HEAP_TEST(GCFlags) { 2807 HEAP_TEST(GCFlags) {
2801 CcTest::InitializeVM(); 2808 CcTest::InitializeVM();
2802 Heap* heap = CcTest::heap(); 2809 Heap* heap = CcTest::heap();
2803 2810
2804 heap->set_current_gc_flags(Heap::kNoGCFlags); 2811 heap->set_current_gc_flags(Heap::kNoGCFlags);
2805 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_); 2812 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
2806 2813
2807 // Set the flags to check whether we appropriately resets them after the GC. 2814 // Set the flags to check whether we appropriately resets them after the GC.
2808 heap->set_current_gc_flags(Heap::kAbortIncrementalMarkingMask); 2815 heap->set_current_gc_flags(Heap::kAbortIncrementalMarkingMask);
2809 heap->CollectAllGarbage(Heap::kReduceMemoryFootprintMask); 2816 CcTest::CollectAllGarbage(Heap::kReduceMemoryFootprintMask);
2810 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_); 2817 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
2811 2818
2812 MarkCompactCollector* collector = heap->mark_compact_collector(); 2819 MarkCompactCollector* collector = heap->mark_compact_collector();
2813 if (collector->sweeping_in_progress()) { 2820 if (collector->sweeping_in_progress()) {
2814 collector->EnsureSweepingCompleted(); 2821 collector->EnsureSweepingCompleted();
2815 } 2822 }
2816 2823
2817 IncrementalMarking* marking = heap->incremental_marking(); 2824 IncrementalMarking* marking = heap->incremental_marking();
2818 marking->Stop(); 2825 marking->Stop();
2819 heap->StartIncrementalMarking(Heap::kReduceMemoryFootprintMask); 2826 heap->StartIncrementalMarking(Heap::kReduceMemoryFootprintMask,
2827 i::GarbageCollectionReason::kTesting);
2820 CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask); 2828 CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);
2821 2829
2822 heap->CollectGarbage(NEW_SPACE); 2830 CcTest::CollectGarbage(NEW_SPACE);
2823 // NewSpace scavenges should not overwrite the flags. 2831 // NewSpace scavenges should not overwrite the flags.
2824 CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask); 2832 CHECK_NE(0, heap->current_gc_flags_ & Heap::kReduceMemoryFootprintMask);
2825 2833
2826 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask); 2834 CcTest::CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
2827 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_); 2835 CHECK_EQ(Heap::kNoGCFlags, heap->current_gc_flags_);
2828 } 2836 }
2829 2837
2830 2838
2831 TEST(IdleNotificationFinishMarking) { 2839 TEST(IdleNotificationFinishMarking) {
2832 i::FLAG_allow_natives_syntax = true; 2840 i::FLAG_allow_natives_syntax = true;
2833 CcTest::InitializeVM(); 2841 CcTest::InitializeVM();
2834 const int initial_gc_count = CcTest::heap()->gc_count(); 2842 const int initial_gc_count = CcTest::heap()->gc_count();
2835 heap::SimulateFullSpace(CcTest::heap()->old_space()); 2843 heap::SimulateFullSpace(CcTest::heap()->old_space());
2836 IncrementalMarking* marking = CcTest::heap()->incremental_marking(); 2844 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
2837 marking->Stop(); 2845 marking->Stop();
2838 CcTest::heap()->StartIncrementalMarking(); 2846 CcTest::heap()->StartIncrementalMarking(i::Heap::kNoGCFlags,
2847 i::GarbageCollectionReason::kTesting);
2839 2848
2840 CHECK_EQ(CcTest::heap()->gc_count(), initial_gc_count); 2849 CHECK_EQ(CcTest::heap()->gc_count(), initial_gc_count);
2841 2850
2842 // TODO(hpayer): We cannot write proper unit test right now for heap. 2851 // TODO(hpayer): We cannot write proper unit test right now for heap.
2843 // The ideal test would call kMaxIdleMarkingDelayCounter to test the 2852 // The ideal test would call kMaxIdleMarkingDelayCounter to test the
2844 // marking delay counter. 2853 // marking delay counter.
2845 2854
2846 // Perform a huge incremental marking step but don't complete marking. 2855 // Perform a huge incremental marking step but don't complete marking.
2847 do { 2856 do {
2848 marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD, 2857 marking->Step(1 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
(...skipping 501 matching lines...) Expand 10 before | Expand all | Expand 10 after
3350 i::Handle<JSReceiver> root = 3359 i::Handle<JSReceiver> root =
3351 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast( 3360 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
3352 CcTest::global()->Get(ctx, v8_str("root")).ToLocalChecked())); 3361 CcTest::global()->Get(ctx, v8_str("root")).ToLocalChecked()));
3353 3362
3354 // Count number of live transitions before marking. 3363 // Count number of live transitions before marking.
3355 int transitions_before = CountMapTransitions(root->map()); 3364 int transitions_before = CountMapTransitions(root->map());
3356 CompileRun("%DebugPrint(root);"); 3365 CompileRun("%DebugPrint(root);");
3357 CHECK_EQ(transitions_count, transitions_before); 3366 CHECK_EQ(transitions_count, transitions_before);
3358 3367
3359 heap::SimulateIncrementalMarking(CcTest::heap()); 3368 heap::SimulateIncrementalMarking(CcTest::heap());
3360 CcTest::heap()->CollectAllGarbage(); 3369 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
3361 3370
3362 // Count number of live transitions after marking. Note that one transition 3371 // Count number of live transitions after marking. Note that one transition
3363 // is left, because 'o' still holds an instance of one transition target. 3372 // is left, because 'o' still holds an instance of one transition target.
3364 int transitions_after = CountMapTransitions(root->map()); 3373 int transitions_after = CountMapTransitions(root->map());
3365 CompileRun("%DebugPrint(root);"); 3374 CompileRun("%DebugPrint(root);");
3366 CHECK_EQ(1, transitions_after); 3375 CHECK_EQ(1, transitions_after);
3367 } 3376 }
3368 3377
3369 3378
3370 #ifdef DEBUG 3379 #ifdef DEBUG
(...skipping 43 matching lines...) Expand 10 before | Expand all | Expand 10 after
3414 3423
3415 // Count number of live transitions before marking. 3424 // Count number of live transitions before marking.
3416 int transitions_before = CountMapTransitions(root->map()); 3425 int transitions_before = CountMapTransitions(root->map());
3417 CHECK_EQ(transitions_count, transitions_before); 3426 CHECK_EQ(transitions_count, transitions_before);
3418 3427
3419 // Get rid of o 3428 // Get rid of o
3420 CompileRun("o = new F;" 3429 CompileRun("o = new F;"
3421 "root = new F"); 3430 "root = new F");
3422 root = GetByName("root"); 3431 root = GetByName("root");
3423 AddPropertyTo(2, root, "funny"); 3432 AddPropertyTo(2, root, "funny");
3424 CcTest::heap()->CollectGarbage(NEW_SPACE); 3433 CcTest::CollectGarbage(NEW_SPACE);
3425 3434
3426 // Count number of live transitions after marking. Note that one transition 3435 // Count number of live transitions after marking. Note that one transition
3427 // is left, because 'o' still holds an instance of one transition target. 3436 // is left, because 'o' still holds an instance of one transition target.
3428 int transitions_after = CountMapTransitions( 3437 int transitions_after = CountMapTransitions(
3429 Map::cast(root->map()->GetBackPointer())); 3438 Map::cast(root->map()->GetBackPointer()));
3430 CHECK_EQ(1, transitions_after); 3439 CHECK_EQ(1, transitions_after);
3431 } 3440 }
3432 3441
3433 3442
3434 TEST(TransitionArrayShrinksDuringAllocToOne) { 3443 TEST(TransitionArrayShrinksDuringAllocToOne) {
3435 i::FLAG_stress_compaction = false; 3444 i::FLAG_stress_compaction = false;
3436 i::FLAG_allow_natives_syntax = true; 3445 i::FLAG_allow_natives_syntax = true;
3437 CcTest::InitializeVM(); 3446 CcTest::InitializeVM();
3438 v8::HandleScope scope(CcTest::isolate()); 3447 v8::HandleScope scope(CcTest::isolate());
3439 static const int transitions_count = 10; 3448 static const int transitions_count = 10;
3440 CompileRun("function F() {}"); 3449 CompileRun("function F() {}");
3441 AddTransitions(transitions_count); 3450 AddTransitions(transitions_count);
3442 CompileRun("var root = new F;"); 3451 CompileRun("var root = new F;");
3443 Handle<JSObject> root = GetByName("root"); 3452 Handle<JSObject> root = GetByName("root");
3444 3453
3445 // Count number of live transitions before marking. 3454 // Count number of live transitions before marking.
3446 int transitions_before = CountMapTransitions(root->map()); 3455 int transitions_before = CountMapTransitions(root->map());
3447 CHECK_EQ(transitions_count, transitions_before); 3456 CHECK_EQ(transitions_count, transitions_before);
3448 3457
3449 root = GetByName("root"); 3458 root = GetByName("root");
3450 AddPropertyTo(2, root, "funny"); 3459 AddPropertyTo(2, root, "funny");
3451 CcTest::heap()->CollectGarbage(NEW_SPACE); 3460 CcTest::CollectGarbage(NEW_SPACE);
3452 3461
3453 // Count number of live transitions after marking. Note that one transition 3462 // Count number of live transitions after marking. Note that one transition
3454 // is left, because 'o' still holds an instance of one transition target. 3463 // is left, because 'o' still holds an instance of one transition target.
3455 int transitions_after = CountMapTransitions( 3464 int transitions_after = CountMapTransitions(
3456 Map::cast(root->map()->GetBackPointer())); 3465 Map::cast(root->map()->GetBackPointer()));
3457 CHECK_EQ(2, transitions_after); 3466 CHECK_EQ(2, transitions_after);
3458 } 3467 }
3459 3468
3460 3469
3461 TEST(TransitionArrayShrinksDuringAllocToOnePropertyFound) { 3470 TEST(TransitionArrayShrinksDuringAllocToOnePropertyFound) {
3462 i::FLAG_stress_compaction = false; 3471 i::FLAG_stress_compaction = false;
3463 i::FLAG_allow_natives_syntax = true; 3472 i::FLAG_allow_natives_syntax = true;
3464 CcTest::InitializeVM(); 3473 CcTest::InitializeVM();
3465 v8::HandleScope scope(CcTest::isolate()); 3474 v8::HandleScope scope(CcTest::isolate());
3466 static const int transitions_count = 10; 3475 static const int transitions_count = 10;
3467 CompileRun("function F() {}"); 3476 CompileRun("function F() {}");
3468 AddTransitions(transitions_count); 3477 AddTransitions(transitions_count);
3469 CompileRun("var root = new F;"); 3478 CompileRun("var root = new F;");
3470 Handle<JSObject> root = GetByName("root"); 3479 Handle<JSObject> root = GetByName("root");
3471 3480
3472 // Count number of live transitions before marking. 3481 // Count number of live transitions before marking.
3473 int transitions_before = CountMapTransitions(root->map()); 3482 int transitions_before = CountMapTransitions(root->map());
3474 CHECK_EQ(transitions_count, transitions_before); 3483 CHECK_EQ(transitions_count, transitions_before);
3475 3484
3476 root = GetByName("root"); 3485 root = GetByName("root");
3477 AddPropertyTo(0, root, "prop9"); 3486 AddPropertyTo(0, root, "prop9");
3478 CcTest::i_isolate()->heap()->CollectGarbage(OLD_SPACE); 3487 CcTest::CollectGarbage(OLD_SPACE);
3479 3488
3480 // Count number of live transitions after marking. Note that one transition 3489 // Count number of live transitions after marking. Note that one transition
3481 // is left, because 'o' still holds an instance of one transition target. 3490 // is left, because 'o' still holds an instance of one transition target.
3482 int transitions_after = CountMapTransitions( 3491 int transitions_after = CountMapTransitions(
3483 Map::cast(root->map()->GetBackPointer())); 3492 Map::cast(root->map()->GetBackPointer()));
3484 CHECK_EQ(1, transitions_after); 3493 CHECK_EQ(1, transitions_after);
3485 } 3494 }
3486 3495
3487 3496
3488 TEST(TransitionArraySimpleToFull) { 3497 TEST(TransitionArraySimpleToFull) {
(...skipping 45 matching lines...) Expand 10 before | Expand all | Expand 10 after
3534 CompileRun("function f(o) {" 3543 CompileRun("function f(o) {"
3535 " o.foo = 0;" 3544 " o.foo = 0;"
3536 "}" 3545 "}"
3537 "f(new Object);" 3546 "f(new Object);"
3538 "f(root);"); 3547 "f(root);");
3539 3548
3540 // This bug only triggers with aggressive IC clearing. 3549 // This bug only triggers with aggressive IC clearing.
3541 CcTest::heap()->AgeInlineCaches(); 3550 CcTest::heap()->AgeInlineCaches();
3542 3551
3543 // Explicitly request GC to perform final marking step and sweeping. 3552 // Explicitly request GC to perform final marking step and sweeping.
3544 CcTest::heap()->CollectAllGarbage(); 3553 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
3545 3554
3546 Handle<JSReceiver> root = v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast( 3555 Handle<JSReceiver> root = v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
3547 CcTest::global() 3556 CcTest::global()
3548 ->Get(CcTest::isolate()->GetCurrentContext(), v8_str("root")) 3557 ->Get(CcTest::isolate()->GetCurrentContext(), v8_str("root"))
3549 .ToLocalChecked())); 3558 .ToLocalChecked()));
3550 3559
3551 // The root object should be in a sane state. 3560 // The root object should be in a sane state.
3552 CHECK(root->IsJSObject()); 3561 CHECK(root->IsJSObject());
3553 CHECK(root->map()->IsMap()); 3562 CHECK(root->map()->IsMap());
3554 } 3563 }
(...skipping 22 matching lines...) Expand all
3577 "f(new Object);" 3586 "f(new Object);"
3578 "f(new Object);" 3587 "f(new Object);"
3579 "%OptimizeFunctionOnNextCall(f);" 3588 "%OptimizeFunctionOnNextCall(f);"
3580 "f(root);" 3589 "f(root);"
3581 "%DeoptimizeFunction(f);"); 3590 "%DeoptimizeFunction(f);");
3582 3591
3583 // This bug only triggers with aggressive IC clearing. 3592 // This bug only triggers with aggressive IC clearing.
3584 CcTest::heap()->AgeInlineCaches(); 3593 CcTest::heap()->AgeInlineCaches();
3585 3594
3586 // Explicitly request GC to perform final marking step and sweeping. 3595 // Explicitly request GC to perform final marking step and sweeping.
3587 CcTest::heap()->CollectAllGarbage(); 3596 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
3588 3597
3589 Handle<JSReceiver> root = v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast( 3598 Handle<JSReceiver> root = v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(
3590 CcTest::global() 3599 CcTest::global()
3591 ->Get(CcTest::isolate()->GetCurrentContext(), v8_str("root")) 3600 ->Get(CcTest::isolate()->GetCurrentContext(), v8_str("root"))
3592 .ToLocalChecked())); 3601 .ToLocalChecked()));
3593 3602
3594 // The root object should be in a sane state. 3603 // The root object should be in a sane state.
3595 CHECK(root->IsJSObject()); 3604 CHECK(root->IsJSObject());
3596 CHECK(root->map()->IsMap()); 3605 CHECK(root->map()->IsMap());
3597 } 3606 }
(...skipping 27 matching lines...) Expand all
3625 const int overall_page_count = number_of_test_pages + initial_page_count; 3634 const int overall_page_count = number_of_test_pages + initial_page_count;
3626 for (int i = 0; i < number_of_test_pages; i++) { 3635 for (int i = 0; i < number_of_test_pages; i++) {
3627 AlwaysAllocateScope always_allocate(isolate); 3636 AlwaysAllocateScope always_allocate(isolate);
3628 heap::SimulateFullSpace(old_space); 3637 heap::SimulateFullSpace(old_space);
3629 factory->NewFixedArray(1, TENURED); 3638 factory->NewFixedArray(1, TENURED);
3630 } 3639 }
3631 CHECK_EQ(overall_page_count, old_space->CountTotalPages()); 3640 CHECK_EQ(overall_page_count, old_space->CountTotalPages());
3632 3641
3633 // Triggering one GC will cause a lot of garbage to be discovered but 3642 // Triggering one GC will cause a lot of garbage to be discovered but
3634 // even spread across all allocated pages. 3643 // even spread across all allocated pages.
3635 heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask, 3644 CcTest::CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask);
3636 "triggered for preparation");
3637 CHECK_GE(overall_page_count, old_space->CountTotalPages()); 3645 CHECK_GE(overall_page_count, old_space->CountTotalPages());
3638 3646
3639 // Triggering subsequent GCs should cause at least half of the pages 3647 // Triggering subsequent GCs should cause at least half of the pages
3640 // to be released to the OS after at most two cycles. 3648 // to be released to the OS after at most two cycles.
3641 heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask, 3649 CcTest::CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask);
3642 "triggered by test 1");
3643 CHECK_GE(overall_page_count, old_space->CountTotalPages()); 3650 CHECK_GE(overall_page_count, old_space->CountTotalPages());
3644 heap->CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask, 3651 CcTest::CollectAllGarbage(Heap::kFinalizeIncrementalMarkingMask);
3645 "triggered by test 2");
3646 CHECK_GE(overall_page_count, old_space->CountTotalPages() * 2); 3652 CHECK_GE(overall_page_count, old_space->CountTotalPages() * 2);
3647 3653
3648 // Triggering a last-resort GC should cause all pages to be released to the 3654 // Triggering a last-resort GC should cause all pages to be released to the
3649 // OS so that other processes can seize the memory. If we get a failure here 3655 // OS so that other processes can seize the memory. If we get a failure here
3650 // where there are 2 pages left instead of 1, then we should increase the 3656 // where there are 2 pages left instead of 1, then we should increase the
3651 // size of the first page a little in SizeOfFirstPage in spaces.cc. The 3657 // size of the first page a little in SizeOfFirstPage in spaces.cc. The
3652 // first page should be small in order to reduce memory used when the VM 3658 // first page should be small in order to reduce memory used when the VM
3653 // boots, but if the 20 small arrays don't fit on the first page then that's 3659 // boots, but if the 20 small arrays don't fit on the first page then that's
3654 // an indication that it is too small. 3660 // an indication that it is too small.
3655 heap->CollectAllAvailableGarbage("triggered really hard"); 3661 CcTest::CollectAllAvailableGarbage();
3656 CHECK_EQ(initial_page_count, old_space->CountTotalPages()); 3662 CHECK_EQ(initial_page_count, old_space->CountTotalPages());
3657 } 3663 }
3658 3664
3659 static int forced_gc_counter = 0; 3665 static int forced_gc_counter = 0;
3660 3666
3661 void MockUseCounterCallback(v8::Isolate* isolate, 3667 void MockUseCounterCallback(v8::Isolate* isolate,
3662 v8::Isolate::UseCounterFeature feature) { 3668 v8::Isolate::UseCounterFeature feature) {
3663 isolate->GetCurrentContext(); 3669 isolate->GetCurrentContext();
3664 if (feature == v8::Isolate::kForcedGC) { 3670 if (feature == v8::Isolate::kForcedGC) {
3665 forced_gc_counter++; 3671 forced_gc_counter++;
(...skipping 64 matching lines...) Expand 10 before | Expand all | Expand 10 after
3730 FeedbackVectorHelper feedback_helper(feedback_vector); 3736 FeedbackVectorHelper feedback_helper(feedback_vector);
3731 3737
3732 int expected_slots = 2; 3738 int expected_slots = 2;
3733 CHECK_EQ(expected_slots, feedback_helper.slot_count()); 3739 CHECK_EQ(expected_slots, feedback_helper.slot_count());
3734 int slot1 = 0; 3740 int slot1 = 0;
3735 int slot2 = 1; 3741 int slot2 = 1;
3736 CHECK(feedback_vector->Get(feedback_helper.slot(slot1))->IsWeakCell()); 3742 CHECK(feedback_vector->Get(feedback_helper.slot(slot1))->IsWeakCell());
3737 CHECK(feedback_vector->Get(feedback_helper.slot(slot2))->IsWeakCell()); 3743 CHECK(feedback_vector->Get(feedback_helper.slot(slot2))->IsWeakCell());
3738 3744
3739 heap::SimulateIncrementalMarking(CcTest::heap()); 3745 heap::SimulateIncrementalMarking(CcTest::heap());
3740 CcTest::heap()->CollectAllGarbage(); 3746 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
3741 3747
3742 CHECK(!WeakCell::cast(feedback_vector->Get(feedback_helper.slot(slot1))) 3748 CHECK(!WeakCell::cast(feedback_vector->Get(feedback_helper.slot(slot1)))
3743 ->cleared()); 3749 ->cleared());
3744 CHECK(!WeakCell::cast(feedback_vector->Get(feedback_helper.slot(slot2))) 3750 CHECK(!WeakCell::cast(feedback_vector->Get(feedback_helper.slot(slot2)))
3745 ->cleared()); 3751 ->cleared());
3746 } 3752 }
3747 3753
3748 3754
3749 static Code* FindFirstIC(Code* code, Code::Kind kind) { 3755 static Code* FindFirstIC(Code* code, Code::Kind kind) {
3750 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) | 3756 int mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
(...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after
3787 "function fun() { this.x = 1; };" 3793 "function fun() { this.x = 1; };"
3788 "function f(o) { return new o(); } f(fun); f(fun);"); 3794 "function f(o) { return new o(); } f(fun); f(fun);");
3789 Handle<JSFunction> f = Handle<JSFunction>::cast( 3795 Handle<JSFunction> f = Handle<JSFunction>::cast(
3790 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast( 3796 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3791 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked()))); 3797 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
3792 3798
3793 Handle<TypeFeedbackVector> vector(f->feedback_vector()); 3799 Handle<TypeFeedbackVector> vector(f->feedback_vector());
3794 CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell()); 3800 CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());
3795 3801
3796 heap::SimulateIncrementalMarking(CcTest::heap()); 3802 heap::SimulateIncrementalMarking(CcTest::heap());
3797 CcTest::heap()->CollectAllGarbage(); 3803 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
3798 3804
3799 CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell()); 3805 CHECK(vector->Get(FeedbackVectorSlot(0))->IsWeakCell());
3800 } 3806 }
3801 3807
3802 TEST(IncrementalMarkingPreservesMonomorphicIC) { 3808 TEST(IncrementalMarkingPreservesMonomorphicIC) {
3803 if (i::FLAG_always_opt) return; 3809 if (i::FLAG_always_opt) return;
3804 CcTest::InitializeVM(); 3810 CcTest::InitializeVM();
3805 v8::HandleScope scope(CcTest::isolate()); 3811 v8::HandleScope scope(CcTest::isolate());
3806 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext(); 3812 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
3807 // Prepare function f that contains a monomorphic IC for object 3813 // Prepare function f that contains a monomorphic IC for object
3808 // originating from the same native context. 3814 // originating from the same native context.
3809 CompileRun("function fun() { this.x = 1; }; var obj = new fun();" 3815 CompileRun("function fun() { this.x = 1; }; var obj = new fun();"
3810 "function f(o) { return o.x; } f(obj); f(obj);"); 3816 "function f(o) { return o.x; } f(obj); f(obj);");
3811 Handle<JSFunction> f = Handle<JSFunction>::cast( 3817 Handle<JSFunction> f = Handle<JSFunction>::cast(
3812 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast( 3818 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3813 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked()))); 3819 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
3814 3820
3815 CheckVectorIC(f, 0, MONOMORPHIC); 3821 CheckVectorIC(f, 0, MONOMORPHIC);
3816 3822
3817 heap::SimulateIncrementalMarking(CcTest::heap()); 3823 heap::SimulateIncrementalMarking(CcTest::heap());
3818 CcTest::heap()->CollectAllGarbage(); 3824 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
3819 3825
3820 CheckVectorIC(f, 0, MONOMORPHIC); 3826 CheckVectorIC(f, 0, MONOMORPHIC);
3821 } 3827 }
3822 3828
3823 TEST(IncrementalMarkingPreservesPolymorphicIC) { 3829 TEST(IncrementalMarkingPreservesPolymorphicIC) {
3824 if (i::FLAG_always_opt) return; 3830 if (i::FLAG_always_opt) return;
3825 CcTest::InitializeVM(); 3831 CcTest::InitializeVM();
3826 v8::HandleScope scope(CcTest::isolate()); 3832 v8::HandleScope scope(CcTest::isolate());
3827 v8::Local<v8::Value> obj1, obj2; 3833 v8::Local<v8::Value> obj1, obj2;
3828 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext(); 3834 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
(...skipping 16 matching lines...) Expand all
3845 CHECK(CcTest::global()->Set(ctx, v8_str("obj2"), obj2).FromJust()); 3851 CHECK(CcTest::global()->Set(ctx, v8_str("obj2"), obj2).FromJust());
3846 CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);"); 3852 CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
3847 Handle<JSFunction> f = Handle<JSFunction>::cast( 3853 Handle<JSFunction> f = Handle<JSFunction>::cast(
3848 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast( 3854 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3849 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked()))); 3855 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
3850 3856
3851 CheckVectorIC(f, 0, POLYMORPHIC); 3857 CheckVectorIC(f, 0, POLYMORPHIC);
3852 3858
3853 // Fire context dispose notification. 3859 // Fire context dispose notification.
3854 heap::SimulateIncrementalMarking(CcTest::heap()); 3860 heap::SimulateIncrementalMarking(CcTest::heap());
3855 CcTest::heap()->CollectAllGarbage(); 3861 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
3856 3862
3857 CheckVectorIC(f, 0, POLYMORPHIC); 3863 CheckVectorIC(f, 0, POLYMORPHIC);
3858 } 3864 }
3859 3865
3860 TEST(ContextDisposeDoesntClearPolymorphicIC) { 3866 TEST(ContextDisposeDoesntClearPolymorphicIC) {
3861 if (i::FLAG_always_opt) return; 3867 if (i::FLAG_always_opt) return;
3862 CcTest::InitializeVM(); 3868 CcTest::InitializeVM();
3863 v8::HandleScope scope(CcTest::isolate()); 3869 v8::HandleScope scope(CcTest::isolate());
3864 v8::Local<v8::Value> obj1, obj2; 3870 v8::Local<v8::Value> obj1, obj2;
3865 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext(); 3871 v8::Local<v8::Context> ctx = CcTest::isolate()->GetCurrentContext();
(...skipping 17 matching lines...) Expand all
3883 CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);"); 3889 CompileRun("function f(o) { return o.x; } f(obj1); f(obj1); f(obj2);");
3884 Handle<JSFunction> f = Handle<JSFunction>::cast( 3890 Handle<JSFunction> f = Handle<JSFunction>::cast(
3885 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast( 3891 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
3886 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked()))); 3892 CcTest::global()->Get(ctx, v8_str("f")).ToLocalChecked())));
3887 3893
3888 CheckVectorIC(f, 0, POLYMORPHIC); 3894 CheckVectorIC(f, 0, POLYMORPHIC);
3889 3895
3890 // Fire context dispose notification. 3896 // Fire context dispose notification.
3891 CcTest::isolate()->ContextDisposedNotification(); 3897 CcTest::isolate()->ContextDisposedNotification();
3892 heap::SimulateIncrementalMarking(CcTest::heap()); 3898 heap::SimulateIncrementalMarking(CcTest::heap());
3893 CcTest::heap()->CollectAllGarbage(); 3899 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
3894 3900
3895 CheckVectorIC(f, 0, POLYMORPHIC); 3901 CheckVectorIC(f, 0, POLYMORPHIC);
3896 } 3902 }
3897 3903
3898 3904
3899 class SourceResource : public v8::String::ExternalOneByteStringResource { 3905 class SourceResource : public v8::String::ExternalOneByteStringResource {
3900 public: 3906 public:
3901 explicit SourceResource(const char* data) 3907 explicit SourceResource(const char* data)
3902 : data_(data), length_(strlen(data)) { } 3908 : data_(data), length_(strlen(data)) { }
3903 3909
(...skipping 21 matching lines...) Expand all
3925 // to check whether the data is being released since the external string 3931 // to check whether the data is being released since the external string
3926 // resource's callback is fired when the external string is GC'ed. 3932 // resource's callback is fired when the external string is GC'ed.
3927 i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate); 3933 i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
3928 v8::HandleScope scope(isolate); 3934 v8::HandleScope scope(isolate);
3929 SourceResource* resource = new SourceResource(i::StrDup(source)); 3935 SourceResource* resource = new SourceResource(i::StrDup(source));
3930 { 3936 {
3931 v8::HandleScope scope(isolate); 3937 v8::HandleScope scope(isolate);
3932 v8::Local<v8::Context> ctx = isolate->GetCurrentContext(); 3938 v8::Local<v8::Context> ctx = isolate->GetCurrentContext();
3933 v8::Local<v8::String> source_string = 3939 v8::Local<v8::String> source_string =
3934 v8::String::NewExternalOneByte(isolate, resource).ToLocalChecked(); 3940 v8::String::NewExternalOneByte(isolate, resource).ToLocalChecked();
3935 i_isolate->heap()->CollectAllAvailableGarbage(); 3941 i_isolate->heap()->CollectAllAvailableGarbage(
3942 i::GarbageCollectionReason::kTesting);
3936 v8::Script::Compile(ctx, source_string) 3943 v8::Script::Compile(ctx, source_string)
3937 .ToLocalChecked() 3944 .ToLocalChecked()
3938 ->Run(ctx) 3945 ->Run(ctx)
3939 .ToLocalChecked(); 3946 .ToLocalChecked();
3940 CHECK(!resource->IsDisposed()); 3947 CHECK(!resource->IsDisposed());
3941 } 3948 }
3942 // i_isolate->heap()->CollectAllAvailableGarbage(); 3949 // i_isolate->heap()->CollectAllAvailableGarbage();
3943 CHECK(!resource->IsDisposed()); 3950 CHECK(!resource->IsDisposed());
3944 3951
3945 CompileRun(accessor); 3952 CompileRun(accessor);
3946 i_isolate->heap()->CollectAllAvailableGarbage(); 3953 i_isolate->heap()->CollectAllAvailableGarbage(
3954 i::GarbageCollectionReason::kTesting);
3947 3955
3948 // External source has been released. 3956 // External source has been released.
3949 CHECK(resource->IsDisposed()); 3957 CHECK(resource->IsDisposed());
3950 delete resource; 3958 delete resource;
3951 } 3959 }
3952 3960
3953 3961
3954 UNINITIALIZED_TEST(ReleaseStackTraceData) { 3962 UNINITIALIZED_TEST(ReleaseStackTraceData) {
3955 if (i::FLAG_always_opt) { 3963 if (i::FLAG_always_opt) {
3956 // TODO(ulan): Remove this once the memory leak via code_next_link is fixed. 3964 // TODO(ulan): Remove this once the memory leak via code_next_link is fixed.
(...skipping 55 matching lines...) Expand 10 before | Expand all | Expand 10 after
4012 4020
4013 TEST(Regress159140) { 4021 TEST(Regress159140) {
4014 i::FLAG_allow_natives_syntax = true; 4022 i::FLAG_allow_natives_syntax = true;
4015 CcTest::InitializeVM(); 4023 CcTest::InitializeVM();
4016 Isolate* isolate = CcTest::i_isolate(); 4024 Isolate* isolate = CcTest::i_isolate();
4017 LocalContext env; 4025 LocalContext env;
4018 Heap* heap = isolate->heap(); 4026 Heap* heap = isolate->heap();
4019 HandleScope scope(isolate); 4027 HandleScope scope(isolate);
4020 4028
4021 // Perform one initial GC to enable code flushing. 4029 // Perform one initial GC to enable code flushing.
4022 heap->CollectAllGarbage(); 4030 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
4023 4031
4024 // Prepare several closures that are all eligible for code flushing 4032 // Prepare several closures that are all eligible for code flushing
4025 // because all reachable ones are not optimized. Make sure that the 4033 // because all reachable ones are not optimized. Make sure that the
4026 // optimized code object is directly reachable through a handle so 4034 // optimized code object is directly reachable through a handle so
4027 // that it is marked black during incremental marking. 4035 // that it is marked black during incremental marking.
4028 Handle<Code> code; 4036 Handle<Code> code;
4029 { 4037 {
4030 HandleScope inner_scope(isolate); 4038 HandleScope inner_scope(isolate);
4031 CompileRun("function h(x) {}" 4039 CompileRun("function h(x) {}"
4032 "function mkClosure() {" 4040 "function mkClosure() {"
(...skipping 23 matching lines...) Expand all
4056 } 4064 }
4057 4065
4058 code = inner_scope.CloseAndEscape(Handle<Code>(f->code())); 4066 code = inner_scope.CloseAndEscape(Handle<Code>(f->code()));
4059 } 4067 }
4060 4068
4061 // Simulate incremental marking so that the functions are enqueued as 4069 // Simulate incremental marking so that the functions are enqueued as
4062 // code flushing candidates. Then optimize one function. Finally 4070 // code flushing candidates. Then optimize one function. Finally
4063 // finish the GC to complete code flushing. 4071 // finish the GC to complete code flushing.
4064 heap::SimulateIncrementalMarking(heap); 4072 heap::SimulateIncrementalMarking(heap);
4065 CompileRun("%OptimizeFunctionOnNextCall(g); g(3);"); 4073 CompileRun("%OptimizeFunctionOnNextCall(g); g(3);");
4066 heap->CollectAllGarbage(); 4074 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
4067 4075
4068 // Unoptimized code is missing and the deoptimizer will go ballistic. 4076 // Unoptimized code is missing and the deoptimizer will go ballistic.
4069 CompileRun("g('bozo');"); 4077 CompileRun("g('bozo');");
4070 } 4078 }
4071 4079
4072 4080
4073 TEST(Regress165495) { 4081 TEST(Regress165495) {
4074 i::FLAG_allow_natives_syntax = true; 4082 i::FLAG_allow_natives_syntax = true;
4075 CcTest::InitializeVM(); 4083 CcTest::InitializeVM();
4076 Isolate* isolate = CcTest::i_isolate(); 4084 Isolate* isolate = CcTest::i_isolate();
4077 Heap* heap = isolate->heap(); 4085 Heap* heap = isolate->heap();
4078 HandleScope scope(isolate); 4086 HandleScope scope(isolate);
4079 4087
4080 // Perform one initial GC to enable code flushing. 4088 // Perform one initial GC to enable code flushing.
4081 heap->CollectAllGarbage(); 4089 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
4082 4090
4083 // Prepare an optimized closure that the optimized code map will get 4091 // Prepare an optimized closure that the optimized code map will get
4084 // populated. Then age the unoptimized code to trigger code flushing 4092 // populated. Then age the unoptimized code to trigger code flushing
4085 // but make sure the optimized code is unreachable. 4093 // but make sure the optimized code is unreachable.
4086 { 4094 {
4087 HandleScope inner_scope(isolate); 4095 HandleScope inner_scope(isolate);
4088 LocalContext env; 4096 LocalContext env;
4089 CompileRun("function mkClosure() {" 4097 CompileRun("function mkClosure() {"
4090 " return function(x) { return x + 1; };" 4098 " return function(x) { return x + 1; };"
4091 "}" 4099 "}"
4092 "var f = mkClosure();" 4100 "var f = mkClosure();"
4093 "f(1); f(2);" 4101 "f(1); f(2);"
4094 "%OptimizeFunctionOnNextCall(f); f(3);"); 4102 "%OptimizeFunctionOnNextCall(f); f(3);");
4095 4103
4096 Handle<JSFunction> f = Handle<JSFunction>::cast( 4104 Handle<JSFunction> f = Handle<JSFunction>::cast(
4097 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast( 4105 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
4098 CcTest::global()->Get(env.local(), v8_str("f")).ToLocalChecked()))); 4106 CcTest::global()->Get(env.local(), v8_str("f")).ToLocalChecked())));
4099 CHECK(f->is_compiled()); 4107 CHECK(f->is_compiled());
4100 const int kAgingThreshold = 6; 4108 const int kAgingThreshold = 6;
4101 for (int i = 0; i < kAgingThreshold; i++) { 4109 for (int i = 0; i < kAgingThreshold; i++) {
4102 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2)); 4110 f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
4103 } 4111 }
4104 4112
4105 CompileRun("f = null;"); 4113 CompileRun("f = null;");
4106 } 4114 }
4107 4115
4108 // Simulate incremental marking so that unoptimized code is flushed 4116 // Simulate incremental marking so that unoptimized code is flushed
4109 // even though it still is cached in the optimized code map. 4117 // even though it still is cached in the optimized code map.
4110 heap::SimulateIncrementalMarking(heap); 4118 heap::SimulateIncrementalMarking(heap);
4111 heap->CollectAllGarbage(); 4119 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
4112 4120
4113 // Make a new closure that will get code installed from the code map. 4121 // Make a new closure that will get code installed from the code map.
4114 // Unoptimized code is missing and the deoptimizer will go ballistic. 4122 // Unoptimized code is missing and the deoptimizer will go ballistic.
4115 CompileRun("var g = mkClosure(); g('bozo');"); 4123 CompileRun("var g = mkClosure(); g('bozo');");
4116 } 4124 }
4117 4125
4118 4126
4119 TEST(Regress169209) { 4127 TEST(Regress169209) {
4120 i::FLAG_stress_compaction = false; 4128 i::FLAG_stress_compaction = false;
4121 i::FLAG_allow_natives_syntax = true; 4129 i::FLAG_allow_natives_syntax = true;
4122 4130
4123 CcTest::InitializeVM(); 4131 CcTest::InitializeVM();
4124 Isolate* isolate = CcTest::i_isolate(); 4132 Isolate* isolate = CcTest::i_isolate();
4125 Heap* heap = isolate->heap(); 4133 Heap* heap = isolate->heap();
4126 HandleScope scope(isolate); 4134 HandleScope scope(isolate);
4127 4135
4128 // Perform one initial GC to enable code flushing. 4136 // Perform one initial GC to enable code flushing.
4129 heap->CollectAllGarbage(); 4137 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
4130 4138
4131 // Prepare a shared function info eligible for code flushing for which 4139 // Prepare a shared function info eligible for code flushing for which
4132 // the unoptimized code will be replaced during optimization. 4140 // the unoptimized code will be replaced during optimization.
4133 Handle<SharedFunctionInfo> shared1; 4141 Handle<SharedFunctionInfo> shared1;
4134 { 4142 {
4135 HandleScope inner_scope(isolate); 4143 HandleScope inner_scope(isolate);
4136 LocalContext env; 4144 LocalContext env;
4137 CompileRun("function f() { return 'foobar'; }" 4145 CompileRun("function f() { return 'foobar'; }"
4138 "function g(x) { if (x) f(); }" 4146 "function g(x) { if (x) f(); }"
4139 "f();" 4147 "f();"
(...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after
4176 4184
4177 // Simulate incremental marking and collect code flushing candidates. 4185 // Simulate incremental marking and collect code flushing candidates.
4178 heap::SimulateIncrementalMarking(heap); 4186 heap::SimulateIncrementalMarking(heap);
4179 CHECK(shared1->code()->gc_metadata() != NULL); 4187 CHECK(shared1->code()->gc_metadata() != NULL);
4180 4188
4181 // Optimize function and make sure the unoptimized code is replaced. 4189 // Optimize function and make sure the unoptimized code is replaced.
4182 CompileRun("%OptimizeFunctionOnNextCall(g);" 4190 CompileRun("%OptimizeFunctionOnNextCall(g);"
4183 "g(false);"); 4191 "g(false);");
4184 4192
4185 // Finish garbage collection cycle. 4193 // Finish garbage collection cycle.
4186 heap->CollectAllGarbage(); 4194 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
4187 CHECK(shared1->code()->gc_metadata() == NULL); 4195 CHECK(shared1->code()->gc_metadata() == NULL);
4188 } 4196 }
4189 4197
4190 4198
4191 TEST(Regress169928) { 4199 TEST(Regress169928) {
4192 i::FLAG_allow_natives_syntax = true; 4200 i::FLAG_allow_natives_syntax = true;
4193 i::FLAG_crankshaft = false; 4201 i::FLAG_crankshaft = false;
4194 CcTest::InitializeVM(); 4202 CcTest::InitializeVM();
4195 Isolate* isolate = CcTest::i_isolate(); 4203 Isolate* isolate = CcTest::i_isolate();
4196 LocalContext env; 4204 LocalContext env;
(...skipping 20 matching lines...) Expand all
4217 // prepare the heap 4225 // prepare the heap
4218 v8::Local<v8::String> mote_code_string = 4226 v8::Local<v8::String> mote_code_string =
4219 v8_str("fastliteralcase(mote, 2.5);"); 4227 v8_str("fastliteralcase(mote, 2.5);");
4220 4228
4221 v8::Local<v8::String> array_name = v8_str("mote"); 4229 v8::Local<v8::String> array_name = v8_str("mote");
4222 CHECK(CcTest::global() 4230 CHECK(CcTest::global()
4223 ->Set(env.local(), array_name, v8::Int32::New(CcTest::isolate(), 0)) 4231 ->Set(env.local(), array_name, v8::Int32::New(CcTest::isolate(), 0))
4224 .FromJust()); 4232 .FromJust());
4225 4233
4226 // First make sure we flip spaces 4234 // First make sure we flip spaces
4227 CcTest::heap()->CollectGarbage(NEW_SPACE); 4235 CcTest::CollectGarbage(NEW_SPACE);
4228 4236
4229 // Allocate the object. 4237 // Allocate the object.
4230 Handle<FixedArray> array_data = factory->NewFixedArray(2, NOT_TENURED); 4238 Handle<FixedArray> array_data = factory->NewFixedArray(2, NOT_TENURED);
4231 array_data->set(0, Smi::FromInt(1)); 4239 array_data->set(0, Smi::FromInt(1));
4232 array_data->set(1, Smi::FromInt(2)); 4240 array_data->set(1, Smi::FromInt(2));
4233 4241
4234 heap::AllocateAllButNBytes( 4242 heap::AllocateAllButNBytes(
4235 CcTest::heap()->new_space(), 4243 CcTest::heap()->new_space(),
4236 JSArray::kSize + AllocationMemento::kSize + kPointerSize); 4244 JSArray::kSize + AllocationMemento::kSize + kPointerSize);
4237 4245
(...skipping 89 matching lines...) Expand 10 before | Expand all | Expand 10 after
4327 TEST(Regress514122) { 4335 TEST(Regress514122) {
4328 i::FLAG_flush_optimized_code_cache = false; 4336 i::FLAG_flush_optimized_code_cache = false;
4329 i::FLAG_allow_natives_syntax = true; 4337 i::FLAG_allow_natives_syntax = true;
4330 CcTest::InitializeVM(); 4338 CcTest::InitializeVM();
4331 Isolate* isolate = CcTest::i_isolate(); 4339 Isolate* isolate = CcTest::i_isolate();
4332 LocalContext env; 4340 LocalContext env;
4333 Heap* heap = isolate->heap(); 4341 Heap* heap = isolate->heap();
4334 HandleScope scope(isolate); 4342 HandleScope scope(isolate);
4335 4343
4336 // Perfrom one initial GC to enable code flushing. 4344 // Perfrom one initial GC to enable code flushing.
4337 CcTest::heap()->CollectAllGarbage(); 4345 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
4338 4346
4339 // Prepare function whose optimized code map we can use. 4347 // Prepare function whose optimized code map we can use.
4340 Handle<SharedFunctionInfo> shared; 4348 Handle<SharedFunctionInfo> shared;
4341 { 4349 {
4342 HandleScope inner_scope(isolate); 4350 HandleScope inner_scope(isolate);
4343 CompileRun("function f() { return 1 }" 4351 CompileRun("function f() { return 1 }"
4344 "f(); %OptimizeFunctionOnNextCall(f); f();"); 4352 "f(); %OptimizeFunctionOnNextCall(f); f();");
4345 4353
4346 Handle<JSFunction> f = Handle<JSFunction>::cast( 4354 Handle<JSFunction> f = Handle<JSFunction>::cast(
4347 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast( 4355 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
(...skipping 57 matching lines...) Expand 10 before | Expand all | Expand 10 after
4405 4413
4406 // Add the code several times to the optimized code map. This will leave old 4414 // Add the code several times to the optimized code map. This will leave old
4407 // copies of the optimized code map unreachable but still marked. 4415 // copies of the optimized code map unreachable but still marked.
4408 for (int i = 3; i < 6; ++i) { 4416 for (int i = 3; i < 6; ++i) {
4409 HandleScope inner_scope(isolate); 4417 HandleScope inner_scope(isolate);
4410 BailoutId id = BailoutId(i); 4418 BailoutId id = BailoutId(i);
4411 SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id); 4419 SharedFunctionInfo::AddToOptimizedCodeMap(shared, context, code, lit, id);
4412 } 4420 }
4413 4421
4414 // Trigger a GC to flush out the bug. 4422 // Trigger a GC to flush out the bug.
4415 heap->CollectGarbage(i::OLD_SPACE, "fire in the hole"); 4423 CcTest::CollectGarbage(i::OLD_SPACE);
4416 boomer->Print(); 4424 boomer->Print();
4417 } 4425 }
4418 4426
4419 4427
4420 TEST(OptimizedCodeMapReuseEntries) { 4428 TEST(OptimizedCodeMapReuseEntries) {
4421 i::FLAG_flush_optimized_code_cache = false; 4429 i::FLAG_flush_optimized_code_cache = false;
4422 i::FLAG_allow_natives_syntax = true; 4430 i::FLAG_allow_natives_syntax = true;
4423 // BUG(v8:4598): Since TurboFan doesn't treat maps in code weakly, we can't 4431 // BUG(v8:4598): Since TurboFan doesn't treat maps in code weakly, we can't
4424 // run this test. 4432 // run this test.
4425 if (i::FLAG_turbo) return; 4433 if (i::FLAG_turbo) return;
4426 CcTest::InitializeVM(); 4434 CcTest::InitializeVM();
4427 v8::Isolate* v8_isolate = CcTest::isolate(); 4435 v8::Isolate* v8_isolate = CcTest::isolate();
4428 Isolate* isolate = CcTest::i_isolate(); 4436 Isolate* isolate = CcTest::i_isolate();
4429 Heap* heap = isolate->heap();
4430 HandleScope scope(isolate); 4437 HandleScope scope(isolate);
4431 4438
4432 // Create 3 contexts, allow the 2nd one to be disposed, and verify that 4439 // Create 3 contexts, allow the 2nd one to be disposed, and verify that
4433 // a 4th context will re-use the weak slots in the optimized code map 4440 // a 4th context will re-use the weak slots in the optimized code map
4434 // to hold data, rather than expanding the map. 4441 // to hold data, rather than expanding the map.
4435 v8::Local<v8::Context> c1 = v8::Context::New(v8_isolate); 4442 v8::Local<v8::Context> c1 = v8::Context::New(v8_isolate);
4436 const char* source = "function foo(x) { var l = [1]; return x+l[0]; }"; 4443 const char* source = "function foo(x) { var l = [1]; return x+l[0]; }";
4437 v8::ScriptCompiler::Source script_source( 4444 v8::ScriptCompiler::Source script_source(
4438 v8::String::NewFromUtf8(v8_isolate, source, v8::NewStringType::kNormal) 4445 v8::String::NewFromUtf8(v8_isolate, source, v8::NewStringType::kNormal)
4439 .ToLocalChecked()); 4446 .ToLocalChecked());
4440 v8::Local<v8::UnboundScript> indep = 4447 v8::Local<v8::UnboundScript> indep =
4441 v8::ScriptCompiler::CompileUnboundScript(v8_isolate, &script_source) 4448 v8::ScriptCompiler::CompileUnboundScript(v8_isolate, &script_source)
4442 .ToLocalChecked(); 4449 .ToLocalChecked();
4443 const char* toplevel = "foo(3); %OptimizeFunctionOnNextCall(foo); foo(3);"; 4450 const char* toplevel = "foo(3); %OptimizeFunctionOnNextCall(foo); foo(3);";
4444 // Perfrom one initial GC to enable code flushing. 4451 // Perfrom one initial GC to enable code flushing.
4445 heap->CollectAllGarbage(); 4452 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
4446 4453
4447 c1->Enter(); 4454 c1->Enter();
4448 indep->BindToCurrentContext()->Run(c1).ToLocalChecked(); 4455 indep->BindToCurrentContext()->Run(c1).ToLocalChecked();
4449 CompileRun(toplevel); 4456 CompileRun(toplevel);
4450 4457
4451 Handle<SharedFunctionInfo> shared; 4458 Handle<SharedFunctionInfo> shared;
4452 Handle<JSFunction> foo = Handle<JSFunction>::cast( 4459 Handle<JSFunction> foo = Handle<JSFunction>::cast(
4453 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast( 4460 v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
4454 CcTest::global()->Get(c1, v8_str("foo")).ToLocalChecked()))); 4461 CcTest::global()->Get(c1, v8_str("foo")).ToLocalChecked())));
4455 CHECK(foo->shared()->is_compiled()); 4462 CHECK(foo->shared()->is_compiled());
(...skipping 13 matching lines...) Expand all
4469 HandleScope scope(isolate); 4476 HandleScope scope(isolate);
4470 v8::Local<v8::Context> c3 = v8::Context::New(v8_isolate); 4477 v8::Local<v8::Context> c3 = v8::Context::New(v8_isolate);
4471 c3->Enter(); 4478 c3->Enter();
4472 indep->BindToCurrentContext()->Run(c3).ToLocalChecked(); 4479 indep->BindToCurrentContext()->Run(c3).ToLocalChecked();
4473 CompileRun(toplevel); 4480 CompileRun(toplevel);
4474 c3->Exit(); 4481 c3->Exit();
4475 4482
4476 // Now, collect garbage. Context c2 should have no roots to it, and it's 4483 // Now, collect garbage. Context c2 should have no roots to it, and it's
4477 // entry in the optimized code map should be free for a new context. 4484 // entry in the optimized code map should be free for a new context.
4478 for (int i = 0; i < 4; i++) { 4485 for (int i = 0; i < 4; i++) {
4479 heap->CollectAllGarbage(); 4486 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
4480 } 4487 }
4481 4488
4482 Handle<FixedArray> optimized_code_map = 4489 Handle<FixedArray> optimized_code_map =
4483 handle(shared->optimized_code_map()); 4490 handle(shared->optimized_code_map());
4484 // There should be 3 entries in the map. 4491 // There should be 3 entries in the map.
4485 CHECK_EQ( 4492 CHECK_EQ(
4486 3, ((optimized_code_map->length() - SharedFunctionInfo::kEntriesStart) / 4493 3, ((optimized_code_map->length() - SharedFunctionInfo::kEntriesStart) /
4487 SharedFunctionInfo::kEntryLength)); 4494 SharedFunctionInfo::kEntryLength));
4488 // But one of them (formerly for c2) should be cleared. 4495 // But one of them (formerly for c2) should be cleared.
4489 int cleared_count = 0; 4496 int cleared_count = 0;
(...skipping 34 matching lines...) Expand 10 before | Expand all | Expand 10 after
4524 CHECK_EQ(0, cleared_count); 4531 CHECK_EQ(0, cleared_count);
4525 } 4532 }
4526 } 4533 }
4527 4534
4528 4535
4529 TEST(Regress513496) { 4536 TEST(Regress513496) {
4530 i::FLAG_flush_optimized_code_cache = false; 4537 i::FLAG_flush_optimized_code_cache = false;
4531 i::FLAG_allow_natives_syntax = true; 4538 i::FLAG_allow_natives_syntax = true;
4532 CcTest::InitializeVM(); 4539 CcTest::InitializeVM();
4533 Isolate* isolate = CcTest::i_isolate(); 4540 Isolate* isolate = CcTest::i_isolate();
4534 Heap* heap = isolate->heap();
4535 HandleScope scope(isolate); 4541 HandleScope scope(isolate);
4536 4542
4537 // Perfrom one initial GC to enable code flushing. 4543 // Perfrom one initial GC to enable code flushing.
4538 CcTest::heap()->CollectAllGarbage(); 4544 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
4539 4545
4540 // Prepare an optimized closure with containing an inlined function. Then age 4546 // Prepare an optimized closure with containing an inlined function. Then age
4541 // the inlined unoptimized code to trigger code flushing but make sure the 4547 // the inlined unoptimized code to trigger code flushing but make sure the
4542 // outer optimized code is kept in the optimized code map. 4548 // outer optimized code is kept in the optimized code map.
4543 Handle<SharedFunctionInfo> shared; 4549 Handle<SharedFunctionInfo> shared;
4544 { 4550 {
4545 LocalContext context; 4551 LocalContext context;
4546 HandleScope inner_scope(isolate); 4552 HandleScope inner_scope(isolate);
4547 CompileRun( 4553 CompileRun(
4548 "function g(x) { return x + 1 }" 4554 "function g(x) { return x + 1 }"
(...skipping 23 matching lines...) Expand all
4572 CompileRun("f = null"); 4578 CompileRun("f = null");
4573 } 4579 }
4574 4580
4575 // Lookup the optimized code and keep it alive. 4581 // Lookup the optimized code and keep it alive.
4576 CodeAndLiterals result = shared->SearchOptimizedCodeMap( 4582 CodeAndLiterals result = shared->SearchOptimizedCodeMap(
4577 isolate->context()->native_context(), BailoutId::None()); 4583 isolate->context()->native_context(), BailoutId::None());
4578 Handle<Code> optimized_code(result.code, isolate); 4584 Handle<Code> optimized_code(result.code, isolate);
4579 4585
4580 // Finish a full GC cycle so that the unoptimized code of 'g' is flushed even 4586 // Finish a full GC cycle so that the unoptimized code of 'g' is flushed even
4581 // though the optimized code for 'f' is reachable via the optimized code map. 4587 // though the optimized code for 'f' is reachable via the optimized code map.
4582 heap->CollectAllGarbage(); 4588 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
4583 4589
4584 // Make a new closure that will get code installed from the code map. 4590 // Make a new closure that will get code installed from the code map.
4585 // Unoptimized code is missing and the deoptimizer will go ballistic. 4591 // Unoptimized code is missing and the deoptimizer will go ballistic.
4586 CompileRun("var h = mkClosure(); h('bozo');"); 4592 CompileRun("var h = mkClosure(); h('bozo');");
4587 } 4593 }
4588 4594
4589 4595
4590 TEST(LargeObjectSlotRecording) { 4596 TEST(LargeObjectSlotRecording) {
4591 FLAG_manual_evacuation_candidates_selection = true; 4597 FLAG_manual_evacuation_candidates_selection = true;
4592 CcTest::InitializeVM(); 4598 CcTest::InitializeVM();
(...skipping 22 matching lines...) Expand all
4615 4621
4616 // Create references from the large object to the object on the evacuation 4622 // Create references from the large object to the object on the evacuation
4617 // candidate. 4623 // candidate.
4618 const int kStep = size / 10; 4624 const int kStep = size / 10;
4619 for (int i = 0; i < size; i += kStep) { 4625 for (int i = 0; i < size; i += kStep) {
4620 lo->set(i, *lit); 4626 lo->set(i, *lit);
4621 CHECK(lo->get(i) == old_location); 4627 CHECK(lo->get(i) == old_location);
4622 } 4628 }
4623 4629
4624 // Move the evaucation candidate object. 4630 // Move the evaucation candidate object.
4625 CcTest::heap()->CollectAllGarbage(); 4631 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
4626 4632
4627 // Verify that the pointers in the large object got updated. 4633 // Verify that the pointers in the large object got updated.
4628 for (int i = 0; i < size; i += kStep) { 4634 for (int i = 0; i < size; i += kStep) {
4629 CHECK_EQ(lo->get(i), *lit); 4635 CHECK_EQ(lo->get(i), *lit);
4630 CHECK(lo->get(i) != old_location); 4636 CHECK(lo->get(i) != old_location);
4631 } 4637 }
4632 } 4638 }
4633 4639
4634 4640
4635 class DummyVisitor : public ObjectVisitor { 4641 class DummyVisitor : public ObjectVisitor {
(...skipping 26 matching lines...) Expand all
4662 TEST(IncrementalMarkingStepMakesBigProgressWithLargeObjects) { 4668 TEST(IncrementalMarkingStepMakesBigProgressWithLargeObjects) {
4663 CcTest::InitializeVM(); 4669 CcTest::InitializeVM();
4664 v8::HandleScope scope(CcTest::isolate()); 4670 v8::HandleScope scope(CcTest::isolate());
4665 CompileRun("function f(n) {" 4671 CompileRun("function f(n) {"
4666 " var a = new Array(n);" 4672 " var a = new Array(n);"
4667 " for (var i = 0; i < n; i += 100) a[i] = i;" 4673 " for (var i = 0; i < n; i += 100) a[i] = i;"
4668 "};" 4674 "};"
4669 "f(10 * 1024 * 1024);"); 4675 "f(10 * 1024 * 1024);");
4670 IncrementalMarking* marking = CcTest::heap()->incremental_marking(); 4676 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
4671 if (marking->IsStopped()) { 4677 if (marking->IsStopped()) {
4672 CcTest::heap()->StartIncrementalMarking(); 4678 CcTest::heap()->StartIncrementalMarking(
4679 i::Heap::kNoGCFlags, i::GarbageCollectionReason::kTesting);
4673 } 4680 }
4674 // This big step should be sufficient to mark the whole array. 4681 // This big step should be sufficient to mark the whole array.
4675 marking->Step(100 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD, 4682 marking->Step(100 * MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
4676 IncrementalMarking::FORCE_COMPLETION); 4683 IncrementalMarking::FORCE_COMPLETION);
4677 CHECK(marking->IsComplete() || 4684 CHECK(marking->IsComplete() ||
4678 marking->IsReadyToOverApproximateWeakClosure()); 4685 marking->IsReadyToOverApproximateWeakClosure());
4679 } 4686 }
4680 4687
4681 4688
4682 TEST(DisableInlineAllocation) { 4689 TEST(DisableInlineAllocation) {
(...skipping 90 matching lines...) Expand 10 before | Expand all | Expand 10 after
4773 } 4780 }
4774 4781
4775 // TurboFan respects pretenuring feedback from allocation sites, Crankshaft 4782 // TurboFan respects pretenuring feedback from allocation sites, Crankshaft
4776 // does not. Either is fine for the purposes of this test. 4783 // does not. Either is fine for the purposes of this test.
4777 CHECK(dependency_group_count == 1 || dependency_group_count == 2); 4784 CHECK(dependency_group_count == 1 || dependency_group_count == 2);
4778 } 4785 }
4779 4786
4780 // Now make sure that a gc should get rid of the function, even though we 4787 // Now make sure that a gc should get rid of the function, even though we
4781 // still have the allocation site alive. 4788 // still have the allocation site alive.
4782 for (int i = 0; i < 4; i++) { 4789 for (int i = 0; i < 4; i++) {
4783 heap->CollectAllGarbage(); 4790 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
4784 } 4791 }
4785 4792
4786 // The site still exists because of our global handle, but the code is no 4793 // The site still exists because of our global handle, but the code is no
4787 // longer referred to by dependent_code(). 4794 // longer referred to by dependent_code().
4788 CHECK(site->dependent_code()->object_at(0)->IsWeakCell() && 4795 CHECK(site->dependent_code()->object_at(0)->IsWeakCell() &&
4789 WeakCell::cast(site->dependent_code()->object_at(0))->cleared()); 4796 WeakCell::cast(site->dependent_code()->object_at(0))->cleared());
4790 } 4797 }
4791 4798
4792 4799
4793 TEST(CellsInOptimizedCodeAreWeak) { 4800 TEST(CellsInOptimizedCodeAreWeak) {
(...skipping 27 matching lines...) Expand all
4821 4828
4822 Handle<JSFunction> bar = Handle<JSFunction>::cast(v8::Utils::OpenHandle( 4829 Handle<JSFunction> bar = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
4823 *v8::Local<v8::Function>::Cast(CcTest::global() 4830 *v8::Local<v8::Function>::Cast(CcTest::global()
4824 ->Get(context.local(), v8_str("bar")) 4831 ->Get(context.local(), v8_str("bar"))
4825 .ToLocalChecked()))); 4832 .ToLocalChecked())));
4826 code = scope.CloseAndEscape(Handle<Code>(bar->code())); 4833 code = scope.CloseAndEscape(Handle<Code>(bar->code()));
4827 } 4834 }
4828 4835
4829 // Now make sure that a gc should get rid of the function 4836 // Now make sure that a gc should get rid of the function
4830 for (int i = 0; i < 4; i++) { 4837 for (int i = 0; i < 4; i++) {
4831 heap->CollectAllGarbage(); 4838 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
4832 } 4839 }
4833 4840
4834 CHECK(code->marked_for_deoptimization()); 4841 CHECK(code->marked_for_deoptimization());
4835 } 4842 }
4836 4843
4837 4844
4838 TEST(ObjectsInOptimizedCodeAreWeak) { 4845 TEST(ObjectsInOptimizedCodeAreWeak) {
4839 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return; 4846 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
4840 i::FLAG_weak_embedded_objects_in_optimized_code = true; 4847 i::FLAG_weak_embedded_objects_in_optimized_code = true;
4841 i::FLAG_allow_natives_syntax = true; 4848 i::FLAG_allow_natives_syntax = true;
(...skipping 22 matching lines...) Expand all
4864 4871
4865 Handle<JSFunction> bar = Handle<JSFunction>::cast(v8::Utils::OpenHandle( 4872 Handle<JSFunction> bar = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
4866 *v8::Local<v8::Function>::Cast(CcTest::global() 4873 *v8::Local<v8::Function>::Cast(CcTest::global()
4867 ->Get(context.local(), v8_str("bar")) 4874 ->Get(context.local(), v8_str("bar"))
4868 .ToLocalChecked()))); 4875 .ToLocalChecked())));
4869 code = scope.CloseAndEscape(Handle<Code>(bar->code())); 4876 code = scope.CloseAndEscape(Handle<Code>(bar->code()));
4870 } 4877 }
4871 4878
4872 // Now make sure that a gc should get rid of the function 4879 // Now make sure that a gc should get rid of the function
4873 for (int i = 0; i < 4; i++) { 4880 for (int i = 0; i < 4; i++) {
4874 heap->CollectAllGarbage(); 4881 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
4875 } 4882 }
4876 4883
4877 CHECK(code->marked_for_deoptimization()); 4884 CHECK(code->marked_for_deoptimization());
4878 } 4885 }
4879 4886
4880 TEST(NewSpaceObjectsInOptimizedCode) { 4887 TEST(NewSpaceObjectsInOptimizedCode) {
4881 if (i::FLAG_always_opt || !i::FLAG_crankshaft || i::FLAG_turbo) return; 4888 if (i::FLAG_always_opt || !i::FLAG_crankshaft || i::FLAG_turbo) return;
4882 i::FLAG_weak_embedded_objects_in_optimized_code = true; 4889 i::FLAG_weak_embedded_objects_in_optimized_code = true;
4883 i::FLAG_allow_natives_syntax = true; 4890 i::FLAG_allow_natives_syntax = true;
4884 CcTest::InitializeVM(); 4891 CcTest::InitializeVM();
(...skipping 29 matching lines...) Expand all
4914 *v8::Local<v8::Function>::Cast(CcTest::global() 4921 *v8::Local<v8::Function>::Cast(CcTest::global()
4915 ->Get(context.local(), v8_str("bar")) 4922 ->Get(context.local(), v8_str("bar"))
4916 .ToLocalChecked()))); 4923 .ToLocalChecked())));
4917 4924
4918 Handle<JSFunction> foo = Handle<JSFunction>::cast(v8::Utils::OpenHandle( 4925 Handle<JSFunction> foo = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
4919 *v8::Local<v8::Function>::Cast(CcTest::global() 4926 *v8::Local<v8::Function>::Cast(CcTest::global()
4920 ->Get(context.local(), v8_str("foo")) 4927 ->Get(context.local(), v8_str("foo"))
4921 .ToLocalChecked()))); 4928 .ToLocalChecked())));
4922 4929
4923 CHECK(heap->InNewSpace(*foo)); 4930 CHECK(heap->InNewSpace(*foo));
4924 heap->CollectGarbage(NEW_SPACE); 4931 CcTest::CollectGarbage(NEW_SPACE);
4925 heap->CollectGarbage(NEW_SPACE); 4932 CcTest::CollectGarbage(NEW_SPACE);
4926 CHECK(!heap->InNewSpace(*foo)); 4933 CHECK(!heap->InNewSpace(*foo));
4927 #ifdef VERIFY_HEAP 4934 #ifdef VERIFY_HEAP
4928 heap->Verify(); 4935 heap->Verify();
4929 #endif 4936 #endif
4930 CHECK(!bar->code()->marked_for_deoptimization()); 4937 CHECK(!bar->code()->marked_for_deoptimization());
4931 code = scope.CloseAndEscape(Handle<Code>(bar->code())); 4938 code = scope.CloseAndEscape(Handle<Code>(bar->code()));
4932 } 4939 }
4933 4940
4934 // Now make sure that a gc should get rid of the function 4941 // Now make sure that a gc should get rid of the function
4935 for (int i = 0; i < 4; i++) { 4942 for (int i = 0; i < 4; i++) {
4936 heap->CollectAllGarbage(); 4943 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
4937 } 4944 }
4938 4945
4939 CHECK(code->marked_for_deoptimization()); 4946 CHECK(code->marked_for_deoptimization());
4940 } 4947 }
4941 4948
4942 TEST(NoWeakHashTableLeakWithIncrementalMarking) { 4949 TEST(NoWeakHashTableLeakWithIncrementalMarking) {
4943 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return; 4950 if (i::FLAG_always_opt || !i::FLAG_crankshaft) return;
4944 if (!i::FLAG_incremental_marking) return; 4951 if (!i::FLAG_incremental_marking) return;
4945 i::FLAG_weak_embedded_objects_in_optimized_code = true; 4952 i::FLAG_weak_embedded_objects_in_optimized_code = true;
4946 i::FLAG_allow_natives_syntax = true; 4953 i::FLAG_allow_natives_syntax = true;
4947 i::FLAG_compilation_cache = false; 4954 i::FLAG_compilation_cache = false;
4948 i::FLAG_retain_maps_for_n_gc = 0; 4955 i::FLAG_retain_maps_for_n_gc = 0;
4949 CcTest::InitializeVM(); 4956 CcTest::InitializeVM();
4950 Isolate* isolate = CcTest::i_isolate(); 4957 Isolate* isolate = CcTest::i_isolate();
4951 4958
4952 // Do not run for no-snap builds. 4959 // Do not run for no-snap builds.
4953 if (!i::Snapshot::HasContextSnapshot(isolate, 0)) return; 4960 if (!i::Snapshot::HasContextSnapshot(isolate, 0)) return;
4954 4961
4955 v8::internal::Heap* heap = CcTest::heap(); 4962 v8::internal::Heap* heap = CcTest::heap();
4956 4963
4957 // Get a clean slate regarding optimized functions on the heap. 4964 // Get a clean slate regarding optimized functions on the heap.
4958 i::Deoptimizer::DeoptimizeAll(isolate); 4965 i::Deoptimizer::DeoptimizeAll(isolate);
4959 heap->CollectAllGarbage(); 4966 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
4960 4967
4961 if (!isolate->use_crankshaft()) return; 4968 if (!isolate->use_crankshaft()) return;
4962 HandleScope outer_scope(heap->isolate()); 4969 HandleScope outer_scope(heap->isolate());
4963 for (int i = 0; i < 3; i++) { 4970 for (int i = 0; i < 3; i++) {
4964 heap::SimulateIncrementalMarking(heap); 4971 heap::SimulateIncrementalMarking(heap);
4965 { 4972 {
4966 LocalContext context; 4973 LocalContext context;
4967 HandleScope scope(heap->isolate()); 4974 HandleScope scope(heap->isolate());
4968 EmbeddedVector<char, 256> source; 4975 EmbeddedVector<char, 256> source;
4969 SNPrintF(source, 4976 SNPrintF(source,
4970 "function bar%d() {" 4977 "function bar%d() {"
4971 " return foo%d(1);" 4978 " return foo%d(1);"
4972 "};" 4979 "};"
4973 "function foo%d(x) { with (x) { return 1 + x; } };" 4980 "function foo%d(x) { with (x) { return 1 + x; } };"
4974 "bar%d();" 4981 "bar%d();"
4975 "bar%d();" 4982 "bar%d();"
4976 "bar%d();" 4983 "bar%d();"
4977 "%%OptimizeFunctionOnNextCall(bar%d);" 4984 "%%OptimizeFunctionOnNextCall(bar%d);"
4978 "bar%d();", 4985 "bar%d();",
4979 i, i, i, i, i, i, i, i); 4986 i, i, i, i, i, i, i, i);
4980 CompileRun(source.start()); 4987 CompileRun(source.start());
4981 } 4988 }
4982 // We have to abort incremental marking here to abandon black pages. 4989 // We have to abort incremental marking here to abandon black pages.
4983 heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask); 4990 CcTest::CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
4984 } 4991 }
4985 int elements = 0; 4992 int elements = 0;
4986 if (heap->weak_object_to_code_table()->IsHashTable()) { 4993 if (heap->weak_object_to_code_table()->IsHashTable()) {
4987 WeakHashTable* t = WeakHashTable::cast(heap->weak_object_to_code_table()); 4994 WeakHashTable* t = WeakHashTable::cast(heap->weak_object_to_code_table());
4988 elements = t->NumberOfElements(); 4995 elements = t->NumberOfElements();
4989 } 4996 }
4990 CHECK_EQ(0, elements); 4997 CHECK_EQ(0, elements);
4991 } 4998 }
4992 4999
4993 5000
(...skipping 28 matching lines...) Expand all
5022 TEST(NextCodeLinkIsWeak) { 5029 TEST(NextCodeLinkIsWeak) {
5023 i::FLAG_always_opt = false; 5030 i::FLAG_always_opt = false;
5024 i::FLAG_allow_natives_syntax = true; 5031 i::FLAG_allow_natives_syntax = true;
5025 CcTest::InitializeVM(); 5032 CcTest::InitializeVM();
5026 Isolate* isolate = CcTest::i_isolate(); 5033 Isolate* isolate = CcTest::i_isolate();
5027 v8::internal::Heap* heap = CcTest::heap(); 5034 v8::internal::Heap* heap = CcTest::heap();
5028 5035
5029 if (!isolate->use_crankshaft()) return; 5036 if (!isolate->use_crankshaft()) return;
5030 HandleScope outer_scope(heap->isolate()); 5037 HandleScope outer_scope(heap->isolate());
5031 Handle<Code> code; 5038 Handle<Code> code;
5032 heap->CollectAllAvailableGarbage(); 5039 CcTest::CollectAllAvailableGarbage();
5033 int code_chain_length_before, code_chain_length_after; 5040 int code_chain_length_before, code_chain_length_after;
5034 { 5041 {
5035 HandleScope scope(heap->isolate()); 5042 HandleScope scope(heap->isolate());
5036 Handle<JSFunction> mortal = 5043 Handle<JSFunction> mortal =
5037 OptimizeDummyFunction(CcTest::isolate(), "mortal"); 5044 OptimizeDummyFunction(CcTest::isolate(), "mortal");
5038 Handle<JSFunction> immortal = 5045 Handle<JSFunction> immortal =
5039 OptimizeDummyFunction(CcTest::isolate(), "immortal"); 5046 OptimizeDummyFunction(CcTest::isolate(), "immortal");
5040 CHECK_EQ(immortal->code()->next_code_link(), mortal->code()); 5047 CHECK_EQ(immortal->code()->next_code_link(), mortal->code());
5041 code_chain_length_before = GetCodeChainLength(immortal->code()); 5048 code_chain_length_before = GetCodeChainLength(immortal->code());
5042 // Keep the immortal code and let the mortal code die. 5049 // Keep the immortal code and let the mortal code die.
5043 code = scope.CloseAndEscape(Handle<Code>(immortal->code())); 5050 code = scope.CloseAndEscape(Handle<Code>(immortal->code()));
5044 CompileRun("mortal = null; immortal = null;"); 5051 CompileRun("mortal = null; immortal = null;");
5045 } 5052 }
5046 heap->CollectAllAvailableGarbage(); 5053 CcTest::CollectAllAvailableGarbage();
5047 // Now mortal code should be dead. 5054 // Now mortal code should be dead.
5048 code_chain_length_after = GetCodeChainLength(*code); 5055 code_chain_length_after = GetCodeChainLength(*code);
5049 CHECK_EQ(code_chain_length_before - 1, code_chain_length_after); 5056 CHECK_EQ(code_chain_length_before - 1, code_chain_length_after);
5050 } 5057 }
5051 5058
5052 5059
5053 static Handle<Code> DummyOptimizedCode(Isolate* isolate) { 5060 static Handle<Code> DummyOptimizedCode(Isolate* isolate) {
5054 i::byte buffer[i::Assembler::kMinimalBufferSize]; 5061 i::byte buffer[i::Assembler::kMinimalBufferSize];
5055 MacroAssembler masm(isolate, buffer, sizeof(buffer), 5062 MacroAssembler masm(isolate, buffer, sizeof(buffer),
5056 v8::internal::CodeObjectRequired::kYes); 5063 v8::internal::CodeObjectRequired::kYes);
(...skipping 10 matching lines...) Expand all
5067 5074
5068 5075
5069 TEST(NextCodeLinkIsWeak2) { 5076 TEST(NextCodeLinkIsWeak2) {
5070 i::FLAG_allow_natives_syntax = true; 5077 i::FLAG_allow_natives_syntax = true;
5071 CcTest::InitializeVM(); 5078 CcTest::InitializeVM();
5072 Isolate* isolate = CcTest::i_isolate(); 5079 Isolate* isolate = CcTest::i_isolate();
5073 v8::internal::Heap* heap = CcTest::heap(); 5080 v8::internal::Heap* heap = CcTest::heap();
5074 5081
5075 if (!isolate->use_crankshaft()) return; 5082 if (!isolate->use_crankshaft()) return;
5076 HandleScope outer_scope(heap->isolate()); 5083 HandleScope outer_scope(heap->isolate());
5077 heap->CollectAllAvailableGarbage(); 5084 CcTest::CollectAllAvailableGarbage();
5078 Handle<Context> context(Context::cast(heap->native_contexts_list()), isolate); 5085 Handle<Context> context(Context::cast(heap->native_contexts_list()), isolate);
5079 Handle<Code> new_head; 5086 Handle<Code> new_head;
5080 Handle<Object> old_head(context->get(Context::OPTIMIZED_CODE_LIST), isolate); 5087 Handle<Object> old_head(context->get(Context::OPTIMIZED_CODE_LIST), isolate);
5081 { 5088 {
5082 HandleScope scope(heap->isolate()); 5089 HandleScope scope(heap->isolate());
5083 Handle<Code> immortal = DummyOptimizedCode(isolate); 5090 Handle<Code> immortal = DummyOptimizedCode(isolate);
5084 Handle<Code> mortal = DummyOptimizedCode(isolate); 5091 Handle<Code> mortal = DummyOptimizedCode(isolate);
5085 mortal->set_next_code_link(*old_head); 5092 mortal->set_next_code_link(*old_head);
5086 immortal->set_next_code_link(*mortal); 5093 immortal->set_next_code_link(*mortal);
5087 context->set(Context::OPTIMIZED_CODE_LIST, *immortal); 5094 context->set(Context::OPTIMIZED_CODE_LIST, *immortal);
5088 new_head = scope.CloseAndEscape(immortal); 5095 new_head = scope.CloseAndEscape(immortal);
5089 } 5096 }
5090 heap->CollectAllAvailableGarbage(); 5097 CcTest::CollectAllAvailableGarbage();
5091 // Now mortal code should be dead. 5098 // Now mortal code should be dead.
5092 CHECK_EQ(*old_head, new_head->next_code_link()); 5099 CHECK_EQ(*old_head, new_head->next_code_link());
5093 } 5100 }
5094 5101
5095 5102
5096 static bool weak_ic_cleared = false; 5103 static bool weak_ic_cleared = false;
5097 5104
5098 static void ClearWeakIC( 5105 static void ClearWeakIC(
5099 const v8::WeakCallbackInfo<v8::Persistent<v8::Object>>& data) { 5106 const v8::WeakCallbackInfo<v8::Persistent<v8::Object>>& data) {
5100 printf("clear weak is called\n"); 5107 printf("clear weak is called\n");
(...skipping 29 matching lines...) Expand all
5130 " createObj(hat);" 5137 " createObj(hat);"
5131 " return hat;" 5138 " return hat;"
5132 " })();"; 5139 " })();";
5133 garbage.Reset(isolate, CompileRun(env.local(), source) 5140 garbage.Reset(isolate, CompileRun(env.local(), source)
5134 .ToLocalChecked() 5141 .ToLocalChecked()
5135 ->ToObject(env.local()) 5142 ->ToObject(env.local())
5136 .ToLocalChecked()); 5143 .ToLocalChecked());
5137 } 5144 }
5138 weak_ic_cleared = false; 5145 weak_ic_cleared = false;
5139 garbage.SetWeak(&garbage, &ClearWeakIC, v8::WeakCallbackType::kParameter); 5146 garbage.SetWeak(&garbage, &ClearWeakIC, v8::WeakCallbackType::kParameter);
5140 Heap* heap = CcTest::i_isolate()->heap(); 5147 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
5141 heap->CollectAllGarbage();
5142 CHECK(weak_ic_cleared); 5148 CHECK(weak_ic_cleared);
5143 5149
5144 // We've determined the constructor in createObj has had it's weak cell 5150 // We've determined the constructor in createObj has had it's weak cell
5145 // cleared. Now, verify that one additional call with a new function 5151 // cleared. Now, verify that one additional call with a new function
5146 // allows monomorphicity. 5152 // allows monomorphicity.
5147 Handle<TypeFeedbackVector> feedback_vector = Handle<TypeFeedbackVector>( 5153 Handle<TypeFeedbackVector> feedback_vector = Handle<TypeFeedbackVector>(
5148 createObj->feedback_vector(), CcTest::i_isolate()); 5154 createObj->feedback_vector(), CcTest::i_isolate());
5149 for (int i = 0; i < 20; i++) { 5155 for (int i = 0; i < 20; i++) {
5150 Object* slot_value = feedback_vector->Get(FeedbackVectorSlot(0)); 5156 Object* slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
5151 CHECK(slot_value->IsWeakCell()); 5157 CHECK(slot_value->IsWeakCell());
5152 if (WeakCell::cast(slot_value)->cleared()) break; 5158 if (WeakCell::cast(slot_value)->cleared()) break;
5153 heap->CollectAllGarbage(); 5159 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
5154 } 5160 }
5155 5161
5156 Object* slot_value = feedback_vector->Get(FeedbackVectorSlot(0)); 5162 Object* slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
5157 CHECK(slot_value->IsWeakCell() && WeakCell::cast(slot_value)->cleared()); 5163 CHECK(slot_value->IsWeakCell() && WeakCell::cast(slot_value)->cleared());
5158 CompileRun( 5164 CompileRun(
5159 "function coat() { this.x = 6; }" 5165 "function coat() { this.x = 6; }"
5160 "createObj(coat);"); 5166 "createObj(coat);");
5161 slot_value = feedback_vector->Get(FeedbackVectorSlot(0)); 5167 slot_value = feedback_vector->Get(FeedbackVectorSlot(0));
5162 CHECK(slot_value->IsWeakCell() && !WeakCell::cast(slot_value)->cleared()); 5168 CHECK(slot_value->IsWeakCell() && !WeakCell::cast(slot_value)->cleared());
5163 } 5169 }
5164 5170
5165 5171
5166 // Checks that the value returned by execution of the source is weak. 5172 // Checks that the value returned by execution of the source is weak.
5167 void CheckWeakness(const char* source) { 5173 void CheckWeakness(const char* source) {
5168 i::FLAG_stress_compaction = false; 5174 i::FLAG_stress_compaction = false;
5169 CcTest::InitializeVM(); 5175 CcTest::InitializeVM();
5170 v8::Isolate* isolate = CcTest::isolate(); 5176 v8::Isolate* isolate = CcTest::isolate();
5171 LocalContext env; 5177 LocalContext env;
5172 v8::HandleScope scope(isolate); 5178 v8::HandleScope scope(isolate);
5173 v8::Persistent<v8::Object> garbage; 5179 v8::Persistent<v8::Object> garbage;
5174 { 5180 {
5175 v8::HandleScope scope(isolate); 5181 v8::HandleScope scope(isolate);
5176 garbage.Reset(isolate, CompileRun(env.local(), source) 5182 garbage.Reset(isolate, CompileRun(env.local(), source)
5177 .ToLocalChecked() 5183 .ToLocalChecked()
5178 ->ToObject(env.local()) 5184 ->ToObject(env.local())
5179 .ToLocalChecked()); 5185 .ToLocalChecked());
5180 } 5186 }
5181 weak_ic_cleared = false; 5187 weak_ic_cleared = false;
5182 garbage.SetWeak(&garbage, &ClearWeakIC, v8::WeakCallbackType::kParameter); 5188 garbage.SetWeak(&garbage, &ClearWeakIC, v8::WeakCallbackType::kParameter);
5183 Heap* heap = CcTest::i_isolate()->heap(); 5189 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
5184 heap->CollectAllGarbage();
5185 CHECK(weak_ic_cleared); 5190 CHECK(weak_ic_cleared);
5186 } 5191 }
5187 5192
5188 5193
5189 // Each of the following "weak IC" tests creates an IC that embeds a map with 5194 // Each of the following "weak IC" tests creates an IC that embeds a map with
5190 // the prototype pointing to _proto_ and checks that the _proto_ dies on GC. 5195 // the prototype pointing to _proto_ and checks that the _proto_ dies on GC.
5191 TEST(WeakMapInMonomorphicLoadIC) { 5196 TEST(WeakMapInMonomorphicLoadIC) {
5192 CheckWeakness("function loadIC(obj) {" 5197 CheckWeakness("function loadIC(obj) {"
5193 " return obj.name;" 5198 " return obj.name;"
5194 "}" 5199 "}"
(...skipping 173 matching lines...) Expand 10 before | Expand all | Expand 10 after
5368 CHECK(!IC::ICUseVector(kind)); 5373 CHECK(!IC::ICUseVector(kind));
5369 CHECK_EQ(state, IC::StateFromCode(ic)); 5374 CHECK_EQ(state, IC::StateFromCode(ic));
5370 } 5375 }
5371 } 5376 }
5372 5377
5373 5378
5374 TEST(MonomorphicStaysMonomorphicAfterGC) { 5379 TEST(MonomorphicStaysMonomorphicAfterGC) {
5375 if (FLAG_always_opt) return; 5380 if (FLAG_always_opt) return;
5376 CcTest::InitializeVM(); 5381 CcTest::InitializeVM();
5377 Isolate* isolate = CcTest::i_isolate(); 5382 Isolate* isolate = CcTest::i_isolate();
5378 Heap* heap = isolate->heap();
5379 v8::HandleScope scope(CcTest::isolate()); 5383 v8::HandleScope scope(CcTest::isolate());
5380 CompileRun( 5384 CompileRun(
5381 "function loadIC(obj) {" 5385 "function loadIC(obj) {"
5382 " return obj.name;" 5386 " return obj.name;"
5383 "}" 5387 "}"
5384 "function testIC() {" 5388 "function testIC() {"
5385 " var proto = {'name' : 'weak'};" 5389 " var proto = {'name' : 'weak'};"
5386 " var obj = Object.create(proto);" 5390 " var obj = Object.create(proto);"
5387 " loadIC(obj);" 5391 " loadIC(obj);"
5388 " loadIC(obj);" 5392 " loadIC(obj);"
5389 " loadIC(obj);" 5393 " loadIC(obj);"
5390 " return proto;" 5394 " return proto;"
5391 "};"); 5395 "};");
5392 Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC"); 5396 Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC");
5393 { 5397 {
5394 v8::HandleScope scope(CcTest::isolate()); 5398 v8::HandleScope scope(CcTest::isolate());
5395 CompileRun("(testIC())"); 5399 CompileRun("(testIC())");
5396 } 5400 }
5397 heap->CollectAllGarbage(); 5401 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
5398 CheckIC(loadIC, Code::LOAD_IC, 0, MONOMORPHIC); 5402 CheckIC(loadIC, Code::LOAD_IC, 0, MONOMORPHIC);
5399 { 5403 {
5400 v8::HandleScope scope(CcTest::isolate()); 5404 v8::HandleScope scope(CcTest::isolate());
5401 CompileRun("(testIC())"); 5405 CompileRun("(testIC())");
5402 } 5406 }
5403 CheckIC(loadIC, Code::LOAD_IC, 0, MONOMORPHIC); 5407 CheckIC(loadIC, Code::LOAD_IC, 0, MONOMORPHIC);
5404 } 5408 }
5405 5409
5406 5410
5407 TEST(PolymorphicStaysPolymorphicAfterGC) { 5411 TEST(PolymorphicStaysPolymorphicAfterGC) {
5408 if (FLAG_always_opt) return; 5412 if (FLAG_always_opt) return;
5409 CcTest::InitializeVM(); 5413 CcTest::InitializeVM();
5410 Isolate* isolate = CcTest::i_isolate(); 5414 Isolate* isolate = CcTest::i_isolate();
5411 Heap* heap = isolate->heap();
5412 v8::HandleScope scope(CcTest::isolate()); 5415 v8::HandleScope scope(CcTest::isolate());
5413 CompileRun( 5416 CompileRun(
5414 "function loadIC(obj) {" 5417 "function loadIC(obj) {"
5415 " return obj.name;" 5418 " return obj.name;"
5416 "}" 5419 "}"
5417 "function testIC() {" 5420 "function testIC() {"
5418 " var proto = {'name' : 'weak'};" 5421 " var proto = {'name' : 'weak'};"
5419 " var obj = Object.create(proto);" 5422 " var obj = Object.create(proto);"
5420 " loadIC(obj);" 5423 " loadIC(obj);"
5421 " loadIC(obj);" 5424 " loadIC(obj);"
5422 " loadIC(obj);" 5425 " loadIC(obj);"
5423 " var poly = Object.create(proto);" 5426 " var poly = Object.create(proto);"
5424 " poly.x = true;" 5427 " poly.x = true;"
5425 " loadIC(poly);" 5428 " loadIC(poly);"
5426 " return proto;" 5429 " return proto;"
5427 "};"); 5430 "};");
5428 Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC"); 5431 Handle<JSFunction> loadIC = GetFunctionByName(isolate, "loadIC");
5429 { 5432 {
5430 v8::HandleScope scope(CcTest::isolate()); 5433 v8::HandleScope scope(CcTest::isolate());
5431 CompileRun("(testIC())"); 5434 CompileRun("(testIC())");
5432 } 5435 }
5433 heap->CollectAllGarbage(); 5436 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
5434 CheckIC(loadIC, Code::LOAD_IC, 0, POLYMORPHIC); 5437 CheckIC(loadIC, Code::LOAD_IC, 0, POLYMORPHIC);
5435 { 5438 {
5436 v8::HandleScope scope(CcTest::isolate()); 5439 v8::HandleScope scope(CcTest::isolate());
5437 CompileRun("(testIC())"); 5440 CompileRun("(testIC())");
5438 } 5441 }
5439 CheckIC(loadIC, Code::LOAD_IC, 0, POLYMORPHIC); 5442 CheckIC(loadIC, Code::LOAD_IC, 0, POLYMORPHIC);
5440 } 5443 }
5441 5444
5442 5445
5443 TEST(WeakCell) { 5446 TEST(WeakCell) {
5444 CcTest::InitializeVM(); 5447 CcTest::InitializeVM();
5445 Isolate* isolate = CcTest::i_isolate(); 5448 Isolate* isolate = CcTest::i_isolate();
5446 v8::internal::Heap* heap = CcTest::heap();
5447 v8::internal::Factory* factory = isolate->factory(); 5449 v8::internal::Factory* factory = isolate->factory();
5448 5450
5449 HandleScope outer_scope(isolate); 5451 HandleScope outer_scope(isolate);
5450 Handle<WeakCell> weak_cell1; 5452 Handle<WeakCell> weak_cell1;
5451 { 5453 {
5452 HandleScope inner_scope(isolate); 5454 HandleScope inner_scope(isolate);
5453 Handle<HeapObject> value = factory->NewFixedArray(1, NOT_TENURED); 5455 Handle<HeapObject> value = factory->NewFixedArray(1, NOT_TENURED);
5454 weak_cell1 = inner_scope.CloseAndEscape(factory->NewWeakCell(value)); 5456 weak_cell1 = inner_scope.CloseAndEscape(factory->NewWeakCell(value));
5455 } 5457 }
5456 5458
5457 Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED); 5459 Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED);
5458 Handle<WeakCell> weak_cell2; 5460 Handle<WeakCell> weak_cell2;
5459 { 5461 {
5460 HandleScope inner_scope(isolate); 5462 HandleScope inner_scope(isolate);
5461 weak_cell2 = inner_scope.CloseAndEscape(factory->NewWeakCell(survivor)); 5463 weak_cell2 = inner_scope.CloseAndEscape(factory->NewWeakCell(survivor));
5462 } 5464 }
5463 CHECK(weak_cell1->value()->IsFixedArray()); 5465 CHECK(weak_cell1->value()->IsFixedArray());
5464 CHECK_EQ(*survivor, weak_cell2->value()); 5466 CHECK_EQ(*survivor, weak_cell2->value());
5465 heap->CollectGarbage(NEW_SPACE); 5467 CcTest::CollectGarbage(NEW_SPACE);
5466 CHECK(weak_cell1->value()->IsFixedArray()); 5468 CHECK(weak_cell1->value()->IsFixedArray());
5467 CHECK_EQ(*survivor, weak_cell2->value()); 5469 CHECK_EQ(*survivor, weak_cell2->value());
5468 heap->CollectGarbage(NEW_SPACE); 5470 CcTest::CollectGarbage(NEW_SPACE);
5469 CHECK(weak_cell1->value()->IsFixedArray()); 5471 CHECK(weak_cell1->value()->IsFixedArray());
5470 CHECK_EQ(*survivor, weak_cell2->value()); 5472 CHECK_EQ(*survivor, weak_cell2->value());
5471 heap->CollectAllAvailableGarbage(); 5473 CcTest::CollectAllAvailableGarbage();
5472 CHECK(weak_cell1->cleared()); 5474 CHECK(weak_cell1->cleared());
5473 CHECK_EQ(*survivor, weak_cell2->value()); 5475 CHECK_EQ(*survivor, weak_cell2->value());
5474 } 5476 }
5475 5477
5476 5478
5477 TEST(WeakCellsWithIncrementalMarking) { 5479 TEST(WeakCellsWithIncrementalMarking) {
5478 CcTest::InitializeVM(); 5480 CcTest::InitializeVM();
5479 Isolate* isolate = CcTest::i_isolate(); 5481 Isolate* isolate = CcTest::i_isolate();
5480 v8::internal::Heap* heap = CcTest::heap(); 5482 v8::internal::Heap* heap = CcTest::heap();
5481 v8::internal::Factory* factory = isolate->factory(); 5483 v8::internal::Factory* factory = isolate->factory();
5482 5484
5483 const int N = 16; 5485 const int N = 16;
5484 HandleScope outer_scope(isolate); 5486 HandleScope outer_scope(isolate);
5485 Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED); 5487 Handle<FixedArray> survivor = factory->NewFixedArray(1, NOT_TENURED);
5486 Handle<WeakCell> weak_cells[N]; 5488 Handle<WeakCell> weak_cells[N];
5487 5489
5488 for (int i = 0; i < N; i++) { 5490 for (int i = 0; i < N; i++) {
5489 HandleScope inner_scope(isolate); 5491 HandleScope inner_scope(isolate);
5490 Handle<HeapObject> value = 5492 Handle<HeapObject> value =
5491 i == 0 ? survivor : factory->NewFixedArray(1, NOT_TENURED); 5493 i == 0 ? survivor : factory->NewFixedArray(1, NOT_TENURED);
5492 Handle<WeakCell> weak_cell = factory->NewWeakCell(value); 5494 Handle<WeakCell> weak_cell = factory->NewWeakCell(value);
5493 CHECK(weak_cell->value()->IsFixedArray()); 5495 CHECK(weak_cell->value()->IsFixedArray());
5494 IncrementalMarking* marking = heap->incremental_marking(); 5496 IncrementalMarking* marking = heap->incremental_marking();
5495 if (marking->IsStopped()) { 5497 if (marking->IsStopped()) {
5496 heap->StartIncrementalMarking(); 5498 heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
5499 i::GarbageCollectionReason::kTesting);
5497 } 5500 }
5498 marking->Step(128, IncrementalMarking::NO_GC_VIA_STACK_GUARD, 5501 marking->Step(128, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
5499 IncrementalMarking::FORCE_COMPLETION); 5502 IncrementalMarking::FORCE_COMPLETION);
5500 heap->CollectGarbage(NEW_SPACE); 5503 CcTest::CollectGarbage(NEW_SPACE);
5501 CHECK(weak_cell->value()->IsFixedArray()); 5504 CHECK(weak_cell->value()->IsFixedArray());
5502 weak_cells[i] = inner_scope.CloseAndEscape(weak_cell); 5505 weak_cells[i] = inner_scope.CloseAndEscape(weak_cell);
5503 } 5506 }
5504 // Call collect all twice to make sure that we also cleared 5507 // Call collect all twice to make sure that we also cleared
5505 // weak cells that were allocated on black pages. 5508 // weak cells that were allocated on black pages.
5506 heap->CollectAllGarbage(); 5509 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
5507 heap->CollectAllGarbage(); 5510 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
5508 CHECK_EQ(*survivor, weak_cells[0]->value()); 5511 CHECK_EQ(*survivor, weak_cells[0]->value());
5509 for (int i = 1; i < N; i++) { 5512 for (int i = 1; i < N; i++) {
5510 CHECK(weak_cells[i]->cleared()); 5513 CHECK(weak_cells[i]->cleared());
5511 } 5514 }
5512 } 5515 }
5513 5516
5514 5517
5515 #ifdef DEBUG 5518 #ifdef DEBUG
5516 TEST(AddInstructionChangesNewSpacePromotion) { 5519 TEST(AddInstructionChangesNewSpacePromotion) {
5517 i::FLAG_allow_natives_syntax = true; 5520 i::FLAG_allow_natives_syntax = true;
(...skipping 26 matching lines...) Expand all
5544 "%OptimizeFunctionOnNextCall(crash);" 5547 "%OptimizeFunctionOnNextCall(crash);"
5545 "crash(1);"); 5548 "crash(1);");
5546 5549
5547 v8::Local<v8::Object> global = CcTest::global(); 5550 v8::Local<v8::Object> global = CcTest::global();
5548 v8::Local<v8::Function> g = v8::Local<v8::Function>::Cast( 5551 v8::Local<v8::Function> g = v8::Local<v8::Function>::Cast(
5549 global->Get(env.local(), v8_str("crash")).ToLocalChecked()); 5552 global->Get(env.local(), v8_str("crash")).ToLocalChecked());
5550 v8::Local<v8::Value> args1[] = {v8_num(1)}; 5553 v8::Local<v8::Value> args1[] = {v8_num(1)};
5551 heap->DisableInlineAllocation(); 5554 heap->DisableInlineAllocation();
5552 heap->set_allocation_timeout(1); 5555 heap->set_allocation_timeout(1);
5553 g->Call(env.local(), global, 1, args1).ToLocalChecked(); 5556 g->Call(env.local(), global, 1, args1).ToLocalChecked();
5554 heap->CollectAllGarbage(); 5557 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
5555 } 5558 }
5556 5559
5557 5560
5558 void OnFatalErrorExpectOOM(const char* location, const char* message) { 5561 void OnFatalErrorExpectOOM(const char* location, const char* message) {
5559 // Exit with 0 if the location matches our expectation. 5562 // Exit with 0 if the location matches our expectation.
5560 exit(strcmp(location, "CALL_AND_RETRY_LAST")); 5563 exit(strcmp(location, "CALL_AND_RETRY_LAST"));
5561 } 5564 }
5562 5565
5563 5566
5564 TEST(CEntryStubOOM) { 5567 TEST(CEntryStubOOM) {
(...skipping 40 matching lines...) Expand 10 before | Expand all | Expand 10 after
5605 const int kFixedArrayLen = 512; 5608 const int kFixedArrayLen = 512;
5606 Handle<FixedArray> objects[kMaxObjects]; 5609 Handle<FixedArray> objects[kMaxObjects];
5607 for (int i = 0; (i < kMaxObjects) && 5610 for (int i = 0; (i < kMaxObjects) &&
5608 heap->CanExpandOldGeneration(old_space->AreaSize()); 5611 heap->CanExpandOldGeneration(old_space->AreaSize());
5609 i++) { 5612 i++) {
5610 objects[i] = i_isolate->factory()->NewFixedArray(kFixedArrayLen, TENURED); 5613 objects[i] = i_isolate->factory()->NewFixedArray(kFixedArrayLen, TENURED);
5611 Page::FromAddress(objects[i]->address()) 5614 Page::FromAddress(objects[i]->address())
5612 ->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING); 5615 ->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
5613 } 5616 }
5614 heap::SimulateFullSpace(old_space); 5617 heap::SimulateFullSpace(old_space);
5615 heap->CollectGarbage(OLD_SPACE); 5618 heap->CollectGarbage(OLD_SPACE, i::GarbageCollectionReason::kTesting);
5616 // If we get this far, we've successfully aborted compaction. Any further 5619 // If we get this far, we've successfully aborted compaction. Any further
5617 // allocations might trigger OOM. 5620 // allocations might trigger OOM.
5618 } 5621 }
5619 isolate->Exit(); 5622 isolate->Exit();
5620 isolate->Dispose(); 5623 isolate->Dispose();
5621 } 5624 }
5622 5625
5623 5626
5624 TEST(Regress357137) { 5627 TEST(Regress357137) {
5625 CcTest::InitializeVM(); 5628 CcTest::InitializeVM();
(...skipping 75 matching lines...) Expand 10 before | Expand all | Expand 10 after
5701 // semi-space page. The second allocation in (3) will not fit into the 5704 // semi-space page. The second allocation in (3) will not fit into the
5702 // first semi-space page, but it will overwrite the promotion queue which 5705 // first semi-space page, but it will overwrite the promotion queue which
5703 // are in the second semi-space page. If the right guards are in place, the 5706 // are in the second semi-space page. If the right guards are in place, the
5704 // promotion queue will be evacuated in that case. 5707 // promotion queue will be evacuated in that case.
5705 5708
5706 5709
5707 CHECK(new_space->IsAtMaximumCapacity()); 5710 CHECK(new_space->IsAtMaximumCapacity());
5708 CHECK(i::FLAG_min_semi_space_size * MB == new_space->TotalCapacity()); 5711 CHECK(i::FLAG_min_semi_space_size * MB == new_space->TotalCapacity());
5709 5712
5710 // Call the scavenger two times to get an empty new space 5713 // Call the scavenger two times to get an empty new space
5711 heap->CollectGarbage(NEW_SPACE); 5714 heap->CollectGarbage(NEW_SPACE, i::GarbageCollectionReason::kTesting);
5712 heap->CollectGarbage(NEW_SPACE); 5715 heap->CollectGarbage(NEW_SPACE, i::GarbageCollectionReason::kTesting);
5713 5716
5714 // First create a few objects which will survive a scavenge, and will get 5717 // First create a few objects which will survive a scavenge, and will get
5715 // promoted to the old generation later on. These objects will create 5718 // promoted to the old generation later on. These objects will create
5716 // promotion queue entries at the end of the second semi-space page. 5719 // promotion queue entries at the end of the second semi-space page.
5717 const int number_handles = 12; 5720 const int number_handles = 12;
5718 Handle<FixedArray> handles[number_handles]; 5721 Handle<FixedArray> handles[number_handles];
5719 for (int i = 0; i < number_handles; i++) { 5722 for (int i = 0; i < number_handles; i++) {
5720 handles[i] = i_isolate->factory()->NewFixedArray(1, NOT_TENURED); 5723 handles[i] = i_isolate->factory()->NewFixedArray(1, NOT_TENURED);
5721 } 5724 }
5722 5725
5723 heap->CollectGarbage(NEW_SPACE); 5726 heap->CollectGarbage(NEW_SPACE, i::GarbageCollectionReason::kTesting);
5724 CHECK(i::FLAG_min_semi_space_size * MB == new_space->TotalCapacity()); 5727 CHECK(i::FLAG_min_semi_space_size * MB == new_space->TotalCapacity());
5725 5728
5726 // Fill-up the first semi-space page. 5729 // Fill-up the first semi-space page.
5727 heap::FillUpOnePage(new_space); 5730 heap::FillUpOnePage(new_space);
5728 5731
5729 // Create a small object to initialize the bump pointer on the second 5732 // Create a small object to initialize the bump pointer on the second
5730 // semi-space page. 5733 // semi-space page.
5731 Handle<FixedArray> small = 5734 Handle<FixedArray> small =
5732 i_isolate->factory()->NewFixedArray(1, NOT_TENURED); 5735 i_isolate->factory()->NewFixedArray(1, NOT_TENURED);
5733 CHECK(heap->InNewSpace(*small)); 5736 CHECK(heap->InNewSpace(*small));
5734 5737
5735 // Fill-up the second semi-space page. 5738 // Fill-up the second semi-space page.
5736 heap::FillUpOnePage(new_space); 5739 heap::FillUpOnePage(new_space);
5737 5740
5738 // This scavenge will corrupt memory if the promotion queue is not 5741 // This scavenge will corrupt memory if the promotion queue is not
5739 // evacuated. 5742 // evacuated.
5740 heap->CollectGarbage(NEW_SPACE); 5743 heap->CollectGarbage(NEW_SPACE, i::GarbageCollectionReason::kTesting);
5741 } 5744 }
5742 isolate->Dispose(); 5745 isolate->Dispose();
5743 } 5746 }
5744 5747
5745 5748
5746 TEST(Regress388880) { 5749 TEST(Regress388880) {
5747 i::FLAG_expose_gc = true; 5750 i::FLAG_expose_gc = true;
5748 CcTest::InitializeVM(); 5751 CcTest::InitializeVM();
5749 v8::HandleScope scope(CcTest::isolate()); 5752 v8::HandleScope scope(CcTest::isolate());
5750 Isolate* isolate = CcTest::i_isolate(); 5753 Isolate* isolate = CcTest::i_isolate();
(...skipping 22 matching lines...) Expand all
5773 // Ensure that the object allocated where we need it. 5776 // Ensure that the object allocated where we need it.
5774 Page* page = Page::FromAddress(o->address()); 5777 Page* page = Page::FromAddress(o->address());
5775 CHECK_EQ(desired_offset, page->Offset(o->address())); 5778 CHECK_EQ(desired_offset, page->Offset(o->address()));
5776 5779
5777 // Now we have an object right at the end of the page. 5780 // Now we have an object right at the end of the page.
5778 5781
5779 // Enable incremental marking to trigger actions in Heap::AdjustLiveBytes() 5782 // Enable incremental marking to trigger actions in Heap::AdjustLiveBytes()
5780 // that would cause crash. 5783 // that would cause crash.
5781 IncrementalMarking* marking = CcTest::heap()->incremental_marking(); 5784 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
5782 marking->Stop(); 5785 marking->Stop();
5783 CcTest::heap()->StartIncrementalMarking(); 5786 CcTest::heap()->StartIncrementalMarking(i::Heap::kNoGCFlags,
5787 i::GarbageCollectionReason::kTesting);
5784 CHECK(marking->IsMarking()); 5788 CHECK(marking->IsMarking());
5785 5789
5786 // Now everything is set up for crashing in JSObject::MigrateFastToFast() 5790 // Now everything is set up for crashing in JSObject::MigrateFastToFast()
5787 // when it calls heap->AdjustLiveBytes(...). 5791 // when it calls heap->AdjustLiveBytes(...).
5788 JSObject::MigrateToMap(o, map2); 5792 JSObject::MigrateToMap(o, map2);
5789 } 5793 }
5790 5794
5791 5795
5792 TEST(Regress3631) { 5796 TEST(Regress3631) {
5793 i::FLAG_expose_gc = true; 5797 i::FLAG_expose_gc = true;
5794 CcTest::InitializeVM(); 5798 CcTest::InitializeVM();
5795 v8::HandleScope scope(CcTest::isolate()); 5799 v8::HandleScope scope(CcTest::isolate());
5796 Isolate* isolate = CcTest::i_isolate(); 5800 Isolate* isolate = CcTest::i_isolate();
5797 Heap* heap = isolate->heap(); 5801 Heap* heap = isolate->heap();
5798 IncrementalMarking* marking = CcTest::heap()->incremental_marking(); 5802 IncrementalMarking* marking = CcTest::heap()->incremental_marking();
5799 v8::Local<v8::Value> result = CompileRun( 5803 v8::Local<v8::Value> result = CompileRun(
5800 "var weak_map = new WeakMap();" 5804 "var weak_map = new WeakMap();"
5801 "var future_keys = [];" 5805 "var future_keys = [];"
5802 "for (var i = 0; i < 50; i++) {" 5806 "for (var i = 0; i < 50; i++) {"
5803 " var key = {'k' : i + 0.1};" 5807 " var key = {'k' : i + 0.1};"
5804 " weak_map.set(key, 1);" 5808 " weak_map.set(key, 1);"
5805 " future_keys.push({'x' : i + 0.2});" 5809 " future_keys.push({'x' : i + 0.2});"
5806 "}" 5810 "}"
5807 "weak_map"); 5811 "weak_map");
5808 if (marking->IsStopped()) { 5812 if (marking->IsStopped()) {
5809 CcTest::heap()->StartIncrementalMarking(); 5813 CcTest::heap()->StartIncrementalMarking(
5814 i::Heap::kNoGCFlags, i::GarbageCollectionReason::kTesting);
5810 } 5815 }
5811 // Incrementally mark the backing store. 5816 // Incrementally mark the backing store.
5812 Handle<JSReceiver> obj = 5817 Handle<JSReceiver> obj =
5813 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result)); 5818 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
5814 Handle<JSWeakCollection> weak_map(reinterpret_cast<JSWeakCollection*>(*obj)); 5819 Handle<JSWeakCollection> weak_map(reinterpret_cast<JSWeakCollection*>(*obj));
5815 while (!Marking::IsBlack( 5820 while (!Marking::IsBlack(
5816 ObjectMarking::MarkBitFrom(HeapObject::cast(weak_map->table()))) && 5821 ObjectMarking::MarkBitFrom(HeapObject::cast(weak_map->table()))) &&
5817 !marking->IsStopped()) { 5822 !marking->IsStopped()) {
5818 marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD, 5823 marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
5819 IncrementalMarking::FORCE_COMPLETION); 5824 IncrementalMarking::FORCE_COMPLETION);
5820 } 5825 }
5821 // Stash the backing store in a handle. 5826 // Stash the backing store in a handle.
5822 Handle<Object> save(weak_map->table(), isolate); 5827 Handle<Object> save(weak_map->table(), isolate);
5823 // The following line will update the backing store. 5828 // The following line will update the backing store.
5824 CompileRun( 5829 CompileRun(
5825 "for (var i = 0; i < 50; i++) {" 5830 "for (var i = 0; i < 50; i++) {"
5826 " weak_map.set(future_keys[i], i);" 5831 " weak_map.set(future_keys[i], i);"
5827 "}"); 5832 "}");
5828 heap->incremental_marking()->set_should_hurry(true); 5833 heap->incremental_marking()->set_should_hurry(true);
5829 heap->CollectGarbage(OLD_SPACE); 5834 CcTest::CollectGarbage(OLD_SPACE);
5830 } 5835 }
5831 5836
5832 5837
5833 TEST(Regress442710) { 5838 TEST(Regress442710) {
5834 CcTest::InitializeVM(); 5839 CcTest::InitializeVM();
5835 Isolate* isolate = CcTest::i_isolate(); 5840 Isolate* isolate = CcTest::i_isolate();
5836 Heap* heap = isolate->heap();
5837 Factory* factory = isolate->factory(); 5841 Factory* factory = isolate->factory();
5838 5842
5839 HandleScope sc(isolate); 5843 HandleScope sc(isolate);
5840 Handle<JSGlobalObject> global( 5844 Handle<JSGlobalObject> global(
5841 CcTest::i_isolate()->context()->global_object()); 5845 CcTest::i_isolate()->context()->global_object());
5842 Handle<JSArray> array = factory->NewJSArray(2); 5846 Handle<JSArray> array = factory->NewJSArray(2);
5843 5847
5844 Handle<String> name = factory->InternalizeUtf8String("testArray"); 5848 Handle<String> name = factory->InternalizeUtf8String("testArray");
5845 JSReceiver::SetProperty(global, name, array, SLOPPY).Check(); 5849 JSReceiver::SetProperty(global, name, array, SLOPPY).Check();
5846 CompileRun("testArray[0] = 1; testArray[1] = 2; testArray.shift();"); 5850 CompileRun("testArray[0] = 1; testArray[1] = 2; testArray.shift();");
5847 heap->CollectGarbage(OLD_SPACE); 5851 CcTest::CollectGarbage(OLD_SPACE);
5848 } 5852 }
5849 5853
5850 5854
5851 HEAP_TEST(NumberStringCacheSize) { 5855 HEAP_TEST(NumberStringCacheSize) {
5852 // Test that the number-string cache has not been resized in the snapshot. 5856 // Test that the number-string cache has not been resized in the snapshot.
5853 CcTest::InitializeVM(); 5857 CcTest::InitializeVM();
5854 Isolate* isolate = CcTest::i_isolate(); 5858 Isolate* isolate = CcTest::i_isolate();
5855 if (!isolate->snapshot_available()) return; 5859 if (!isolate->snapshot_available()) return;
5856 Heap* heap = isolate->heap(); 5860 Heap* heap = isolate->heap();
5857 CHECK_EQ(Heap::kInitialNumberStringCacheSize * 2, 5861 CHECK_EQ(Heap::kInitialNumberStringCacheSize * 2,
5858 heap->number_string_cache()->length()); 5862 heap->number_string_cache()->length());
5859 } 5863 }
5860 5864
5861 5865
5862 TEST(Regress3877) { 5866 TEST(Regress3877) {
5863 CcTest::InitializeVM(); 5867 CcTest::InitializeVM();
5864 Isolate* isolate = CcTest::i_isolate(); 5868 Isolate* isolate = CcTest::i_isolate();
5865 Heap* heap = isolate->heap();
5866 Factory* factory = isolate->factory(); 5869 Factory* factory = isolate->factory();
5867 HandleScope scope(isolate); 5870 HandleScope scope(isolate);
5868 CompileRun("function cls() { this.x = 10; }"); 5871 CompileRun("function cls() { this.x = 10; }");
5869 Handle<WeakCell> weak_prototype; 5872 Handle<WeakCell> weak_prototype;
5870 { 5873 {
5871 HandleScope inner_scope(isolate); 5874 HandleScope inner_scope(isolate);
5872 v8::Local<v8::Value> result = CompileRun("cls.prototype"); 5875 v8::Local<v8::Value> result = CompileRun("cls.prototype");
5873 Handle<JSReceiver> proto = 5876 Handle<JSReceiver> proto =
5874 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result)); 5877 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
5875 weak_prototype = inner_scope.CloseAndEscape(factory->NewWeakCell(proto)); 5878 weak_prototype = inner_scope.CloseAndEscape(factory->NewWeakCell(proto));
5876 } 5879 }
5877 CHECK(!weak_prototype->cleared()); 5880 CHECK(!weak_prototype->cleared());
5878 CompileRun( 5881 CompileRun(
5879 "var a = { };" 5882 "var a = { };"
5880 "a.x = new cls();" 5883 "a.x = new cls();"
5881 "cls.prototype = null;"); 5884 "cls.prototype = null;");
5882 for (int i = 0; i < 4; i++) { 5885 for (int i = 0; i < 4; i++) {
5883 heap->CollectAllGarbage(); 5886 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
5884 } 5887 }
5885 // The map of a.x keeps prototype alive 5888 // The map of a.x keeps prototype alive
5886 CHECK(!weak_prototype->cleared()); 5889 CHECK(!weak_prototype->cleared());
5887 // Change the map of a.x and make the previous map garbage collectable. 5890 // Change the map of a.x and make the previous map garbage collectable.
5888 CompileRun("a.x.__proto__ = {};"); 5891 CompileRun("a.x.__proto__ = {};");
5889 for (int i = 0; i < 4; i++) { 5892 for (int i = 0; i < 4; i++) {
5890 heap->CollectAllGarbage(); 5893 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
5891 } 5894 }
5892 CHECK(weak_prototype->cleared()); 5895 CHECK(weak_prototype->cleared());
5893 } 5896 }
5894 5897
5895 5898
5896 Handle<WeakCell> AddRetainedMap(Isolate* isolate, Heap* heap) { 5899 Handle<WeakCell> AddRetainedMap(Isolate* isolate, Heap* heap) {
5897 HandleScope inner_scope(isolate); 5900 HandleScope inner_scope(isolate);
5898 Handle<Map> map = Map::Create(isolate, 1); 5901 Handle<Map> map = Map::Create(isolate, 1);
5899 v8::Local<v8::Value> result = 5902 v8::Local<v8::Value> result =
5900 CompileRun("(function () { return {x : 10}; })();"); 5903 CompileRun("(function () { return {x : 10}; })();");
5901 Handle<JSReceiver> proto = 5904 Handle<JSReceiver> proto =
5902 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result)); 5905 v8::Utils::OpenHandle(*v8::Local<v8::Object>::Cast(result));
5903 Map::SetPrototype(map, proto); 5906 Map::SetPrototype(map, proto);
5904 heap->AddRetainedMap(map); 5907 heap->AddRetainedMap(map);
5905 return inner_scope.CloseAndEscape(Map::WeakCellForMap(map)); 5908 return inner_scope.CloseAndEscape(Map::WeakCellForMap(map));
5906 } 5909 }
5907 5910
5908 5911
5909 void CheckMapRetainingFor(int n) { 5912 void CheckMapRetainingFor(int n) {
5910 FLAG_retain_maps_for_n_gc = n; 5913 FLAG_retain_maps_for_n_gc = n;
5911 Isolate* isolate = CcTest::i_isolate(); 5914 Isolate* isolate = CcTest::i_isolate();
5912 Heap* heap = isolate->heap(); 5915 Heap* heap = isolate->heap();
5913 Handle<WeakCell> weak_cell = AddRetainedMap(isolate, heap); 5916 Handle<WeakCell> weak_cell = AddRetainedMap(isolate, heap);
5914 CHECK(!weak_cell->cleared()); 5917 CHECK(!weak_cell->cleared());
5915 for (int i = 0; i < n; i++) { 5918 for (int i = 0; i < n; i++) {
5916 heap::SimulateIncrementalMarking(heap); 5919 heap::SimulateIncrementalMarking(heap);
5917 heap->CollectGarbage(OLD_SPACE); 5920 CcTest::CollectGarbage(OLD_SPACE);
5918 } 5921 }
5919 CHECK(!weak_cell->cleared()); 5922 CHECK(!weak_cell->cleared());
5920 heap::SimulateIncrementalMarking(heap); 5923 heap::SimulateIncrementalMarking(heap);
5921 heap->CollectGarbage(OLD_SPACE); 5924 CcTest::CollectGarbage(OLD_SPACE);
5922 CHECK(weak_cell->cleared()); 5925 CHECK(weak_cell->cleared());
5923 } 5926 }
5924 5927
5925 5928
5926 TEST(MapRetaining) { 5929 TEST(MapRetaining) {
5927 CcTest::InitializeVM(); 5930 CcTest::InitializeVM();
5928 v8::HandleScope scope(CcTest::isolate()); 5931 v8::HandleScope scope(CcTest::isolate());
5929 CheckMapRetainingFor(FLAG_retain_maps_for_n_gc); 5932 CheckMapRetainingFor(FLAG_retain_maps_for_n_gc);
5930 CheckMapRetainingFor(0); 5933 CheckMapRetainingFor(0);
5931 CheckMapRetainingFor(1); 5934 CheckMapRetainingFor(1);
5932 CheckMapRetainingFor(7); 5935 CheckMapRetainingFor(7);
5933 } 5936 }
5934 5937
5935 5938
5936 TEST(RegressArrayListGC) { 5939 TEST(RegressArrayListGC) {
5937 FLAG_retain_maps_for_n_gc = 1; 5940 FLAG_retain_maps_for_n_gc = 1;
5938 FLAG_incremental_marking = 0; 5941 FLAG_incremental_marking = 0;
5939 FLAG_gc_global = true; 5942 FLAG_gc_global = true;
5940 CcTest::InitializeVM(); 5943 CcTest::InitializeVM();
5941 v8::HandleScope scope(CcTest::isolate()); 5944 v8::HandleScope scope(CcTest::isolate());
5942 Isolate* isolate = CcTest::i_isolate(); 5945 Isolate* isolate = CcTest::i_isolate();
5943 Heap* heap = isolate->heap(); 5946 Heap* heap = isolate->heap();
5944 AddRetainedMap(isolate, heap); 5947 AddRetainedMap(isolate, heap);
5945 Handle<Map> map = Map::Create(isolate, 1); 5948 Handle<Map> map = Map::Create(isolate, 1);
5946 heap->CollectGarbage(OLD_SPACE); 5949 CcTest::CollectGarbage(OLD_SPACE);
5947 // Force GC in old space on next addition of retained map. 5950 // Force GC in old space on next addition of retained map.
5948 Map::WeakCellForMap(map); 5951 Map::WeakCellForMap(map);
5949 heap::SimulateFullSpace(CcTest::heap()->new_space()); 5952 heap::SimulateFullSpace(CcTest::heap()->new_space());
5950 for (int i = 0; i < 10; i++) { 5953 for (int i = 0; i < 10; i++) {
5951 heap->AddRetainedMap(map); 5954 heap->AddRetainedMap(map);
5952 } 5955 }
5953 heap->CollectGarbage(OLD_SPACE); 5956 CcTest::CollectGarbage(OLD_SPACE);
5954 } 5957 }
5955 5958
5956 5959
5957 #ifdef DEBUG 5960 #ifdef DEBUG
5958 TEST(PathTracer) { 5961 TEST(PathTracer) {
5959 CcTest::InitializeVM(); 5962 CcTest::InitializeVM();
5960 v8::HandleScope scope(CcTest::isolate()); 5963 v8::HandleScope scope(CcTest::isolate());
5961 5964
5962 v8::Local<v8::Value> result = CompileRun("'abc'"); 5965 v8::Local<v8::Value> result = CompileRun("'abc'");
5963 Handle<Object> o = v8::Utils::OpenHandle(*result); 5966 Handle<Object> o = v8::Utils::OpenHandle(*result);
(...skipping 30 matching lines...) Expand all
5994 // Check that free space filler is at the right place and did not smash the 5997 // Check that free space filler is at the right place and did not smash the
5995 // array header. 5998 // array header.
5996 CHECK(array->IsFixedArrayBase()); 5999 CHECK(array->IsFixedArrayBase());
5997 CHECK_EQ(initial_length - elements_to_trim, array->length()); 6000 CHECK_EQ(initial_length - elements_to_trim, array->length());
5998 int new_size = array->size(); 6001 int new_size = array->size();
5999 if (new_size != old_size) { 6002 if (new_size != old_size) {
6000 // Free space filler should be created in this case. 6003 // Free space filler should be created in this case.
6001 Address next_obj_address = array->address() + array->size(); 6004 Address next_obj_address = array->address() + array->size();
6002 CHECK(HeapObject::FromAddress(next_obj_address)->IsFiller()); 6005 CHECK(HeapObject::FromAddress(next_obj_address)->IsFiller());
6003 } 6006 }
6004 heap->CollectAllAvailableGarbage(); 6007 CcTest::CollectAllAvailableGarbage();
6005 } 6008 }
6006 6009
6007 6010
6008 TEST(Regress472513) { 6011 TEST(Regress472513) {
6009 CcTest::InitializeVM(); 6012 CcTest::InitializeVM();
6010 v8::HandleScope scope(CcTest::isolate()); 6013 v8::HandleScope scope(CcTest::isolate());
6011 6014
6012 // The combination of type/initial_length/elements_to_trim triggered 6015 // The combination of type/initial_length/elements_to_trim triggered
6013 // typed array header smashing with free space filler (crbug/472513). 6016 // typed array header smashing with free space filler (crbug/472513).
6014 6017
(...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after
6051 CHECK(try_catch.HasCaught()); 6054 CHECK(try_catch.HasCaught());
6052 Isolate* isolate = CcTest::i_isolate(); 6055 Isolate* isolate = CcTest::i_isolate();
6053 Handle<Object> exception = v8::Utils::OpenHandle(*try_catch.Exception()); 6056 Handle<Object> exception = v8::Utils::OpenHandle(*try_catch.Exception());
6054 Handle<Name> key = isolate->factory()->stack_trace_symbol(); 6057 Handle<Name> key = isolate->factory()->stack_trace_symbol();
6055 Handle<Object> stack_trace = 6058 Handle<Object> stack_trace =
6056 Object::GetProperty(exception, key).ToHandleChecked(); 6059 Object::GetProperty(exception, key).ToHandleChecked();
6057 Handle<Object> code = 6060 Handle<Object> code =
6058 Object::GetElement(isolate, stack_trace, 3).ToHandleChecked(); 6061 Object::GetElement(isolate, stack_trace, 3).ToHandleChecked();
6059 CHECK(code->IsCode()); 6062 CHECK(code->IsCode());
6060 6063
6061 isolate->heap()->CollectAllAvailableGarbage("stack trace preprocessing"); 6064 CcTest::CollectAllAvailableGarbage();
6062 6065
6063 Handle<Object> pos = 6066 Handle<Object> pos =
6064 Object::GetElement(isolate, stack_trace, 3).ToHandleChecked(); 6067 Object::GetElement(isolate, stack_trace, 3).ToHandleChecked();
6065 CHECK(pos->IsSmi()); 6068 CHECK(pos->IsSmi());
6066 6069
6067 Handle<JSArray> stack_trace_array = Handle<JSArray>::cast(stack_trace); 6070 Handle<JSArray> stack_trace_array = Handle<JSArray>::cast(stack_trace);
6068 int array_length = Smi::cast(stack_trace_array->length())->value(); 6071 int array_length = Smi::cast(stack_trace_array->length())->value();
6069 for (int i = 0; i < array_length; i++) { 6072 for (int i = 0; i < array_length; i++) {
6070 Handle<Object> element = 6073 Handle<Object> element =
6071 Object::GetElement(isolate, stack_trace, i).ToHandleChecked(); 6074 Object::GetElement(isolate, stack_trace, i).ToHandleChecked();
(...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after
6104 ->Get(env.local(), name) 6107 ->Get(env.local(), name)
6105 .ToLocalChecked() 6108 .ToLocalChecked()
6106 ->ToObject(env.local()) 6109 ->ToObject(env.local())
6107 .ToLocalChecked()); 6110 .ToLocalChecked());
6108 CHECK(CcTest::global()->Delete(env.local(), name).FromJust()); 6111 CHECK(CcTest::global()->Delete(env.local(), name).FromJust());
6109 } 6112 }
6110 6113
6111 utils.SetWeak(&utils, UtilsHasBeenCollected, 6114 utils.SetWeak(&utils, UtilsHasBeenCollected,
6112 v8::WeakCallbackType::kParameter); 6115 v8::WeakCallbackType::kParameter);
6113 6116
6114 CcTest::heap()->CollectAllAvailableGarbage("fire weak callbacks"); 6117 CcTest::CollectAllAvailableGarbage();
6115 6118
6116 CHECK(utils_has_been_collected); 6119 CHECK(utils_has_been_collected);
6117 } 6120 }
6118 6121
6119 6122
6120 TEST(Regress1878) { 6123 TEST(Regress1878) {
6121 FLAG_allow_natives_syntax = true; 6124 FLAG_allow_natives_syntax = true;
6122 CcTest::InitializeVM(); 6125 CcTest::InitializeVM();
6123 v8::Isolate* isolate = CcTest::isolate(); 6126 v8::Isolate* isolate = CcTest::isolate();
6124 v8::HandleScope scope(isolate); 6127 v8::HandleScope scope(isolate);
(...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after
6162 CHECK_EQ(bytes, static_cast<size_t>(array->Size())); 6165 CHECK_EQ(bytes, static_cast<size_t>(array->Size()));
6163 } 6166 }
6164 6167
6165 6168
6166 TEST(NewSpaceAllocationCounter) { 6169 TEST(NewSpaceAllocationCounter) {
6167 CcTest::InitializeVM(); 6170 CcTest::InitializeVM();
6168 v8::HandleScope scope(CcTest::isolate()); 6171 v8::HandleScope scope(CcTest::isolate());
6169 Isolate* isolate = CcTest::i_isolate(); 6172 Isolate* isolate = CcTest::i_isolate();
6170 Heap* heap = isolate->heap(); 6173 Heap* heap = isolate->heap();
6171 size_t counter1 = heap->NewSpaceAllocationCounter(); 6174 size_t counter1 = heap->NewSpaceAllocationCounter();
6172 heap->CollectGarbage(NEW_SPACE); 6175 CcTest::CollectGarbage(NEW_SPACE);
6173 const size_t kSize = 1024; 6176 const size_t kSize = 1024;
6174 AllocateInSpace(isolate, kSize, NEW_SPACE); 6177 AllocateInSpace(isolate, kSize, NEW_SPACE);
6175 size_t counter2 = heap->NewSpaceAllocationCounter(); 6178 size_t counter2 = heap->NewSpaceAllocationCounter();
6176 CHECK_EQ(kSize, counter2 - counter1); 6179 CHECK_EQ(kSize, counter2 - counter1);
6177 heap->CollectGarbage(NEW_SPACE); 6180 CcTest::CollectGarbage(NEW_SPACE);
6178 size_t counter3 = heap->NewSpaceAllocationCounter(); 6181 size_t counter3 = heap->NewSpaceAllocationCounter();
6179 CHECK_EQ(0U, counter3 - counter2); 6182 CHECK_EQ(0U, counter3 - counter2);
6180 // Test counter overflow. 6183 // Test counter overflow.
6181 size_t max_counter = -1; 6184 size_t max_counter = -1;
6182 heap->set_new_space_allocation_counter(max_counter - 10 * kSize); 6185 heap->set_new_space_allocation_counter(max_counter - 10 * kSize);
6183 size_t start = heap->NewSpaceAllocationCounter(); 6186 size_t start = heap->NewSpaceAllocationCounter();
6184 for (int i = 0; i < 20; i++) { 6187 for (int i = 0; i < 20; i++) {
6185 AllocateInSpace(isolate, kSize, NEW_SPACE); 6188 AllocateInSpace(isolate, kSize, NEW_SPACE);
6186 size_t counter = heap->NewSpaceAllocationCounter(); 6189 size_t counter = heap->NewSpaceAllocationCounter();
6187 CHECK_EQ(kSize, counter - start); 6190 CHECK_EQ(kSize, counter - start);
6188 start = counter; 6191 start = counter;
6189 } 6192 }
6190 } 6193 }
6191 6194
6192 6195
6193 TEST(OldSpaceAllocationCounter) { 6196 TEST(OldSpaceAllocationCounter) {
6194 CcTest::InitializeVM(); 6197 CcTest::InitializeVM();
6195 v8::HandleScope scope(CcTest::isolate()); 6198 v8::HandleScope scope(CcTest::isolate());
6196 Isolate* isolate = CcTest::i_isolate(); 6199 Isolate* isolate = CcTest::i_isolate();
6197 Heap* heap = isolate->heap(); 6200 Heap* heap = isolate->heap();
6198 size_t counter1 = heap->OldGenerationAllocationCounter(); 6201 size_t counter1 = heap->OldGenerationAllocationCounter();
6199 heap->CollectGarbage(NEW_SPACE); 6202 CcTest::CollectGarbage(NEW_SPACE);
6200 heap->CollectGarbage(NEW_SPACE); 6203 CcTest::CollectGarbage(NEW_SPACE);
6201 const size_t kSize = 1024; 6204 const size_t kSize = 1024;
6202 AllocateInSpace(isolate, kSize, OLD_SPACE); 6205 AllocateInSpace(isolate, kSize, OLD_SPACE);
6203 size_t counter2 = heap->OldGenerationAllocationCounter(); 6206 size_t counter2 = heap->OldGenerationAllocationCounter();
6204 // TODO(ulan): replace all CHECK_LE with CHECK_EQ after v8:4148 is fixed. 6207 // TODO(ulan): replace all CHECK_LE with CHECK_EQ after v8:4148 is fixed.
6205 CHECK_LE(kSize, counter2 - counter1); 6208 CHECK_LE(kSize, counter2 - counter1);
6206 heap->CollectGarbage(NEW_SPACE); 6209 CcTest::CollectGarbage(NEW_SPACE);
6207 size_t counter3 = heap->OldGenerationAllocationCounter(); 6210 size_t counter3 = heap->OldGenerationAllocationCounter();
6208 CHECK_EQ(0u, counter3 - counter2); 6211 CHECK_EQ(0u, counter3 - counter2);
6209 AllocateInSpace(isolate, kSize, OLD_SPACE); 6212 AllocateInSpace(isolate, kSize, OLD_SPACE);
6210 heap->CollectGarbage(OLD_SPACE); 6213 CcTest::CollectGarbage(OLD_SPACE);
6211 size_t counter4 = heap->OldGenerationAllocationCounter(); 6214 size_t counter4 = heap->OldGenerationAllocationCounter();
6212 CHECK_LE(kSize, counter4 - counter3); 6215 CHECK_LE(kSize, counter4 - counter3);
6213 // Test counter overflow. 6216 // Test counter overflow.
6214 size_t max_counter = -1; 6217 size_t max_counter = -1;
6215 heap->set_old_generation_allocation_counter(max_counter - 10 * kSize); 6218 heap->set_old_generation_allocation_counter(max_counter - 10 * kSize);
6216 size_t start = heap->OldGenerationAllocationCounter(); 6219 size_t start = heap->OldGenerationAllocationCounter();
6217 for (int i = 0; i < 20; i++) { 6220 for (int i = 0; i < 20; i++) {
6218 AllocateInSpace(isolate, kSize, OLD_SPACE); 6221 AllocateInSpace(isolate, kSize, OLD_SPACE);
6219 size_t counter = heap->OldGenerationAllocationCounter(); 6222 size_t counter = heap->OldGenerationAllocationCounter();
6220 CHECK_LE(kSize, counter - start); 6223 CHECK_LE(kSize, counter - start);
(...skipping 54 matching lines...) Expand 10 before | Expand all | Expand 10 after
6275 } 6278 }
6276 6279
6277 6280
6278 static void RemoveCodeAndGC(const v8::FunctionCallbackInfo<v8::Value>& args) { 6281 static void RemoveCodeAndGC(const v8::FunctionCallbackInfo<v8::Value>& args) {
6279 Isolate* isolate = CcTest::i_isolate(); 6282 Isolate* isolate = CcTest::i_isolate();
6280 Handle<Object> obj = v8::Utils::OpenHandle(*args[0]); 6283 Handle<Object> obj = v8::Utils::OpenHandle(*args[0]);
6281 Handle<JSFunction> fun = Handle<JSFunction>::cast(obj); 6284 Handle<JSFunction> fun = Handle<JSFunction>::cast(obj);
6282 fun->ReplaceCode(*isolate->builtins()->CompileLazy()); 6285 fun->ReplaceCode(*isolate->builtins()->CompileLazy());
6283 fun->shared()->ReplaceCode(*isolate->builtins()->CompileLazy()); 6286 fun->shared()->ReplaceCode(*isolate->builtins()->CompileLazy());
6284 fun->shared()->ClearBytecodeArray(); // Bytecode is code too. 6287 fun->shared()->ClearBytecodeArray(); // Bytecode is code too.
6285 isolate->heap()->CollectAllAvailableGarbage("remove code and gc"); 6288 CcTest::CollectAllAvailableGarbage();
6286 } 6289 }
6287 6290
6288 6291
6289 TEST(CanonicalSharedFunctionInfo) { 6292 TEST(CanonicalSharedFunctionInfo) {
6290 CcTest::InitializeVM(); 6293 CcTest::InitializeVM();
6291 v8::Isolate* isolate = CcTest::isolate(); 6294 v8::Isolate* isolate = CcTest::isolate();
6292 v8::HandleScope scope(isolate); 6295 v8::HandleScope scope(isolate);
6293 v8::Local<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate); 6296 v8::Local<v8::ObjectTemplate> global = v8::ObjectTemplate::New(isolate);
6294 global->Set(isolate, "check", v8::FunctionTemplate::New( 6297 global->Set(isolate, "check", v8::FunctionTemplate::New(
6295 isolate, CheckEqualSharedFunctionInfos)); 6298 isolate, CheckEqualSharedFunctionInfos));
(...skipping 66 matching lines...) Expand 10 before | Expand all | Expand 10 after
6362 } 6365 }
6363 6366
6364 6367
6365 TEST(ScriptIterator) { 6368 TEST(ScriptIterator) {
6366 CcTest::InitializeVM(); 6369 CcTest::InitializeVM();
6367 v8::HandleScope scope(CcTest::isolate()); 6370 v8::HandleScope scope(CcTest::isolate());
6368 Isolate* isolate = CcTest::i_isolate(); 6371 Isolate* isolate = CcTest::i_isolate();
6369 Heap* heap = CcTest::heap(); 6372 Heap* heap = CcTest::heap();
6370 LocalContext context; 6373 LocalContext context;
6371 6374
6372 heap->CollectAllGarbage(); 6375 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
6373 6376
6374 int script_count = 0; 6377 int script_count = 0;
6375 { 6378 {
6376 HeapIterator it(heap); 6379 HeapIterator it(heap);
6377 for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) { 6380 for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
6378 if (obj->IsScript()) script_count++; 6381 if (obj->IsScript()) script_count++;
6379 } 6382 }
6380 } 6383 }
6381 6384
6382 { 6385 {
6383 Script::Iterator iterator(isolate); 6386 Script::Iterator iterator(isolate);
6384 while (iterator.Next()) script_count--; 6387 while (iterator.Next()) script_count--;
6385 } 6388 }
6386 6389
6387 CHECK_EQ(0, script_count); 6390 CHECK_EQ(0, script_count);
6388 } 6391 }
6389 6392
6390 6393
6391 TEST(SharedFunctionInfoIterator) { 6394 TEST(SharedFunctionInfoIterator) {
6392 CcTest::InitializeVM(); 6395 CcTest::InitializeVM();
6393 v8::HandleScope scope(CcTest::isolate()); 6396 v8::HandleScope scope(CcTest::isolate());
6394 Isolate* isolate = CcTest::i_isolate(); 6397 Isolate* isolate = CcTest::i_isolate();
6395 Heap* heap = CcTest::heap(); 6398 Heap* heap = CcTest::heap();
6396 LocalContext context; 6399 LocalContext context;
6397 6400
6398 heap->CollectAllGarbage(); 6401 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
6399 heap->CollectAllGarbage(); 6402 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
6400 6403
6401 int sfi_count = 0; 6404 int sfi_count = 0;
6402 { 6405 {
6403 HeapIterator it(heap); 6406 HeapIterator it(heap);
6404 for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) { 6407 for (HeapObject* obj = it.next(); obj != NULL; obj = it.next()) {
6405 if (!obj->IsSharedFunctionInfo()) continue; 6408 if (!obj->IsSharedFunctionInfo()) continue;
6406 sfi_count++; 6409 sfi_count++;
6407 } 6410 }
6408 } 6411 }
6409 6412
(...skipping 19 matching lines...) Expand all
6429 Heap* heap = CcTest::heap(); 6432 Heap* heap = CcTest::heap();
6430 LocalContext context; 6433 LocalContext context;
6431 6434
6432 v8::Persistent<Value> parent; 6435 v8::Persistent<Value> parent;
6433 v8::Persistent<Value> child; 6436 v8::Persistent<Value> child;
6434 6437
6435 parent.Reset(isolate, v8::Object::New(isolate)); 6438 parent.Reset(isolate, v8::Object::New(isolate));
6436 child.Reset(isolate, v8::Object::New(isolate)); 6439 child.Reset(isolate, v8::Object::New(isolate));
6437 6440
6438 heap::SimulateFullSpace(heap->old_space()); 6441 heap::SimulateFullSpace(heap->old_space());
6439 heap->CollectGarbage(OLD_SPACE); 6442 CcTest::CollectGarbage(OLD_SPACE);
6440 { 6443 {
6441 UniqueId id = MakeUniqueId(parent); 6444 UniqueId id = MakeUniqueId(parent);
6442 isolate->SetObjectGroupId(parent, id); 6445 isolate->SetObjectGroupId(parent, id);
6443 isolate->SetReferenceFromGroup(id, child); 6446 isolate->SetReferenceFromGroup(id, child);
6444 } 6447 }
6445 // The CollectGarbage call above starts sweeper threads. 6448 // The CollectGarbage call above starts sweeper threads.
6446 // The crash will happen if the following two functions 6449 // The crash will happen if the following two functions
6447 // are called before sweeping finishes. 6450 // are called before sweeping finishes.
6448 heap->StartIncrementalMarking(); 6451 heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
6449 heap->FinalizeIncrementalMarkingIfComplete("test"); 6452 i::GarbageCollectionReason::kTesting);
6453 heap->FinalizeIncrementalMarkingIfComplete(
6454 i::GarbageCollectionReason::kTesting);
6450 } 6455 }
6451 6456
6452 6457
6453 HEAP_TEST(TestMemoryReducerSampleJsCalls) { 6458 HEAP_TEST(TestMemoryReducerSampleJsCalls) {
6454 CcTest::InitializeVM(); 6459 CcTest::InitializeVM();
6455 v8::HandleScope scope(CcTest::isolate()); 6460 v8::HandleScope scope(CcTest::isolate());
6456 Heap* heap = CcTest::heap(); 6461 Heap* heap = CcTest::heap();
6457 Isolate* isolate = CcTest::i_isolate(); 6462 Isolate* isolate = CcTest::i_isolate();
6458 MemoryReducer* memory_reducer = heap->memory_reducer_; 6463 MemoryReducer* memory_reducer = heap->memory_reducer_;
6459 memory_reducer->SampleAndGetJsCallsPerMs(0); 6464 memory_reducer->SampleAndGetJsCallsPerMs(0);
(...skipping 26 matching lines...) Expand all
6486 Factory* factory = isolate->factory(); 6491 Factory* factory = isolate->factory();
6487 const int N = 6492 const int N =
6488 (kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) / kPointerSize; 6493 (kMaxRegularHeapObjectSize - FixedArray::kHeaderSize) / kPointerSize;
6489 Handle<FixedArray> array = factory->NewFixedArray(N, TENURED); 6494 Handle<FixedArray> array = factory->NewFixedArray(N, TENURED);
6490 CHECK(heap->old_space()->Contains(*array)); 6495 CHECK(heap->old_space()->Contains(*array));
6491 Handle<Object> number = factory->NewHeapNumber(1.0); 6496 Handle<Object> number = factory->NewHeapNumber(1.0);
6492 CHECK(heap->InNewSpace(*number)); 6497 CHECK(heap->InNewSpace(*number));
6493 for (int i = 0; i < N; i++) { 6498 for (int i = 0; i < N; i++) {
6494 array->set(i, *number); 6499 array->set(i, *number);
6495 } 6500 }
6496 heap->CollectGarbage(OLD_SPACE); 6501 CcTest::CollectGarbage(OLD_SPACE);
6497 heap::SimulateFullSpace(heap->old_space()); 6502 heap::SimulateFullSpace(heap->old_space());
6498 heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(*array, N - 1); 6503 heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(*array, N - 1);
6499 heap->mark_compact_collector()->EnsureSweepingCompleted(); 6504 heap->mark_compact_collector()->EnsureSweepingCompleted();
6500 ByteArray* byte_array; 6505 ByteArray* byte_array;
6501 const int M = 256; 6506 const int M = 256;
6502 // Don't allow old space expansion. The test works without this flag too, 6507 // Don't allow old space expansion. The test works without this flag too,
6503 // but becomes very slow. 6508 // but becomes very slow.
6504 heap->set_force_oom(true); 6509 heap->set_force_oom(true);
6505 while (heap->AllocateByteArray(M, TENURED).To(&byte_array)) { 6510 while (heap->AllocateByteArray(M, TENURED).To(&byte_array)) {
6506 for (int j = 0; j < M; j++) { 6511 for (int j = 0; j < M; j++) {
6507 byte_array->set(j, 0x31); 6512 byte_array->set(j, 0x31);
6508 } 6513 }
6509 } 6514 }
6510 // Re-enable old space expansion to avoid OOM crash. 6515 // Re-enable old space expansion to avoid OOM crash.
6511 heap->set_force_oom(false); 6516 heap->set_force_oom(false);
6512 heap->CollectGarbage(NEW_SPACE); 6517 CcTest::CollectGarbage(NEW_SPACE);
6513 } 6518 }
6514 6519
6515 HEAP_TEST(Regress589413) { 6520 HEAP_TEST(Regress589413) {
6516 FLAG_stress_compaction = true; 6521 FLAG_stress_compaction = true;
6517 FLAG_manual_evacuation_candidates_selection = true; 6522 FLAG_manual_evacuation_candidates_selection = true;
6518 FLAG_parallel_compaction = false; 6523 FLAG_parallel_compaction = false;
6519 FLAG_concurrent_sweeping = false; 6524 FLAG_concurrent_sweeping = false;
6520 CcTest::InitializeVM(); 6525 CcTest::InitializeVM();
6521 v8::HandleScope scope(CcTest::isolate()); 6526 v8::HandleScope scope(CcTest::isolate());
6522 Heap* heap = CcTest::heap(); 6527 Heap* heap = CcTest::heap();
6523 // Get the heap in clean state. 6528 // Get the heap in clean state.
6524 heap->CollectGarbage(OLD_SPACE); 6529 CcTest::CollectGarbage(OLD_SPACE);
6525 heap->CollectGarbage(OLD_SPACE); 6530 CcTest::CollectGarbage(OLD_SPACE);
6526 Isolate* isolate = CcTest::i_isolate(); 6531 Isolate* isolate = CcTest::i_isolate();
6527 Factory* factory = isolate->factory(); 6532 Factory* factory = isolate->factory();
6528 // Fill the new space with byte arrays with elements looking like pointers. 6533 // Fill the new space with byte arrays with elements looking like pointers.
6529 const int M = 256; 6534 const int M = 256;
6530 ByteArray* byte_array; 6535 ByteArray* byte_array;
6531 while (heap->AllocateByteArray(M).To(&byte_array)) { 6536 while (heap->AllocateByteArray(M).To(&byte_array)) {
6532 for (int j = 0; j < M; j++) { 6537 for (int j = 0; j < M; j++) {
6533 byte_array->set(j, 0x31); 6538 byte_array->set(j, 0x31);
6534 } 6539 }
6535 // Add the array in root set. 6540 // Add the array in root set.
6536 handle(byte_array); 6541 handle(byte_array);
6537 } 6542 }
6538 // Make sure the byte arrays will be promoted on the next GC. 6543 // Make sure the byte arrays will be promoted on the next GC.
6539 heap->CollectGarbage(NEW_SPACE); 6544 CcTest::CollectGarbage(NEW_SPACE);
6540 // This number is close to large free list category threshold. 6545 // This number is close to large free list category threshold.
6541 const int N = 0x3eee; 6546 const int N = 0x3eee;
6542 { 6547 {
6543 std::vector<FixedArray*> arrays; 6548 std::vector<FixedArray*> arrays;
6544 std::set<Page*> pages; 6549 std::set<Page*> pages;
6545 FixedArray* array; 6550 FixedArray* array;
6546 // Fill all pages with fixed arrays. 6551 // Fill all pages with fixed arrays.
6547 heap->set_force_oom(true); 6552 heap->set_force_oom(true);
6548 while (heap->AllocateFixedArray(N, TENURED).To(&array)) { 6553 while (heap->AllocateFixedArray(N, TENURED).To(&array)) {
6549 arrays.push_back(array); 6554 arrays.push_back(array);
(...skipping 27 matching lines...) Expand all
6577 } 6582 }
6578 } 6583 }
6579 } 6584 }
6580 heap::SimulateIncrementalMarking(heap); 6585 heap::SimulateIncrementalMarking(heap);
6581 for (size_t j = 0; j < arrays.size(); j++) { 6586 for (size_t j = 0; j < arrays.size(); j++) {
6582 heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(arrays[j], N - 1); 6587 heap->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(arrays[j], N - 1);
6583 } 6588 }
6584 } 6589 }
6585 // Force allocation from the free list. 6590 // Force allocation from the free list.
6586 heap->set_force_oom(true); 6591 heap->set_force_oom(true);
6587 heap->CollectGarbage(OLD_SPACE); 6592 CcTest::CollectGarbage(OLD_SPACE);
6588 } 6593 }
6589 6594
6590 TEST(Regress598319) { 6595 TEST(Regress598319) {
6591 // This test ensures that no white objects can cross the progress bar of large 6596 // This test ensures that no white objects can cross the progress bar of large
6592 // objects during incremental marking. It checks this by using Shift() during 6597 // objects during incremental marking. It checks this by using Shift() during
6593 // incremental marking. 6598 // incremental marking.
6594 CcTest::InitializeVM(); 6599 CcTest::InitializeVM();
6595 v8::HandleScope scope(CcTest::isolate()); 6600 v8::HandleScope scope(CcTest::isolate());
6596 Heap* heap = CcTest::heap(); 6601 Heap* heap = CcTest::heap();
6597 Isolate* isolate = heap->isolate(); 6602 Isolate* isolate = heap->isolate();
(...skipping 20 matching lines...) Expand all
6618 6623
6619 Handle<FixedArray> root; 6624 Handle<FixedArray> root;
6620 } arr(isolate, kNumberOfObjects); 6625 } arr(isolate, kNumberOfObjects);
6621 6626
6622 CHECK_EQ(arr.get()->length(), kNumberOfObjects); 6627 CHECK_EQ(arr.get()->length(), kNumberOfObjects);
6623 CHECK(heap->lo_space()->Contains(arr.get())); 6628 CHECK(heap->lo_space()->Contains(arr.get()));
6624 LargePage* page = heap->lo_space()->FindPage(arr.get()->address()); 6629 LargePage* page = heap->lo_space()->FindPage(arr.get()->address());
6625 CHECK_NOT_NULL(page); 6630 CHECK_NOT_NULL(page);
6626 6631
6627 // GC to cleanup state 6632 // GC to cleanup state
6628 heap->CollectGarbage(OLD_SPACE); 6633 CcTest::CollectGarbage(OLD_SPACE);
6629 MarkCompactCollector* collector = heap->mark_compact_collector(); 6634 MarkCompactCollector* collector = heap->mark_compact_collector();
6630 if (collector->sweeping_in_progress()) { 6635 if (collector->sweeping_in_progress()) {
6631 collector->EnsureSweepingCompleted(); 6636 collector->EnsureSweepingCompleted();
6632 } 6637 }
6633 6638
6634 CHECK(heap->lo_space()->Contains(arr.get())); 6639 CHECK(heap->lo_space()->Contains(arr.get()));
6635 CHECK(Marking::IsWhite(ObjectMarking::MarkBitFrom(arr.get()))); 6640 CHECK(Marking::IsWhite(ObjectMarking::MarkBitFrom(arr.get())));
6636 for (int i = 0; i < arr.get()->length(); i++) { 6641 for (int i = 0; i < arr.get()->length(); i++) {
6637 CHECK(Marking::IsWhite( 6642 CHECK(Marking::IsWhite(
6638 ObjectMarking::MarkBitFrom(HeapObject::cast(arr.get()->get(i))))); 6643 ObjectMarking::MarkBitFrom(HeapObject::cast(arr.get()->get(i)))));
6639 } 6644 }
6640 6645
6641 // Start incremental marking. 6646 // Start incremental marking.
6642 IncrementalMarking* marking = heap->incremental_marking(); 6647 IncrementalMarking* marking = heap->incremental_marking();
6643 CHECK(marking->IsMarking() || marking->IsStopped()); 6648 CHECK(marking->IsMarking() || marking->IsStopped());
6644 if (marking->IsStopped()) { 6649 if (marking->IsStopped()) {
6645 heap->StartIncrementalMarking(); 6650 heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
6651 i::GarbageCollectionReason::kTesting);
6646 } 6652 }
6647 CHECK(marking->IsMarking()); 6653 CHECK(marking->IsMarking());
6648 6654
6649 // Check that we have not marked the interesting array during root scanning. 6655 // Check that we have not marked the interesting array during root scanning.
6650 for (int i = 0; i < arr.get()->length(); i++) { 6656 for (int i = 0; i < arr.get()->length(); i++) {
6651 CHECK(Marking::IsWhite( 6657 CHECK(Marking::IsWhite(
6652 ObjectMarking::MarkBitFrom(HeapObject::cast(arr.get()->get(i))))); 6658 ObjectMarking::MarkBitFrom(HeapObject::cast(arr.get()->get(i)))));
6653 } 6659 }
6654 6660
6655 // Now we search for a state where we are in incremental marking and have 6661 // Now we search for a state where we are in incremental marking and have
(...skipping 45 matching lines...) Expand 10 before | Expand all | Expand 10 after
6701 intptr_t size_after = heap->SizeOfObjects(); 6707 intptr_t size_after = heap->SizeOfObjects();
6702 CHECK_EQ(size_after, size_before + array->Size()); 6708 CHECK_EQ(size_after, size_before + array->Size());
6703 } 6709 }
6704 6710
6705 TEST(Regress615489) { 6711 TEST(Regress615489) {
6706 FLAG_black_allocation = true; 6712 FLAG_black_allocation = true;
6707 CcTest::InitializeVM(); 6713 CcTest::InitializeVM();
6708 v8::HandleScope scope(CcTest::isolate()); 6714 v8::HandleScope scope(CcTest::isolate());
6709 Heap* heap = CcTest::heap(); 6715 Heap* heap = CcTest::heap();
6710 Isolate* isolate = heap->isolate(); 6716 Isolate* isolate = heap->isolate();
6711 heap->CollectAllGarbage(); 6717 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
6712 6718
6713 i::MarkCompactCollector* collector = heap->mark_compact_collector(); 6719 i::MarkCompactCollector* collector = heap->mark_compact_collector();
6714 i::IncrementalMarking* marking = heap->incremental_marking(); 6720 i::IncrementalMarking* marking = heap->incremental_marking();
6715 if (collector->sweeping_in_progress()) { 6721 if (collector->sweeping_in_progress()) {
6716 collector->EnsureSweepingCompleted(); 6722 collector->EnsureSweepingCompleted();
6717 } 6723 }
6718 CHECK(marking->IsMarking() || marking->IsStopped()); 6724 CHECK(marking->IsMarking() || marking->IsStopped());
6719 if (marking->IsStopped()) { 6725 if (marking->IsStopped()) {
6720 heap->StartIncrementalMarking(); 6726 heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
6727 i::GarbageCollectionReason::kTesting);
6721 } 6728 }
6722 CHECK(marking->IsMarking()); 6729 CHECK(marking->IsMarking());
6723 marking->StartBlackAllocationForTesting(); 6730 marking->StartBlackAllocationForTesting();
6724 { 6731 {
6725 AlwaysAllocateScope always_allocate(CcTest::i_isolate()); 6732 AlwaysAllocateScope always_allocate(CcTest::i_isolate());
6726 v8::HandleScope inner(CcTest::isolate()); 6733 v8::HandleScope inner(CcTest::isolate());
6727 isolate->factory()->NewFixedArray(500, TENURED)->Size(); 6734 isolate->factory()->NewFixedArray(500, TENURED)->Size();
6728 } 6735 }
6729 while (!marking->IsComplete()) { 6736 while (!marking->IsComplete()) {
6730 marking->Step(i::MB, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD, 6737 marking->Step(i::MB, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
6731 IncrementalMarking::FORCE_COMPLETION); 6738 IncrementalMarking::FORCE_COMPLETION);
6732 if (marking->IsReadyToOverApproximateWeakClosure()) { 6739 if (marking->IsReadyToOverApproximateWeakClosure()) {
6733 marking->FinalizeIncrementally(); 6740 marking->FinalizeIncrementally();
6734 } 6741 }
6735 } 6742 }
6736 CHECK(marking->IsComplete()); 6743 CHECK(marking->IsComplete());
6737 intptr_t size_before = heap->SizeOfObjects(); 6744 intptr_t size_before = heap->SizeOfObjects();
6738 CcTest::heap()->CollectAllGarbage(); 6745 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
6739 intptr_t size_after = heap->SizeOfObjects(); 6746 intptr_t size_after = heap->SizeOfObjects();
6740 // Live size does not increase after garbage collection. 6747 // Live size does not increase after garbage collection.
6741 CHECK_LE(size_after, size_before); 6748 CHECK_LE(size_after, size_before);
6742 } 6749 }
6743 6750
6744 class StaticOneByteResource : public v8::String::ExternalOneByteStringResource { 6751 class StaticOneByteResource : public v8::String::ExternalOneByteStringResource {
6745 public: 6752 public:
6746 explicit StaticOneByteResource(const char* data) : data_(data) {} 6753 explicit StaticOneByteResource(const char* data) : data_(data) {}
6747 6754
6748 ~StaticOneByteResource() {} 6755 ~StaticOneByteResource() {}
6749 6756
6750 const char* data() const { return data_; } 6757 const char* data() const { return data_; }
6751 6758
6752 size_t length() const { return strlen(data_); } 6759 size_t length() const { return strlen(data_); }
6753 6760
6754 private: 6761 private:
6755 const char* data_; 6762 const char* data_;
6756 }; 6763 };
6757 6764
6758 TEST(Regress631969) { 6765 TEST(Regress631969) {
6759 FLAG_manual_evacuation_candidates_selection = true; 6766 FLAG_manual_evacuation_candidates_selection = true;
6760 FLAG_parallel_compaction = false; 6767 FLAG_parallel_compaction = false;
6761 FLAG_concurrent_sweeping = false; 6768 FLAG_concurrent_sweeping = false;
6762 CcTest::InitializeVM(); 6769 CcTest::InitializeVM();
6763 v8::HandleScope scope(CcTest::isolate()); 6770 v8::HandleScope scope(CcTest::isolate());
6764 Heap* heap = CcTest::heap(); 6771 Heap* heap = CcTest::heap();
6765 // Get the heap in clean state. 6772 // Get the heap in clean state.
6766 heap->CollectGarbage(OLD_SPACE); 6773 CcTest::CollectGarbage(OLD_SPACE);
6767 heap->CollectGarbage(OLD_SPACE); 6774 CcTest::CollectGarbage(OLD_SPACE);
6768 Isolate* isolate = CcTest::i_isolate(); 6775 Isolate* isolate = CcTest::i_isolate();
6769 Factory* factory = isolate->factory(); 6776 Factory* factory = isolate->factory();
6770 // Allocate two strings in a fresh page and mark the page as evacuation 6777 // Allocate two strings in a fresh page and mark the page as evacuation
6771 // candidate. 6778 // candidate.
6772 heap::SimulateFullSpace(heap->old_space()); 6779 heap::SimulateFullSpace(heap->old_space());
6773 Handle<String> s1 = factory->NewStringFromStaticChars("123456789", TENURED); 6780 Handle<String> s1 = factory->NewStringFromStaticChars("123456789", TENURED);
6774 Handle<String> s2 = factory->NewStringFromStaticChars("01234", TENURED); 6781 Handle<String> s2 = factory->NewStringFromStaticChars("01234", TENURED);
6775 Page::FromAddress(s1->address()) 6782 Page::FromAddress(s1->address())
6776 ->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING); 6783 ->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
6777 6784
6778 heap::SimulateIncrementalMarking(heap, false); 6785 heap::SimulateIncrementalMarking(heap, false);
6779 6786
6780 // Allocate a cons string and promote it to a fresh page in the old space. 6787 // Allocate a cons string and promote it to a fresh page in the old space.
6781 heap::SimulateFullSpace(heap->old_space()); 6788 heap::SimulateFullSpace(heap->old_space());
6782 Handle<String> s3; 6789 Handle<String> s3;
6783 factory->NewConsString(s1, s2).ToHandle(&s3); 6790 factory->NewConsString(s1, s2).ToHandle(&s3);
6784 heap->CollectGarbage(NEW_SPACE); 6791 CcTest::CollectGarbage(NEW_SPACE);
6785 heap->CollectGarbage(NEW_SPACE); 6792 CcTest::CollectGarbage(NEW_SPACE);
6786 6793
6787 // Finish incremental marking. 6794 // Finish incremental marking.
6788 IncrementalMarking* marking = heap->incremental_marking(); 6795 IncrementalMarking* marking = heap->incremental_marking();
6789 while (!marking->IsComplete()) { 6796 while (!marking->IsComplete()) {
6790 marking->Step(MB, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD, 6797 marking->Step(MB, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD,
6791 IncrementalMarking::FORCE_COMPLETION); 6798 IncrementalMarking::FORCE_COMPLETION);
6792 if (marking->IsReadyToOverApproximateWeakClosure()) { 6799 if (marking->IsReadyToOverApproximateWeakClosure()) {
6793 marking->FinalizeIncrementally(); 6800 marking->FinalizeIncrementally();
6794 } 6801 }
6795 } 6802 }
6796 6803
6797 { 6804 {
6798 StaticOneByteResource external_string("12345678901234"); 6805 StaticOneByteResource external_string("12345678901234");
6799 s3->MakeExternal(&external_string); 6806 s3->MakeExternal(&external_string);
6800 heap->CollectGarbage(OLD_SPACE); 6807 CcTest::CollectGarbage(OLD_SPACE);
6801 } 6808 }
6802 } 6809 }
6803 6810
6804 TEST(LeftTrimFixedArrayInBlackArea) { 6811 TEST(LeftTrimFixedArrayInBlackArea) {
6805 FLAG_black_allocation = true; 6812 FLAG_black_allocation = true;
6806 CcTest::InitializeVM(); 6813 CcTest::InitializeVM();
6807 v8::HandleScope scope(CcTest::isolate()); 6814 v8::HandleScope scope(CcTest::isolate());
6808 Heap* heap = CcTest::heap(); 6815 Heap* heap = CcTest::heap();
6809 Isolate* isolate = heap->isolate(); 6816 Isolate* isolate = heap->isolate();
6810 heap->CollectAllGarbage(); 6817 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
6811 6818
6812 i::MarkCompactCollector* collector = heap->mark_compact_collector(); 6819 i::MarkCompactCollector* collector = heap->mark_compact_collector();
6813 i::IncrementalMarking* marking = heap->incremental_marking(); 6820 i::IncrementalMarking* marking = heap->incremental_marking();
6814 if (collector->sweeping_in_progress()) { 6821 if (collector->sweeping_in_progress()) {
6815 collector->EnsureSweepingCompleted(); 6822 collector->EnsureSweepingCompleted();
6816 } 6823 }
6817 CHECK(marking->IsMarking() || marking->IsStopped()); 6824 CHECK(marking->IsMarking() || marking->IsStopped());
6818 if (marking->IsStopped()) { 6825 if (marking->IsStopped()) {
6819 heap->StartIncrementalMarking(); 6826 heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
6827 i::GarbageCollectionReason::kTesting);
6820 } 6828 }
6821 CHECK(marking->IsMarking()); 6829 CHECK(marking->IsMarking());
6822 marking->StartBlackAllocationForTesting(); 6830 marking->StartBlackAllocationForTesting();
6823 6831
6824 // Ensure that we allocate a new page, set up a bump pointer area, and 6832 // Ensure that we allocate a new page, set up a bump pointer area, and
6825 // perform the allocation in a black area. 6833 // perform the allocation in a black area.
6826 heap::SimulateFullSpace(heap->old_space()); 6834 heap::SimulateFullSpace(heap->old_space());
6827 isolate->factory()->NewFixedArray(4, TENURED); 6835 isolate->factory()->NewFixedArray(4, TENURED);
6828 Handle<FixedArray> array = isolate->factory()->NewFixedArray(50, TENURED); 6836 Handle<FixedArray> array = isolate->factory()->NewFixedArray(50, TENURED);
6829 CHECK(heap->old_space()->Contains(*array)); 6837 CHECK(heap->old_space()->Contains(*array));
6830 CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(*array))); 6838 CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(*array)));
6831 6839
6832 // Now left trim the allocated black area. A filler has to be installed 6840 // Now left trim the allocated black area. A filler has to be installed
6833 // for the trimmed area and all mark bits of the trimmed area have to be 6841 // for the trimmed area and all mark bits of the trimmed area have to be
6834 // cleared. 6842 // cleared.
6835 FixedArrayBase* trimmed = heap->LeftTrimFixedArray(*array, 10); 6843 FixedArrayBase* trimmed = heap->LeftTrimFixedArray(*array, 10);
6836 CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(trimmed))); 6844 CHECK(Marking::IsBlack(ObjectMarking::MarkBitFrom(trimmed)));
6837 6845
6838 heap::GcAndSweep(heap, OLD_SPACE); 6846 heap::GcAndSweep(heap, OLD_SPACE);
6839 } 6847 }
6840 6848
6841 TEST(ContinuousLeftTrimFixedArrayInBlackArea) { 6849 TEST(ContinuousLeftTrimFixedArrayInBlackArea) {
6842 FLAG_black_allocation = true; 6850 FLAG_black_allocation = true;
6843 CcTest::InitializeVM(); 6851 CcTest::InitializeVM();
6844 v8::HandleScope scope(CcTest::isolate()); 6852 v8::HandleScope scope(CcTest::isolate());
6845 Heap* heap = CcTest::heap(); 6853 Heap* heap = CcTest::heap();
6846 Isolate* isolate = heap->isolate(); 6854 Isolate* isolate = heap->isolate();
6847 heap->CollectAllGarbage(); 6855 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
6848 6856
6849 i::MarkCompactCollector* collector = heap->mark_compact_collector(); 6857 i::MarkCompactCollector* collector = heap->mark_compact_collector();
6850 i::IncrementalMarking* marking = heap->incremental_marking(); 6858 i::IncrementalMarking* marking = heap->incremental_marking();
6851 if (collector->sweeping_in_progress()) { 6859 if (collector->sweeping_in_progress()) {
6852 collector->EnsureSweepingCompleted(); 6860 collector->EnsureSweepingCompleted();
6853 } 6861 }
6854 CHECK(marking->IsMarking() || marking->IsStopped()); 6862 CHECK(marking->IsMarking() || marking->IsStopped());
6855 if (marking->IsStopped()) { 6863 if (marking->IsStopped()) {
6856 heap->StartIncrementalMarking(); 6864 heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
6865 i::GarbageCollectionReason::kTesting);
6857 } 6866 }
6858 CHECK(marking->IsMarking()); 6867 CHECK(marking->IsMarking());
6859 marking->StartBlackAllocationForTesting(); 6868 marking->StartBlackAllocationForTesting();
6860 6869
6861 // Ensure that we allocate a new page, set up a bump pointer area, and 6870 // Ensure that we allocate a new page, set up a bump pointer area, and
6862 // perform the allocation in a black area. 6871 // perform the allocation in a black area.
6863 heap::SimulateFullSpace(heap->old_space()); 6872 heap::SimulateFullSpace(heap->old_space());
6864 isolate->factory()->NewFixedArray(10, TENURED); 6873 isolate->factory()->NewFixedArray(10, TENURED);
6865 6874
6866 // Allocate the fixed array that will be trimmed later. 6875 // Allocate the fixed array that will be trimmed later.
(...skipping 34 matching lines...) Expand 10 before | Expand all | Expand 10 after
6901 6910
6902 heap::GcAndSweep(heap, OLD_SPACE); 6911 heap::GcAndSweep(heap, OLD_SPACE);
6903 } 6912 }
6904 6913
6905 TEST(ContinuousRightTrimFixedArrayInBlackArea) { 6914 TEST(ContinuousRightTrimFixedArrayInBlackArea) {
6906 FLAG_black_allocation = true; 6915 FLAG_black_allocation = true;
6907 CcTest::InitializeVM(); 6916 CcTest::InitializeVM();
6908 v8::HandleScope scope(CcTest::isolate()); 6917 v8::HandleScope scope(CcTest::isolate());
6909 Heap* heap = CcTest::heap(); 6918 Heap* heap = CcTest::heap();
6910 Isolate* isolate = heap->isolate(); 6919 Isolate* isolate = heap->isolate();
6911 heap->CollectAllGarbage(); 6920 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
6912 6921
6913 i::MarkCompactCollector* collector = heap->mark_compact_collector(); 6922 i::MarkCompactCollector* collector = heap->mark_compact_collector();
6914 i::IncrementalMarking* marking = heap->incremental_marking(); 6923 i::IncrementalMarking* marking = heap->incremental_marking();
6915 if (collector->sweeping_in_progress()) { 6924 if (collector->sweeping_in_progress()) {
6916 collector->EnsureSweepingCompleted(); 6925 collector->EnsureSweepingCompleted();
6917 } 6926 }
6918 CHECK(marking->IsMarking() || marking->IsStopped()); 6927 CHECK(marking->IsMarking() || marking->IsStopped());
6919 if (marking->IsStopped()) { 6928 if (marking->IsStopped()) {
6920 heap->StartIncrementalMarking(); 6929 heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
6930 i::GarbageCollectionReason::kTesting);
6921 } 6931 }
6922 CHECK(marking->IsMarking()); 6932 CHECK(marking->IsMarking());
6923 marking->StartBlackAllocationForTesting(); 6933 marking->StartBlackAllocationForTesting();
6924 6934
6925 // Ensure that we allocate a new page, set up a bump pointer area, and 6935 // Ensure that we allocate a new page, set up a bump pointer area, and
6926 // perform the allocation in a black area. 6936 // perform the allocation in a black area.
6927 heap::SimulateFullSpace(heap->old_space()); 6937 heap::SimulateFullSpace(heap->old_space());
6928 isolate->factory()->NewFixedArray(10, TENURED); 6938 isolate->factory()->NewFixedArray(10, TENURED);
6929 6939
6930 // Allocate the fixed array that will be trimmed later. 6940 // Allocate the fixed array that will be trimmed later.
(...skipping 28 matching lines...) Expand all
6959 heap::GcAndSweep(heap, OLD_SPACE); 6969 heap::GcAndSweep(heap, OLD_SPACE);
6960 } 6970 }
6961 6971
6962 TEST(SlotFilteringAfterBlackAreas) { 6972 TEST(SlotFilteringAfterBlackAreas) {
6963 FLAG_black_allocation = true; 6973 FLAG_black_allocation = true;
6964 CcTest::InitializeVM(); 6974 CcTest::InitializeVM();
6965 v8::HandleScope scope(CcTest::isolate()); 6975 v8::HandleScope scope(CcTest::isolate());
6966 Heap* heap = CcTest::heap(); 6976 Heap* heap = CcTest::heap();
6967 Isolate* isolate = heap->isolate(); 6977 Isolate* isolate = heap->isolate();
6968 MarkCompactCollector* mark_compact_collector = heap->mark_compact_collector(); 6978 MarkCompactCollector* mark_compact_collector = heap->mark_compact_collector();
6969 heap->CollectAllGarbage(); 6979 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
6970 6980
6971 i::MarkCompactCollector* collector = heap->mark_compact_collector(); 6981 i::MarkCompactCollector* collector = heap->mark_compact_collector();
6972 i::IncrementalMarking* marking = heap->incremental_marking(); 6982 i::IncrementalMarking* marking = heap->incremental_marking();
6973 if (collector->sweeping_in_progress()) { 6983 if (collector->sweeping_in_progress()) {
6974 collector->EnsureSweepingCompleted(); 6984 collector->EnsureSweepingCompleted();
6975 } 6985 }
6976 CHECK(marking->IsMarking() || marking->IsStopped()); 6986 CHECK(marking->IsMarking() || marking->IsStopped());
6977 if (marking->IsStopped()) { 6987 if (marking->IsStopped()) {
6978 heap->StartIncrementalMarking(); 6988 heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
6989 i::GarbageCollectionReason::kTesting);
6979 } 6990 }
6980 CHECK(marking->IsMarking()); 6991 CHECK(marking->IsMarking());
6981 marking->StartBlackAllocationForTesting(); 6992 marking->StartBlackAllocationForTesting();
6982 6993
6983 // Ensure that we allocate a new page, set up a bump pointer area, and 6994 // Ensure that we allocate a new page, set up a bump pointer area, and
6984 // perform the allocation in a black area. 6995 // perform the allocation in a black area.
6985 heap::SimulateFullSpace(heap->old_space()); 6996 heap::SimulateFullSpace(heap->old_space());
6986 Handle<FixedArray> array = isolate->factory()->NewFixedArray(10, TENURED); 6997 Handle<FixedArray> array = isolate->factory()->NewFixedArray(10, TENURED);
6987 Page* page = Page::FromAddress(array->address()); 6998 Page* page = Page::FromAddress(array->address());
6988 6999
(...skipping 39 matching lines...) Expand 10 before | Expand all | Expand 10 after
7028 Handle<FixedArray> array = isolate->factory()->NewFixedArray(200000); 7039 Handle<FixedArray> array = isolate->factory()->NewFixedArray(200000);
7029 MemoryChunk* chunk = MemoryChunk::FromAddress(array->address()); 7040 MemoryChunk* chunk = MemoryChunk::FromAddress(array->address());
7030 CHECK(chunk->owner()->identity() == LO_SPACE); 7041 CHECK(chunk->owner()->identity() == LO_SPACE);
7031 7042
7032 intptr_t size_before = array->Size(); 7043 intptr_t size_before = array->Size();
7033 size_t committed_memory_before = chunk->CommittedPhysicalMemory(); 7044 size_t committed_memory_before = chunk->CommittedPhysicalMemory();
7034 7045
7035 array->Shrink(1); 7046 array->Shrink(1);
7036 CHECK(array->Size() < size_before); 7047 CHECK(array->Size() < size_before);
7037 7048
7038 CcTest::heap()->CollectAllGarbage(); 7049 CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
7039 CHECK(chunk->CommittedPhysicalMemory() < committed_memory_before); 7050 CHECK(chunk->CommittedPhysicalMemory() < committed_memory_before);
7040 size_t shrinked_size = 7051 size_t shrinked_size =
7041 RoundUp((array->address() - chunk->address()) + array->Size(), 7052 RoundUp((array->address() - chunk->address()) + array->Size(),
7042 base::OS::CommitPageSize()); 7053 base::OS::CommitPageSize());
7043 CHECK_EQ(shrinked_size, chunk->CommittedPhysicalMemory()); 7054 CHECK_EQ(shrinked_size, chunk->CommittedPhysicalMemory());
7044 } 7055 }
7045 7056
7046 TEST(RememberedSetRemoveRange) { 7057 TEST(RememberedSetRemoveRange) {
7047 CcTest::InitializeVM(); 7058 CcTest::InitializeVM();
7048 v8::HandleScope scope(CcTest::isolate()); 7059 v8::HandleScope scope(CcTest::isolate());
(...skipping 51 matching lines...) Expand 10 before | Expand all | Expand 10 after
7100 chunk, chunk->area_end() - kPointerSize, chunk->area_end()); 7111 chunk, chunk->area_end() - kPointerSize, chunk->area_end());
7101 slots[chunk->area_end() - kPointerSize] = false; 7112 slots[chunk->area_end() - kPointerSize] = false;
7102 RememberedSet<OLD_TO_NEW>::Iterate(chunk, [&slots](Address addr) { 7113 RememberedSet<OLD_TO_NEW>::Iterate(chunk, [&slots](Address addr) {
7103 CHECK(slots[addr]); 7114 CHECK(slots[addr]);
7104 return KEEP_SLOT; 7115 return KEEP_SLOT;
7105 }); 7116 });
7106 } 7117 }
7107 7118
7108 } // namespace internal 7119 } // namespace internal
7109 } // namespace v8 7120 } // namespace v8
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698