OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 14 matching lines...) |
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
27 | 27 |
28 #include <stdlib.h> | 28 #include <stdlib.h> |
29 | 29 |
30 #include "src/base/platform/platform.h" | 30 #include "src/base/platform/platform.h" |
31 #include "src/snapshot/snapshot.h" | 31 #include "src/snapshot/snapshot.h" |
32 #include "src/v8.h" | 32 #include "src/v8.h" |
33 #include "test/cctest/cctest.h" | 33 #include "test/cctest/cctest.h" |
34 #include "test/cctest/heap/heap-tester.h" | 34 #include "test/cctest/heap/heap-tester.h" |
| 35 #include "test/cctest/heap/heap-utils.h" |
35 | 36 |
36 namespace v8 { | 37 namespace v8 { |
37 namespace internal { | 38 namespace internal { |
38 | 39 |
39 #if 0 | 40 #if 0 |
40 static void VerifyRegionMarking(Address page_start) { | 41 static void VerifyRegionMarking(Address page_start) { |
41 #ifdef ENABLE_CARDMARKING_WRITE_BARRIER | 42 #ifdef ENABLE_CARDMARKING_WRITE_BARRIER |
42 Page* p = Page::FromAddress(page_start); | 43 Page* p = Page::FromAddress(page_start); |
43 | 44 |
44 p->SetRegionMarks(Page::kAllRegionsCleanMarks); | 45 p->SetRegionMarks(Page::kAllRegionsCleanMarks); |
(...skipping 426 matching lines...) |
471 // The available value is conservative such that it may report | 472 // The available value is conservative such that it may report |
472 // zero prior to heap exhaustion. | 473 // zero prior to heap exhaustion. |
473 CHECK(lo->Available() < available || available == 0); | 474 CHECK(lo->Available() < available || available == 0); |
474 } | 475 } |
475 | 476 |
476 CHECK(!lo->IsEmpty()); | 477 CHECK(!lo->IsEmpty()); |
477 | 478 |
478 CHECK(lo->AllocateRaw(lo_size, NOT_EXECUTABLE).IsRetry()); | 479 CHECK(lo->AllocateRaw(lo_size, NOT_EXECUTABLE).IsRetry()); |
479 } | 480 } |
480 | 481 |
481 | 482 TEST(SizeOfInitialHeap) { |
482 TEST(SizeOfFirstPageIsLargeEnough) { | |
483 if (i::FLAG_always_opt) return; | 483 if (i::FLAG_always_opt) return; |
484 // Bootstrapping without a snapshot causes more allocations. | 484 // Bootstrapping without a snapshot causes more allocations. |
485 CcTest::InitializeVM(); | 485 CcTest::InitializeVM(); |
486 Isolate* isolate = CcTest::i_isolate(); | 486 Isolate* isolate = CcTest::i_isolate(); |
487 if (!isolate->snapshot_available()) return; | 487 if (!isolate->snapshot_available()) return; |
488 HandleScope scope(isolate); | 488 HandleScope scope(isolate); |
489 v8::Local<v8::Context> context = CcTest::isolate()->GetCurrentContext(); | 489 v8::Local<v8::Context> context = CcTest::isolate()->GetCurrentContext(); |
490 // Skip this test on the custom snapshot builder. | 490 // Skip this test on the custom snapshot builder. |
491 if (!CcTest::global() | 491 if (!CcTest::global() |
492 ->Get(context, v8_str("assertEquals")) | 492 ->Get(context, v8_str("assertEquals")) |
493 .ToLocalChecked() | 493 .ToLocalChecked() |
494 ->IsUndefined()) { | 494 ->IsUndefined()) { |
495 return; | 495 return; |
496 } | 496 } |
497 | 497 |
498 // If this test fails due to enabling experimental natives that are not part | 498 // The limit for each space for an empty isolate containing just the |
499 // of the snapshot, we may need to adjust CalculateFirstPageSizes. | 499 // snapshot. |
| 500 const size_t kMaxInitialSizePerSpace = 1536 * KB; // 1.5MB |
500 | 501 |
501 // Freshly initialized VM gets by with one page per space. | 502 // Freshly initialized VM gets by with the snapshot size (which is below |
| 503 // kMaxInitialSizePerSpace per space). |
| 504 Heap* heap = isolate->heap(); |
| 505 int page_count[LAST_PAGED_SPACE + 1] = {0, 0, 0, 0}; |
502 for (int i = FIRST_PAGED_SPACE; i <= LAST_PAGED_SPACE; i++) { | 506 for (int i = FIRST_PAGED_SPACE; i <= LAST_PAGED_SPACE; i++) { |
503 // Debug code can be very large, so skip CODE_SPACE if we are generating it. | 507 // Debug code can be very large, so skip CODE_SPACE if we are generating it. |
504 if (i == CODE_SPACE && i::FLAG_debug_code) continue; | 508 if (i == CODE_SPACE && i::FLAG_debug_code) continue; |
505 CHECK_EQ(1, isolate->heap()->paged_space(i)->CountTotalPages()); | 509 |
| 510 page_count[i] = heap->paged_space(i)->CountTotalPages(); |
| 511 // Check that the initial heap is also below the limit. |
| 512 CHECK_LT(static_cast<size_t>(heap->paged_space(i)->CommittedMemory()), |
| 513 kMaxInitialSizePerSpace); |
506 } | 514 } |
507 | 515 |
508 // Executing the empty script gets by with one page per space. | 516 // Executing the empty script gets by with the same number of pages, i.e., |
| 517 // requires no extra space. |
509 CompileRun("/*empty*/"); | 518 CompileRun("/*empty*/"); |
510 for (int i = FIRST_PAGED_SPACE; i <= LAST_PAGED_SPACE; i++) { | 519 for (int i = FIRST_PAGED_SPACE; i <= LAST_PAGED_SPACE; i++) { |
511 // Debug code can be very large, so skip CODE_SPACE if we are generating it. | 520 // Debug code can be very large, so skip CODE_SPACE if we are generating it. |
512 if (i == CODE_SPACE && i::FLAG_debug_code) continue; | 521 if (i == CODE_SPACE && i::FLAG_debug_code) continue; |
513 CHECK_EQ(1, isolate->heap()->paged_space(i)->CountTotalPages()); | 522 CHECK_EQ(page_count[i], isolate->heap()->paged_space(i)->CountTotalPages()); |
514 } | 523 } |
515 | 524 |
516 // No large objects required to perform the above steps. | 525 // No large objects required to perform the above steps. |
517 CHECK(isolate->heap()->lo_space()->IsEmpty()); | 526 CHECK(isolate->heap()->lo_space()->IsEmpty()); |
518 } | 527 } |
519 | 528 |
520 static HeapObject* AllocateUnaligned(NewSpace* space, int size) { | 529 static HeapObject* AllocateUnaligned(NewSpace* space, int size) { |
521 AllocationResult allocation = space->AllocateRawUnaligned(size); | 530 AllocationResult allocation = space->AllocateRawUnaligned(size); |
522 CHECK(!allocation.IsRetry()); | 531 CHECK(!allocation.IsRetry()); |
523 HeapObject* filler = NULL; | 532 HeapObject* filler = NULL; |
(...skipping 150 matching lines...) |
674 | 683 |
675 new_space->RemoveAllocationObserver(&observer1); | 684 new_space->RemoveAllocationObserver(&observer1); |
676 new_space->RemoveAllocationObserver(&observer2); | 685 new_space->RemoveAllocationObserver(&observer2); |
677 | 686 |
678 CHECK_EQ(observer1.count(), 32); | 687 CHECK_EQ(observer1.count(), 32); |
679 CHECK_EQ(observer2.count(), 28); | 688 CHECK_EQ(observer2.count(), 28); |
680 } | 689 } |
681 isolate->Dispose(); | 690 isolate->Dispose(); |
682 } | 691 } |
683 | 692 |
| 693 TEST(ShrinkPageToHighWaterMarkFreeSpaceEnd) { |
| 694 CcTest::InitializeVM(); |
| 695 Isolate* isolate = CcTest::i_isolate(); |
| 696 HandleScope scope(isolate); |
| 697 |
| 698 heap::SealCurrentObjects(CcTest::heap()); |
| 699 |
| 700 // Prepare page that only contains a single object and a trailing FreeSpace |
| 701 // filler. |
| 702 Handle<FixedArray> array = isolate->factory()->NewFixedArray(128, TENURED); |
| 703 Page* page = Page::FromAddress(array->address()); |
| 704 |
| 705 // Reset space so high water mark is consistent. |
| 706 CcTest::heap()->old_space()->ResetFreeList(); |
| 707 CcTest::heap()->old_space()->EmptyAllocationInfo(); |
| 708 |
| 709 HeapObject* filler = |
| 710 HeapObject::FromAddress(array->address() + array->Size()); |
| 711 CHECK(filler->IsFreeSpace()); |
| 712 size_t shrinked = page->ShrinkToHighWaterMark(); |
| 713 size_t should_have_shrinked = |
| 714 RoundDown(static_cast<size_t>(Page::kAllocatableMemory - array->Size()), |
| 715 base::OS::CommitPageSize()); |
| 716 CHECK_EQ(should_have_shrinked, shrinked); |
| 717 } |
| 718 |
| 719 TEST(ShrinkPageToHighWaterMarkNoFiller) { |
| 720 CcTest::InitializeVM(); |
| 721 Isolate* isolate = CcTest::i_isolate(); |
| 722 HandleScope scope(isolate); |
| 723 |
| 724 heap::SealCurrentObjects(CcTest::heap()); |
| 725 |
| 726 const int kFillerSize = 0; |
| 727 std::vector<Handle<FixedArray>> arrays = |
| 728 heap::FillOldSpacePageWithFixedArrays(CcTest::heap(), kFillerSize); |
| 729 Handle<FixedArray> array = arrays.back(); |
| 730 Page* page = Page::FromAddress(array->address()); |
| 731 CHECK_EQ(page->area_end(), array->address() + array->Size() + kFillerSize); |
| 732 |
| 733 // Reset space so high water mark and fillers are consistent. |
| 734 CcTest::heap()->old_space()->ResetFreeList(); |
| 735 CcTest::heap()->old_space()->EmptyAllocationInfo(); |
| 736 |
| 737 const size_t shrinked = page->ShrinkToHighWaterMark(); |
| 738 CHECK_EQ(0, shrinked); |
| 739 } |
| 740 |
| 741 TEST(ShrinkPageToHighWaterMarkOneWordFiller) { |
| 742 CcTest::InitializeVM(); |
| 743 Isolate* isolate = CcTest::i_isolate(); |
| 744 HandleScope scope(isolate); |
| 745 |
| 746 heap::SealCurrentObjects(CcTest::heap()); |
| 747 |
| 748 const int kFillerSize = kPointerSize; |
| 749 std::vector<Handle<FixedArray>> arrays = |
| 750 heap::FillOldSpacePageWithFixedArrays(CcTest::heap(), kFillerSize); |
| 751 Handle<FixedArray> array = arrays.back(); |
| 752 Page* page = Page::FromAddress(array->address()); |
| 753 CHECK_EQ(page->area_end(), array->address() + array->Size() + kFillerSize); |
| 754 |
| 755 // Reset space so high water mark and fillers are consistent. |
| 756 CcTest::heap()->old_space()->ResetFreeList(); |
| 757 CcTest::heap()->old_space()->EmptyAllocationInfo(); |
| 758 |
| 759 HeapObject* filler = |
| 760 HeapObject::FromAddress(array->address() + array->Size()); |
| 761 CHECK_EQ(filler->map(), CcTest::heap()->one_pointer_filler_map()); |
| 762 |
| 763 const size_t shrinked = page->ShrinkToHighWaterMark(); |
| 764 CHECK_EQ(0, shrinked); |
| 765 } |
| 766 |
| 767 TEST(ShrinkPageToHighWaterMarkTwoWordFiller) { |
| 768 CcTest::InitializeVM(); |
| 769 Isolate* isolate = CcTest::i_isolate(); |
| 770 HandleScope scope(isolate); |
| 771 |
| 772 heap::SealCurrentObjects(CcTest::heap()); |
| 773 |
| 774 const int kFillerSize = 2 * kPointerSize; |
| 775 std::vector<Handle<FixedArray>> arrays = |
| 776 heap::FillOldSpacePageWithFixedArrays(CcTest::heap(), kFillerSize); |
| 777 Handle<FixedArray> array = arrays.back(); |
| 778 Page* page = Page::FromAddress(array->address()); |
| 779 CHECK_EQ(page->area_end(), array->address() + array->Size() + kFillerSize); |
| 780 |
| 781 // Reset space so high water mark and fillers are consistent. |
| 782 CcTest::heap()->old_space()->ResetFreeList(); |
| 783 CcTest::heap()->old_space()->EmptyAllocationInfo(); |
| 784 |
| 785 HeapObject* filler = |
| 786 HeapObject::FromAddress(array->address() + array->Size()); |
| 787 CHECK_EQ(filler->map(), CcTest::heap()->two_pointer_filler_map()); |
| 788 |
| 789 const size_t shrinked = page->ShrinkToHighWaterMark(); |
| 790 CHECK_EQ(0, shrinked); |
| 791 } |
| 792 |
684 } // namespace internal | 793 } // namespace internal |
685 } // namespace v8 | 794 } // namespace v8 |
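
For orientation, the expectation checked in ShrinkPageToHighWaterMarkFreeSpaceEnd above boils down to rounding the unused tail of the page down to the OS commit granularity: everything past the single live FixedArray, rounded down to a commit-page boundary, can be released. The following is a minimal standalone sketch of that arithmetic only, not code from this CL. The constants are made up for illustration; in the test the real values come from Page::kAllocatableMemory, array->Size() and base::OS::CommitPageSize(), none of which are spelled out in this diff.

#include <cstddef>
#include <cstdio>

// Round value down to a multiple of granularity, mirroring v8's RoundDown().
static size_t RoundDown(size_t value, size_t granularity) {
  return value - (value % granularity);
}

int main() {
  // Hypothetical numbers, for illustration only.
  const size_t kAllocatableMemory = 500 * 1024;  // usable area of one page
  const size_t kCommitPageSize = 4 * 1024;       // OS commit granularity
  const size_t kLiveObjectSize = 1040;           // size of the lone FixedArray

  // A page that contains only this object, followed by a FreeSpace filler up
  // to area_end(), can give back every whole commit page behind the object.
  const size_t expected_shrink =
      RoundDown(kAllocatableMemory - kLiveObjectSize, kCommitPageSize);
  std::printf("expected shrink: %zu bytes\n", expected_shrink);
  return 0;
}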