OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 430 matching lines...)
441 pages_in_old_space + pages_in_compaction_space); | 441 pages_in_old_space + pages_in_compaction_space); |
442 | 442 |
443 delete compaction_space; | 443 delete compaction_space; |
444 delete old_space; | 444 delete old_space; |
445 | 445 |
446 memory_allocator->TearDown(); | 446 memory_allocator->TearDown(); |
447 delete memory_allocator; | 447 delete memory_allocator; |
448 } | 448 } |
449 | 449 |
450 | 450 |
| 451 TEST(CompactionSpaceUsingExternalMemory) { |
| 452 const int kObjectSize = 512; |
| 453 |
| 454 Isolate* isolate = CcTest::i_isolate(); |
| 455 Heap* heap = isolate->heap(); |
| 456 MemoryAllocator* allocator = new MemoryAllocator(isolate); |
| 457 CHECK(allocator != nullptr); |
| 458 CHECK(allocator->SetUp(heap->MaxReserved(), heap->MaxExecutableSize())); |
| 459 TestMemoryAllocatorScope test_scope(isolate, allocator); |
| 460 |
| 461 CompactionSpaceCollection* collection = new CompactionSpaceCollection(heap); |
| 462 CompactionSpace* compaction_space = collection->Get(OLD_SPACE); |
| 463 CHECK(compaction_space != nullptr); |
| 464 CHECK(compaction_space->SetUp()); |
| 465 |
| 466 OldSpace* old_space = new OldSpace(heap, OLD_SPACE, NOT_EXECUTABLE); |
| 467 CHECK(old_space != nullptr); |
| 468 CHECK(old_space->SetUp()); |
| 469 |
| 470 // The linear allocation area already counts as used bytes, making |
| 471 // exact testing impossible. |
| 472 heap->DisableInlineAllocation(); |
| 473 |
| 474 // Test: |
| 475 // * Allocate a backing store in old_space. |
| 476 // * Compute the number num_rest_objects of kObjectSize objects that fit |
| 477 //   into the rest of the available memory in old_space after allocating |
| 478 //   the backing store. |
| 479 // * Add the rest of the available memory to the compaction space. |
| 480 // * Allocate num_rest_objects objects in the compaction space. |
| 481 // * Allocate one more object. |
| 482 // * Merge the compaction space and compare the expected number of pages. |
| 483 |
| 484 // Allocate a single object in old_space to initialize a backing page. |
| 485 old_space->AllocateRawUnaligned(kObjectSize).ToObjectChecked(); |
| 486 // Compute the number of objects that fit into the rest of old_space. |
| 487 intptr_t rest = static_cast<intptr_t>(old_space->Available()); |
| 488 CHECK_GT(rest, 0); |
| 489 intptr_t num_rest_objects = rest / kObjectSize; |
| 490 // After allocating num_rest_objects objects in compaction_space we |
| 491 // allocate one additional object. |
| 492 const intptr_t kAdditionalCompactionMemory = kObjectSize; |
| 493 // We expect a single old_space page. |
| 494 const intptr_t kExpectedInitialOldSpacePages = 1; |
| 495 // We expect a single additional page in compaction space because we mostly |
| 496 // use external memory. |
| 497 const intptr_t kExpectedCompactionPages = 1; |
| 498 // We expect two pages to be reachable from old_space in the end. |
| 499 const intptr_t kExpectedOldSpacePagesAfterMerge = 2; |
| 500 |
| 501 CHECK_EQ(old_space->CountTotalPages(), kExpectedInitialOldSpacePages); |
| 502 CHECK_EQ(compaction_space->CountTotalPages(), 0); |
| 503 CHECK_EQ(compaction_space->Capacity(), 0); |
| 504 // Make the rest of memory available for compaction. |
| 505 old_space->DivideUponCompactionSpaces(&collection, 1, rest); |
| 506 CHECK_EQ(compaction_space->CountTotalPages(), 0); |
| 507 CHECK_EQ(compaction_space->Capacity(), rest); |
| 508 while (num_rest_objects-- > 0) { |
| 509 compaction_space->AllocateRawUnaligned(kObjectSize).ToObjectChecked(); |
| 510 } |
| 511 // So far we have only used external memory. |
| 512 CHECK_EQ(compaction_space->CountTotalPages(), 0); |
| 513 // Additional allocation. |
| 514 compaction_space->AllocateRawUnaligned(kAdditionalCompactionMemory) |
| 515 .ToObjectChecked(); |
| 516 // Now the compaction space should have also acquired a page. |
| 517 CHECK_EQ(compaction_space->CountTotalPages(), kExpectedCompactionPages); |
| 518 |
| 519 old_space->MergeCompactionSpace(compaction_space); |
| 520 CHECK_EQ(old_space->CountTotalPages(), kExpectedOldSpacePagesAfterMerge); |
| 521 |
| 522 delete collection; |
| 523 delete old_space; |
| 524 |
| 525 allocator->TearDown(); |
| 526 delete allocator; |
| 527 } |
| 528 |
| 529 |
| 530 CompactionSpaceCollection** HeapTester::InitializeCompactionSpaces( |
| 531 Heap* heap, int num_spaces) { |
| 532 CompactionSpaceCollection** spaces = |
| 533 new CompactionSpaceCollection*[num_spaces]; |
| 534 for (int i = 0; i < num_spaces; i++) { |
| 535 spaces[i] = new CompactionSpaceCollection(heap); |
| 536 } |
| 537 return spaces; |
| 538 } |
| 539 |
| 540 |
| 541 void HeapTester::DestroyCompactionSpaces(CompactionSpaceCollection** spaces, |
| 542 int num_spaces) { |
| 543 for (int i = 0; i < num_spaces; i++) { |
| 544 delete spaces[i]; |
| 545 } |
| 546 delete[] spaces; |
| 547 } |
| 548 |
| 549 |
| 550 void HeapTester::MergeCompactionSpaces(PagedSpace* space, |
| 551 CompactionSpaceCollection** spaces, |
| 552 int num_spaces) { |
| 553 AllocationSpace id = space->identity(); |
| 554 for (int i = 0; i < num_spaces; i++) { |
| 555 space->MergeCompactionSpace(spaces[i]->Get(id)); |
| 556 CHECK_EQ(spaces[i]->Get(id)->accounting_stats_.Size(), 0); |
| 557 CHECK_EQ(spaces[i]->Get(id)->accounting_stats_.Capacity(), 0); |
| 558 CHECK_EQ(spaces[i]->Get(id)->Waste(), 0); |
| 559 } |
| 560 } |
| 561 |
| 562 |
| 563 void HeapTester::AllocateInCompactionSpaces(CompactionSpaceCollection** spaces, |
| 564 AllocationSpace id, int num_spaces, |
| 565 int num_objects, int object_size) { |
| 566 for (int i = 0; i < num_spaces; i++) { |
| 567 for (int j = 0; j < num_objects; j++) { |
| 568 spaces[i]->Get(id)->AllocateRawUnaligned(object_size).ToObjectChecked(); |
| 569 } |
| 570 spaces[i]->Get(id)->EmptyAllocationInfo(); |
| 571 CHECK_EQ(spaces[i]->Get(id)->accounting_stats_.Size(), |
| 572 num_objects * object_size); |
| 573 CHECK_GE(spaces[i]->Get(id)->accounting_stats_.Capacity(), |
| 574 spaces[i]->Get(id)->accounting_stats_.Size()); |
| 575 } |
| 576 } |
| 577 |
| 578 |
| 579 void HeapTester::CompactionStats(CompactionSpaceCollection** spaces, |
| 580 AllocationSpace id, int num_spaces, |
| 581 intptr_t* capacity, intptr_t* size) { |
| 582 *capacity = 0; |
| 583 *size = 0; |
| 584 for (int i = 0; i < num_spaces; i++) { |
| 585 *capacity += spaces[i]->Get(id)->accounting_stats_.Capacity(); |
| 586 *size += spaces[i]->Get(id)->accounting_stats_.Size(); |
| 587 } |
| 588 } |
| 589 |
| 590 |
| 591 void HeapTester::TestCompactionSpaceDivide(int num_additional_objects, |
| 592 int object_size, |
| 593 int num_compaction_spaces, |
| 594 int additional_capacity_in_bytes) { |
| 595 Isolate* isolate = CcTest::i_isolate(); |
| 596 Heap* heap = isolate->heap(); |
| 597 OldSpace* old_space = new OldSpace(heap, OLD_SPACE, NOT_EXECUTABLE); |
| 598 CHECK(old_space != nullptr); |
| 599 CHECK(old_space->SetUp()); |
| 600 old_space->AllocateRawUnaligned(object_size).ToObjectChecked(); |
| 601 old_space->EmptyAllocationInfo(); |
| 602 |
| 603 intptr_t rest_capacity = old_space->accounting_stats_.Capacity() - |
| 604 old_space->accounting_stats_.Size(); |
| 605 intptr_t capacity_for_compaction_space = |
| 606 rest_capacity / num_compaction_spaces; |
| 607 int num_objects_in_compaction_space = |
| 608 static_cast<int>(capacity_for_compaction_space) / object_size + |
| 609 num_additional_objects; |
| 610 CHECK_GT(num_objects_in_compaction_space, 0); |
| 611 intptr_t initial_old_space_capacity = old_space->accounting_stats_.Capacity(); |
| 612 |
| 613 CompactionSpaceCollection** spaces = |
| 614 InitializeCompactionSpaces(heap, num_compaction_spaces); |
| 615 old_space->DivideUponCompactionSpaces(spaces, num_compaction_spaces, |
| 616 capacity_for_compaction_space); |
| 617 |
| 618 intptr_t compaction_capacity = 0; |
| 619 intptr_t compaction_size = 0; |
| 620 CompactionStats(spaces, OLD_SPACE, num_compaction_spaces, |
| 621 &compaction_capacity, &compaction_size); |
| 622 |
| 623 intptr_t old_space_capacity = old_space->accounting_stats_.Capacity(); |
| 624 intptr_t old_space_size = old_space->accounting_stats_.Size(); |
| 625 // Compaction space memory is subtracted from the original space's capacity. |
| 626 CHECK_EQ(old_space_capacity, |
| 627 initial_old_space_capacity - compaction_capacity); |
| 628 CHECK_EQ(compaction_size, 0); |
| 629 |
| 630 AllocateInCompactionSpaces(spaces, OLD_SPACE, num_compaction_spaces, |
| 631 num_objects_in_compaction_space, object_size); |
| 632 |
| 633 // Old space size and capacity should be the same as after dividing. |
| 634 CHECK_EQ(old_space->accounting_stats_.Size(), old_space_size); |
| 635 CHECK_EQ(old_space->accounting_stats_.Capacity(), old_space_capacity); |
| 636 |
| 637 CompactionStats(spaces, OLD_SPACE, num_compaction_spaces, |
| 638 &compaction_capacity, &compaction_size); |
| 639 MergeCompactionSpaces(old_space, spaces, num_compaction_spaces); |
| 640 |
| 641 CHECK_EQ(old_space->accounting_stats_.Capacity(), |
| 642 old_space_capacity + compaction_capacity); |
| 643 CHECK_EQ(old_space->accounting_stats_.Size(), |
| 644 old_space_size + compaction_size); |
| 645 // We check against the expected end capacity. |
| 646 CHECK_EQ(old_space->accounting_stats_.Capacity(), |
| 647 initial_old_space_capacity + additional_capacity_in_bytes); |
| 648 |
| 649 DestroyCompactionSpaces(spaces, num_compaction_spaces); |
| 650 delete old_space; |
| 651 } |
| 652 |
| 653 |
| 654 HEAP_TEST(CompactionSpaceDivideSinglePage) { |
| 655 const int kObjectSize = KB; |
| 656 const int kCompactionSpaces = 4; |
| 657 // Since the bound for objects is tight and the division is best effort, we |
| 658 // subtract some objects to make sure we still fit in the initial page. |
| 659 // A CHECK makes sure that the overall number of allocated objects stays |
| 660 // > 0. |
| 661 const int kAdditionalObjects = -10; |
| 662 const int kAdditionalCapacityRequired = 0; |
| 663 TestCompactionSpaceDivide(kAdditionalObjects, kObjectSize, kCompactionSpaces, |
| 664 kAdditionalCapacityRequired); |
| 665 } |
| 666 |
| 667 |
| 668 HEAP_TEST(CompactionSpaceDivideMultiplePages) { |
| 669 const int kObjectSize = KB; |
| 670 const int kCompactionSpaces = 4; |
| 671 // Allocate half a page of objects to ensure that we need one more page per |
| 672 // compaction space. |
| 673 const int kAdditionalObjects = (Page::kPageSize / kObjectSize / 2); |
| 674 const int kAdditionalCapacityRequired = |
| 675 Page::kAllocatableMemory * kCompactionSpaces; |
| 676 TestCompactionSpaceDivide(kAdditionalObjects, kObjectSize, kCompactionSpaces, |
| 677 kAdditionalCapacityRequired); |
| 678 } |
| 679 |
| 680 |
451 TEST(LargeObjectSpace) { | 681 TEST(LargeObjectSpace) { |
452 v8::V8::Initialize(); | 682 v8::V8::Initialize(); |
453 | 683 |
454 LargeObjectSpace* lo = CcTest::heap()->lo_space(); | 684 LargeObjectSpace* lo = CcTest::heap()->lo_space(); |
455 CHECK(lo != NULL); | 685 CHECK(lo != NULL); |
456 | 686 |
457 int lo_size = Page::kPageSize; | 687 int lo_size = Page::kPageSize; |
458 | 688 |
459 Object* obj = lo->AllocateRaw(lo_size, NOT_EXECUTABLE).ToObjectChecked(); | 689 Object* obj = lo->AllocateRaw(lo_size, NOT_EXECUTABLE).ToObjectChecked(); |
460 CHECK(obj->IsHeapObject()); | 690 CHECK(obj->IsHeapObject()); |
(...skipping 185 matching lines...)
646 new_space->RemoveInlineAllocationObserver(&observer2); | 876 new_space->RemoveInlineAllocationObserver(&observer2); |
647 | 877 |
648 CHECK_EQ(observer1.count(), 32); | 878 CHECK_EQ(observer1.count(), 32); |
649 CHECK_EQ(observer2.count(), 28); | 879 CHECK_EQ(observer2.count(), 28); |
650 } | 880 } |
651 isolate->Dispose(); | 881 isolate->Dispose(); |
652 } | 882 } |
653 | 883 |
654 } // namespace internal | 884 } // namespace internal |
655 } // namespace v8 | 885 } // namespace v8 |