Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(124)

Side by Side Diff: test/cctest/test-spaces.cc

Issue 1415733004: Reland of "[heap] Divide available memory upon compaction tasks" (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Fix size computation in TryRemoveMemory Created 5 years, 2 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « test/cctest/heap-tester.h ('k') | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 16 matching lines...) Expand all
27 27
28 // TODO(mythria): Remove this define after this flag is turned on globally 28 // TODO(mythria): Remove this define after this flag is turned on globally
29 #define V8_IMMINENT_DEPRECATION_WARNINGS 29 #define V8_IMMINENT_DEPRECATION_WARNINGS
30 30
31 #include <stdlib.h> 31 #include <stdlib.h>
32 32
33 #include "src/base/platform/platform.h" 33 #include "src/base/platform/platform.h"
34 #include "src/snapshot/snapshot.h" 34 #include "src/snapshot/snapshot.h"
35 #include "src/v8.h" 35 #include "src/v8.h"
36 #include "test/cctest/cctest.h" 36 #include "test/cctest/cctest.h"
37 37 #include "test/cctest/heap-tester.h"
38 38
39 using namespace v8::internal; 39 using namespace v8::internal;
40 40
41 #if 0 41 #if 0
42 static void VerifyRegionMarking(Address page_start) { 42 static void VerifyRegionMarking(Address page_start) {
43 #ifdef ENABLE_CARDMARKING_WRITE_BARRIER 43 #ifdef ENABLE_CARDMARKING_WRITE_BARRIER
44 Page* p = Page::FromAddress(page_start); 44 Page* p = Page::FromAddress(page_start);
45 45
46 p->SetRegionMarks(Page::kAllRegionsCleanMarks); 46 p->SetRegionMarks(Page::kAllRegionsCleanMarks);
47 47
(...skipping 407 matching lines...) Expand 10 before | Expand all | Expand 10 after
455 TEST(CompactionSpaceUsingExternalMemory) { 455 TEST(CompactionSpaceUsingExternalMemory) {
456 const int kObjectSize = 512; 456 const int kObjectSize = 512;
457 457
458 Isolate* isolate = CcTest::i_isolate(); 458 Isolate* isolate = CcTest::i_isolate();
459 Heap* heap = isolate->heap(); 459 Heap* heap = isolate->heap();
460 MemoryAllocator* allocator = new MemoryAllocator(isolate); 460 MemoryAllocator* allocator = new MemoryAllocator(isolate);
461 CHECK(allocator != nullptr); 461 CHECK(allocator != nullptr);
462 CHECK(allocator->SetUp(heap->MaxReserved(), heap->MaxExecutableSize())); 462 CHECK(allocator->SetUp(heap->MaxReserved(), heap->MaxExecutableSize()));
463 TestMemoryAllocatorScope test_scope(isolate, allocator); 463 TestMemoryAllocatorScope test_scope(isolate, allocator);
464 464
465 CompactionSpace* compaction_space = 465 CompactionSpaceCollection* collection = new CompactionSpaceCollection(heap);
466 new CompactionSpace(heap, OLD_SPACE, NOT_EXECUTABLE); 466 CompactionSpace* compaction_space = collection->Get(OLD_SPACE);
467 CHECK(compaction_space != NULL); 467 CHECK(compaction_space != NULL);
468 CHECK(compaction_space->SetUp()); 468 CHECK(compaction_space->SetUp());
469 469
470 OldSpace* old_space = new OldSpace(heap, OLD_SPACE, NOT_EXECUTABLE); 470 OldSpace* old_space = new OldSpace(heap, OLD_SPACE, NOT_EXECUTABLE);
471 CHECK(old_space != NULL); 471 CHECK(old_space != NULL);
472 CHECK(old_space->SetUp()); 472 CHECK(old_space->SetUp());
473 473
474 // The linear allocation area already counts as used bytes, making 474 // The linear allocation area already counts as used bytes, making
475 // exact testing impossible. 475 // exact testing impossible.
476 heap->DisableInlineAllocation(); 476 heap->DisableInlineAllocation();
(...skipping 18 matching lines...) Expand all
495 // more. 495 // more.
496 const intptr_t kAdditionalCompactionMemory = kObjectSize; 496 const intptr_t kAdditionalCompactionMemory = kObjectSize;
497 // We expect a single old_space page. 497 // We expect a single old_space page.
498 const intptr_t kExpectedInitialOldSpacePages = 1; 498 const intptr_t kExpectedInitialOldSpacePages = 1;
499 // We expect a single additional page in compaction space because we mostly 499 // We expect a single additional page in compaction space because we mostly
500 // use external memory. 500 // use external memory.
501 const intptr_t kExpectedCompactionPages = 1; 501 const intptr_t kExpectedCompactionPages = 1;
502 // We expect two pages to be reachable from old_space in the end. 502 // We expect two pages to be reachable from old_space in the end.
503 const intptr_t kExpectedOldSpacePagesAfterMerge = 2; 503 const intptr_t kExpectedOldSpacePagesAfterMerge = 2;
504 504
505 Object* chunk =
506 old_space->AllocateRawUnaligned(static_cast<int>(rest)).ToObjectChecked();
507 CHECK_EQ(old_space->CountTotalPages(), kExpectedInitialOldSpacePages); 505 CHECK_EQ(old_space->CountTotalPages(), kExpectedInitialOldSpacePages);
508 CHECK(chunk != nullptr);
509 CHECK(chunk->IsHeapObject());
510
511 CHECK_EQ(compaction_space->CountTotalPages(), 0); 506 CHECK_EQ(compaction_space->CountTotalPages(), 0);
512 CHECK_EQ(compaction_space->Capacity(), 0); 507 CHECK_EQ(compaction_space->Capacity(), 0);
513 // Make the rest of memory available for compaction. 508 // Make the rest of memory available for compaction.
514 compaction_space->AddExternalMemory(HeapObject::cast(chunk)->address(), 509 old_space->DivideUponCompactionSpaces(&collection, 1, rest);
515 static_cast<int>(rest));
516 CHECK_EQ(compaction_space->CountTotalPages(), 0); 510 CHECK_EQ(compaction_space->CountTotalPages(), 0);
517 CHECK_EQ(compaction_space->Capacity(), rest); 511 CHECK_EQ(compaction_space->Capacity(), rest);
518 while (num_rest_objects-- > 0) { 512 while (num_rest_objects-- > 0) {
519 compaction_space->AllocateRawUnaligned(kObjectSize).ToObjectChecked(); 513 compaction_space->AllocateRawUnaligned(kObjectSize).ToObjectChecked();
520 } 514 }
521 // We only used external memory so far. 515 // We only used external memory so far.
522 CHECK_EQ(compaction_space->CountTotalPages(), 0); 516 CHECK_EQ(compaction_space->CountTotalPages(), 0);
523 // Additional allocation. 517 // Additional allocation.
524 compaction_space->AllocateRawUnaligned(kAdditionalCompactionMemory) 518 compaction_space->AllocateRawUnaligned(kAdditionalCompactionMemory)
525 .ToObjectChecked(); 519 .ToObjectChecked();
526 // Now the compaction space should've also acquired a page. 520 // Now the compaction space should've also acquired a page.
527 CHECK_EQ(compaction_space->CountTotalPages(), kExpectedCompactionPages); 521 CHECK_EQ(compaction_space->CountTotalPages(), kExpectedCompactionPages);
528 522
529 old_space->MergeCompactionSpace(compaction_space); 523 old_space->MergeCompactionSpace(compaction_space);
530 CHECK_EQ(old_space->CountTotalPages(), kExpectedOldSpacePagesAfterMerge); 524 CHECK_EQ(old_space->CountTotalPages(), kExpectedOldSpacePagesAfterMerge);
531 525
532 delete compaction_space; 526 delete collection;
533 delete old_space; 527 delete old_space;
534 528
535 allocator->TearDown(); 529 allocator->TearDown();
536 delete allocator; 530 delete allocator;
537 } 531 }
538 532
539 533
534 CompactionSpaceCollection** HeapTester::InitializeCompactionSpaces(
535 Heap* heap, int num_spaces) {
536 CompactionSpaceCollection** spaces =
537 new CompactionSpaceCollection*[num_spaces];
538 for (int i = 0; i < num_spaces; i++) {
539 spaces[i] = new CompactionSpaceCollection(heap);
540 }
541 return spaces;
542 }
543
544
545 void HeapTester::DestroyCompactionSpaces(CompactionSpaceCollection** spaces,
546 int num_spaces) {
547 for (int i = 0; i < num_spaces; i++) {
548 delete spaces[i];
549 }
550 delete[] spaces;
551 }
552
553
554 void HeapTester::MergeCompactionSpaces(PagedSpace* space,
555 CompactionSpaceCollection** spaces,
556 int num_spaces) {
557 AllocationSpace id = space->identity();
558 for (int i = 0; i < num_spaces; i++) {
559 space->MergeCompactionSpace(spaces[i]->Get(id));
560 CHECK_EQ(spaces[i]->Get(id)->accounting_stats_.Size(), 0);
561 CHECK_EQ(spaces[i]->Get(id)->accounting_stats_.Capacity(), 0);
562 CHECK_EQ(spaces[i]->Get(id)->Waste(), 0);
563 }
564 }
565
566
567 void HeapTester::AllocateInCompactionSpaces(CompactionSpaceCollection** spaces,
568 AllocationSpace id, int num_spaces,
569 int num_objects, int object_size) {
570 for (int i = 0; i < num_spaces; i++) {
571 for (int j = 0; j < num_objects; j++) {
572 spaces[i]->Get(id)->AllocateRawUnaligned(object_size).ToObjectChecked();
573 }
574 spaces[i]->Get(id)->EmptyAllocationInfo();
575 CHECK_EQ(spaces[i]->Get(id)->accounting_stats_.Size(),
576 num_objects * object_size);
577 CHECK_GE(spaces[i]->Get(id)->accounting_stats_.Capacity(),
578 spaces[i]->Get(id)->accounting_stats_.Size());
579 }
580 }
581
582
583 void HeapTester::CompactionStats(CompactionSpaceCollection** spaces,
584 AllocationSpace id, int num_spaces,
585 intptr_t* capacity, intptr_t* size) {
586 *capacity = 0;
587 *size = 0;
588 for (int i = 0; i < num_spaces; i++) {
589 *capacity += spaces[i]->Get(id)->accounting_stats_.Capacity();
590 *size += spaces[i]->Get(id)->accounting_stats_.Size();
591 }
592 }
593
594
595 void HeapTester::TestCompactionSpaceDivide(int num_additional_objects,
596 int object_size,
597 int num_compaction_spaces,
598 int additional_capacity_in_bytes) {
599 Isolate* isolate = CcTest::i_isolate();
600 Heap* heap = isolate->heap();
601 OldSpace* old_space = new OldSpace(heap, OLD_SPACE, NOT_EXECUTABLE);
602 CHECK(old_space != nullptr);
603 CHECK(old_space->SetUp());
604 old_space->AllocateRawUnaligned(object_size).ToObjectChecked();
605 old_space->EmptyAllocationInfo();
606
607 intptr_t rest_capacity = old_space->accounting_stats_.Capacity() -
608 old_space->accounting_stats_.Size();
609 intptr_t capacity_for_compaction_space =
610 rest_capacity / num_compaction_spaces;
611 int num_objects_in_compaction_space =
612 static_cast<int>(capacity_for_compaction_space) / object_size +
613 num_additional_objects;
614 CHECK_GT(num_objects_in_compaction_space, 0);
615 intptr_t initial_old_space_capacity = old_space->accounting_stats_.Capacity();
616
617 CompactionSpaceCollection** spaces =
618 InitializeCompactionSpaces(heap, num_compaction_spaces);
619 old_space->DivideUponCompactionSpaces(spaces, num_compaction_spaces,
620 capacity_for_compaction_space);
621
622 intptr_t compaction_capacity = 0;
623 intptr_t compaction_size = 0;
624 CompactionStats(spaces, OLD_SPACE, num_compaction_spaces,
625 &compaction_capacity, &compaction_size);
626
627 intptr_t old_space_capacity = old_space->accounting_stats_.Capacity();
628 intptr_t old_space_size = old_space->accounting_stats_.Size();
629 // Compaction space memory is subtracted from the original space's capacity.
630 CHECK_EQ(old_space_capacity,
631 initial_old_space_capacity - compaction_capacity);
632 CHECK_EQ(compaction_size, 0);
633
634 AllocateInCompactionSpaces(spaces, OLD_SPACE, num_compaction_spaces,
635 num_objects_in_compaction_space, object_size);
636
637 // Old space size and capacity should be the same as after dividing.
638 CHECK_EQ(old_space->accounting_stats_.Size(), old_space_size);
639 CHECK_EQ(old_space->accounting_stats_.Capacity(), old_space_capacity);
640
641 CompactionStats(spaces, OLD_SPACE, num_compaction_spaces,
642 &compaction_capacity, &compaction_size);
643 MergeCompactionSpaces(old_space, spaces, num_compaction_spaces);
644
645 CHECK_EQ(old_space->accounting_stats_.Capacity(),
646 old_space_capacity + compaction_capacity);
647 CHECK_EQ(old_space->accounting_stats_.Size(),
648 old_space_size + compaction_size);
649 // We check against the expected end capacity.
650 CHECK_EQ(old_space->accounting_stats_.Capacity(),
651 initial_old_space_capacity + additional_capacity_in_bytes);
652
653 DestroyCompactionSpaces(spaces, num_compaction_spaces);
654 delete old_space;
655 }
656
657
658 HEAP_TEST(CompactionSpaceDivideSinglePage) {
659 const int kObjectSize = KB;
660 const int kCompactionSpaces = 4;
661 // Since the bound for objects is tight and the division is best-effort, we
662 // subtract some objects to make sure we still fit in the initial page.
663 // A CHECK makes sure that the overall number of allocated objects stays
664 // > 0.
665 const int kAdditionalObjects = -10;
666 const int kAdditionalCapacityRequired = 0;
667 TestCompactionSpaceDivide(kAdditionalObjects, kObjectSize, kCompactionSpaces,
668 kAdditionalCapacityRequired);
669 }
670
671
672 HEAP_TEST(CompactionSpaceDivideMultiplePages) {
673 const int kObjectSize = KB;
674 const int kCompactionSpaces = 4;
675 // Allocate half a page of objects to ensure that we need one more page per
676 // compaction space.
677 const int kAdditionalObjects = (Page::kPageSize / kObjectSize / 2);
678 const int kAdditionalCapacityRequired =
679 Page::kAllocatableMemory * kCompactionSpaces;
680 TestCompactionSpaceDivide(kAdditionalObjects, kObjectSize, kCompactionSpaces,
681 kAdditionalCapacityRequired);
682 }
683
684
540 TEST(LargeObjectSpace) { 685 TEST(LargeObjectSpace) {
541 v8::V8::Initialize(); 686 v8::V8::Initialize();
542 687
543 LargeObjectSpace* lo = CcTest::heap()->lo_space(); 688 LargeObjectSpace* lo = CcTest::heap()->lo_space();
544 CHECK(lo != NULL); 689 CHECK(lo != NULL);
545 690
546 int lo_size = Page::kPageSize; 691 int lo_size = Page::kPageSize;
547 692
548 Object* obj = lo->AllocateRaw(lo_size, NOT_EXECUTABLE).ToObjectChecked(); 693 Object* obj = lo->AllocateRaw(lo_size, NOT_EXECUTABLE).ToObjectChecked();
549 CHECK(obj->IsHeapObject()); 694 CHECK(obj->IsHeapObject());
(...skipping 88 matching lines...) Expand 10 before | Expand all | Expand 10 after
638 783
639 // Turn the allocation into a proper object so isolate teardown won't 784 // Turn the allocation into a proper object so isolate teardown won't
640 // crash. 785 // crash.
641 HeapObject* free_space = NULL; 786 HeapObject* free_space = NULL;
642 CHECK(allocation.To(&free_space)); 787 CHECK(allocation.To(&free_space));
643 new_space->heap()->CreateFillerObjectAt(free_space->address(), 80); 788 new_space->heap()->CreateFillerObjectAt(free_space->address(), 80);
644 } 789 }
645 } 790 }
646 isolate->Dispose(); 791 isolate->Dispose();
647 } 792 }
OLDNEW
« no previous file with comments | « test/cctest/heap-tester.h ('k') | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698