OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 440 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
451 TEST(CompactionSpaceUsingExternalMemory) { | 451 TEST(CompactionSpaceUsingExternalMemory) { |
452 const int kObjectSize = 512; | 452 const int kObjectSize = 512; |
453 | 453 |
454 Isolate* isolate = CcTest::i_isolate(); | 454 Isolate* isolate = CcTest::i_isolate(); |
455 Heap* heap = isolate->heap(); | 455 Heap* heap = isolate->heap(); |
456 MemoryAllocator* allocator = new MemoryAllocator(isolate); | 456 MemoryAllocator* allocator = new MemoryAllocator(isolate); |
457 CHECK(allocator != nullptr); | 457 CHECK(allocator != nullptr); |
458 CHECK(allocator->SetUp(heap->MaxReserved(), heap->MaxExecutableSize())); | 458 CHECK(allocator->SetUp(heap->MaxReserved(), heap->MaxExecutableSize())); |
459 TestMemoryAllocatorScope test_scope(isolate, allocator); | 459 TestMemoryAllocatorScope test_scope(isolate, allocator); |
460 | 460 |
461 CompactionSpace* compaction_space = | 461 CompactionSpaceCollection* collection = new CompactionSpaceCollection(heap); |
462 new CompactionSpace(heap, OLD_SPACE, NOT_EXECUTABLE); | 462 CompactionSpace* compaction_space = collection->Get(OLD_SPACE); |
463 CHECK(compaction_space != NULL); | 463 CHECK(compaction_space != NULL); |
464 CHECK(compaction_space->SetUp()); | 464 CHECK(compaction_space->SetUp()); |
465 | 465 |
466 OldSpace* old_space = new OldSpace(heap, OLD_SPACE, NOT_EXECUTABLE); | 466 OldSpace* old_space = new OldSpace(heap, OLD_SPACE, NOT_EXECUTABLE); |
467 CHECK(old_space != NULL); | 467 CHECK(old_space != NULL); |
468 CHECK(old_space->SetUp()); | 468 CHECK(old_space->SetUp()); |
469 | 469 |
470 // The linear allocation area already counts as used bytes, making | 470 // The linear allocation area already counts as used bytes, making |
471 // exact testing impossible. | 471 // exact testing impossible. |
472 heap->DisableInlineAllocation(); | 472 heap->DisableInlineAllocation(); |
(...skipping 18 matching lines...) Expand all Loading... |
491 // more. | 491 // more. |
492 const intptr_t kAdditionalCompactionMemory = kObjectSize; | 492 const intptr_t kAdditionalCompactionMemory = kObjectSize; |
493 // We expect a single old_space page. | 493 // We expect a single old_space page. |
494 const intptr_t kExpectedInitialOldSpacePages = 1; | 494 const intptr_t kExpectedInitialOldSpacePages = 1; |
495 // We expect a single additional page in compaction space because we mostly | 495 // We expect a single additional page in compaction space because we mostly |
496 // use external memory. | 496 // use external memory. |
497 const intptr_t kExpectedCompactionPages = 1; | 497 const intptr_t kExpectedCompactionPages = 1; |
498 // We expect two pages to be reachable from old_space in the end. | 498 // We expect two pages to be reachable from old_space in the end. |
499 const intptr_t kExpectedOldSpacePagesAfterMerge = 2; | 499 const intptr_t kExpectedOldSpacePagesAfterMerge = 2; |
500 | 500 |
501 Object* chunk = | |
502 old_space->AllocateRawUnaligned(static_cast<int>(rest)).ToObjectChecked(); | |
503 CHECK_EQ(old_space->CountTotalPages(), kExpectedInitialOldSpacePages); | 501 CHECK_EQ(old_space->CountTotalPages(), kExpectedInitialOldSpacePages); |
504 CHECK(chunk != nullptr); | |
505 CHECK(chunk->IsHeapObject()); | |
506 | |
507 CHECK_EQ(compaction_space->CountTotalPages(), 0); | 502 CHECK_EQ(compaction_space->CountTotalPages(), 0); |
508 CHECK_EQ(compaction_space->Capacity(), 0); | 503 CHECK_EQ(compaction_space->Capacity(), 0); |
509 // Make the rest of memory available for compaction. | 504 // Make the rest of memory available for compaction. |
510 compaction_space->AddExternalMemory(HeapObject::cast(chunk)->address(), | 505 old_space->DivideMemory(&collection, 1, rest); |
511 static_cast<int>(rest)); | |
512 CHECK_EQ(compaction_space->CountTotalPages(), 0); | 506 CHECK_EQ(compaction_space->CountTotalPages(), 0); |
513 CHECK_EQ(compaction_space->Capacity(), rest); | 507 CHECK_EQ(compaction_space->Capacity(), rest); |
514 while (num_rest_objects-- > 0) { | 508 while (num_rest_objects-- > 0) { |
515 compaction_space->AllocateRawUnaligned(kObjectSize).ToObjectChecked(); | 509 compaction_space->AllocateRawUnaligned(kObjectSize).ToObjectChecked(); |
516 } | 510 } |
517 // We only used external memory so far. | 511 // We only used external memory so far. |
518 CHECK_EQ(compaction_space->CountTotalPages(), 0); | 512 CHECK_EQ(compaction_space->CountTotalPages(), 0); |
519 // Additional allocation. | 513 // Additional allocation. |
520 compaction_space->AllocateRawUnaligned(kAdditionalCompactionMemory) | 514 compaction_space->AllocateRawUnaligned(kAdditionalCompactionMemory) |
521 .ToObjectChecked(); | 515 .ToObjectChecked(); |
522 // Now the compaction space should've also acquired a page. | 516 // Now the compaction space should've also acquired a page. |
523 CHECK_EQ(compaction_space->CountTotalPages(), kExpectedCompactionPages); | 517 CHECK_EQ(compaction_space->CountTotalPages(), kExpectedCompactionPages); |
524 | 518 |
525 old_space->MergeCompactionSpace(compaction_space); | 519 old_space->MergeCompactionSpace(compaction_space); |
526 CHECK_EQ(old_space->CountTotalPages(), kExpectedOldSpacePagesAfterMerge); | 520 CHECK_EQ(old_space->CountTotalPages(), kExpectedOldSpacePagesAfterMerge); |
527 | 521 |
528 delete compaction_space; | 522 delete collection; |
529 delete old_space; | 523 delete old_space; |
530 | 524 |
531 allocator->TearDown(); | 525 allocator->TearDown(); |
532 delete allocator; | 526 delete allocator; |
533 } | 527 } |
534 | 528 |
535 | 529 |
536 TEST(LargeObjectSpace) { | 530 TEST(LargeObjectSpace) { |
537 v8::V8::Initialize(); | 531 v8::V8::Initialize(); |
538 | 532 |
(...skipping 95 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
634 | 628 |
635 // Turn the allocation into a proper object so isolate teardown won't | 629 // Turn the allocation into a proper object so isolate teardown won't |
636 // crash. | 630 // crash. |
637 HeapObject* free_space = NULL; | 631 HeapObject* free_space = NULL; |
638 CHECK(allocation.To(&free_space)); | 632 CHECK(allocation.To(&free_space)); |
639 new_space->heap()->CreateFillerObjectAt(free_space->address(), 80); | 633 new_space->heap()->CreateFillerObjectAt(free_space->address(), 80); |
640 } | 634 } |
641 } | 635 } |
642 isolate->Dispose(); | 636 isolate->Dispose(); |
643 } | 637 } |
OLD | NEW |