| OLD | NEW |
| 1 // Copyright 2008 the V8 project authors. All rights reserved. | 1 // Copyright 2008 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 539 matching lines...) |
| 550 } | 550 } |
| 551 | 551 |
| 552 | 552 |
| 553 static inline void CheckDoubleEquals(double expected, double actual) { | 553 static inline void CheckDoubleEquals(double expected, double actual) { |
| 554 const double kEpsilon = 1e-10; | 554 const double kEpsilon = 1e-10; |
| 555 CHECK_LE(expected, actual + kEpsilon); | 555 CHECK_LE(expected, actual + kEpsilon); |
| 556 CHECK_GE(expected, actual - kEpsilon); | 556 CHECK_GE(expected, actual - kEpsilon); |
| 557 } | 557 } |
| 558 | 558 |
| 559 | 559 |
| 560 static int LenFromSize(int size) { | |
| 561 return (size - i::FixedArray::kHeaderSize) / i::kPointerSize; | |
| 562 } | |
| 563 | |
| 564 | |
| 565 static inline void CreatePadding(i::Heap* heap, int padding_size, | |
| 566 i::PretenureFlag tenure) { | |
| 567 const int max_number_of_objects = 20; | |
| 568 v8::internal::Handle<v8::internal::FixedArray> | |
| 569 big_objects[max_number_of_objects]; | |
| 570 i::Isolate* isolate = heap->isolate(); | |
| 571 int allocate_memory; | |
| 572 int length; | |
| 573 int free_memory = padding_size; | |
| 574 if (tenure == i::TENURED) { | |
| 575 int current_free_memory = | |
| 576 static_cast<int>(*heap->old_space()->allocation_limit_address() - | |
| 577 *heap->old_space()->allocation_top_address()); | |
| 578 CHECK(padding_size <= current_free_memory || current_free_memory == 0); | |
| 579 } else { | |
| 580 heap->new_space()->DisableInlineAllocationSteps(); | |
| 581 int current_free_memory = | |
| 582 static_cast<int>(*heap->new_space()->allocation_limit_address() - | |
| 583 *heap->new_space()->allocation_top_address()); | |
| 584 CHECK(padding_size <= current_free_memory || current_free_memory == 0); | |
| 585 } | |
| 586 for (int i = 0; i < max_number_of_objects && free_memory > 0; i++) { | |
| 587 if (free_memory > i::Page::kMaxRegularHeapObjectSize) { | |
| 588 allocate_memory = i::Page::kMaxRegularHeapObjectSize; | |
| 589 length = LenFromSize(allocate_memory); | |
| 590 } else { | |
| 591 allocate_memory = free_memory; | |
| 592 length = LenFromSize(allocate_memory); | |
| 593 if (length <= 0) { | |
| 594 // Not enough room to create another fixed array. Let's create a filler. | |
| 595 heap->CreateFillerObjectAt(*heap->old_space()->allocation_top_address(), | |
| 596 free_memory); | |
| 597 break; | |
| 598 } | |
| 599 } | |
| 600 big_objects[i] = isolate->factory()->NewFixedArray(length, tenure); | |
| 601 CHECK((tenure == i::NOT_TENURED && heap->InNewSpace(*big_objects[i])) || | |
| 602 (tenure == i::TENURED && heap->InOldSpace(*big_objects[i]))); | |
| 603 free_memory -= allocate_memory; | |
| 604 } | |
| 605 } | |
| 606 | |
| 607 | |
| 608 // Helper function that fills up the current page in new-space; returns | |
| 609 // false if the page was already full. | |
| 610 static inline bool FillUpOnePage(v8::internal::NewSpace* space) { | |
| 610 space->DisableInlineAllocationSteps(); | |
| 611 int space_remaining = static_cast<int>(*space->allocation_limit_address() - | |
| 612 *space->allocation_top_address()); | |
| 613 if (space_remaining == 0) return false; | |
| 614 CreatePadding(space->heap(), space_remaining, i::NOT_TENURED); | |
| 615 return true; | |
| 616 } | |
| 617 | |
| 618 | |
| 619 // Helper function that fills new-space until only extra_bytes remain free. | |
| 620 static inline void AllocateAllButNBytes(v8::internal::NewSpace* space, | |
| 621 int extra_bytes) { | |
| 622 space->DisableInlineAllocationSteps(); | |
| 623 int space_remaining = static_cast<int>(*space->allocation_limit_address() - | |
| 624 *space->allocation_top_address()); | |
| 625 CHECK(space_remaining >= extra_bytes); | |
| 626 int new_linear_size = space_remaining - extra_bytes; | |
| 627 if (new_linear_size == 0) return; | |
| 628 CreatePadding(space->heap(), new_linear_size, i::NOT_TENURED); | |
| 629 } | |
| 630 | |
| 631 | |
| 632 static inline void FillCurrentPage(v8::internal::NewSpace* space) { | |
| 633 AllocateAllButNBytes(space, 0); | |
| 634 } | |
| 635 | |
| 636 | |
| 637 static inline void SimulateFullSpace(v8::internal::NewSpace* space) { | |
| 638 FillCurrentPage(space); | |
| 639 while (FillUpOnePage(space)) { | |
| 640 } | |
| 641 } | |
| 642 | |
| 643 | |
| 644 // Helper function that simulates a full old-space in the heap. | |
| 645 static inline void SimulateFullSpace(v8::internal::PagedSpace* space) { | |
| 646 space->EmptyAllocationInfo(); | |
| 647 space->ResetFreeList(); | |
| 648 space->ClearStats(); | |
| 649 } | |
| 650 | |
| 651 | |
| 652 // Helper function that simulates many incremental marking steps until | |
| 653 // marking is completed. | |
| 654 static inline void SimulateIncrementalMarking(i::Heap* heap, | |
| 655 bool force_completion = true) { | |
| 656 i::MarkCompactCollector* collector = heap->mark_compact_collector(); | |
| 657 i::IncrementalMarking* marking = heap->incremental_marking(); | |
| 658 if (collector->sweeping_in_progress()) { | |
| 659 collector->EnsureSweepingCompleted(); | |
| 660 } | |
| 661 CHECK(marking->IsMarking() || marking->IsStopped()); | |
| 662 if (marking->IsStopped()) { | |
| 663 heap->StartIncrementalMarking(); | |
| 664 } | |
| 665 CHECK(marking->IsMarking()); | |
| 666 if (!force_completion) return; | |
| 667 | |
| 668 while (!marking->IsComplete()) { | |
| 669 marking->Step(i::MB, i::IncrementalMarking::NO_GC_VIA_STACK_GUARD); | |
| 670 if (marking->IsReadyToOverApproximateWeakClosure()) { | |
| 671 marking->FinalizeIncrementally(); | |
| 672 } | |
| 673 } | |
| 674 CHECK(marking->IsComplete()); | |
| 675 } | |
| 676 | |
| 677 | |
| 678 static void DummyDebugEventListener( | 560 static void DummyDebugEventListener( |
| 679 const v8::Debug::EventDetails& event_details) {} | 561 const v8::Debug::EventDetails& event_details) {} |
| 680 | 562 |
| 681 | 563 |
| 682 static inline void EnableDebugger(v8::Isolate* isolate) { | 564 static inline void EnableDebugger(v8::Isolate* isolate) { |
| 683 v8::Debug::SetDebugEventListener(isolate, &DummyDebugEventListener); | 565 v8::Debug::SetDebugEventListener(isolate, &DummyDebugEventListener); |
| 684 } | 566 } |
| 685 | 567 |
| 686 | 568 |
| 687 static inline void DisableDebugger(v8::Isolate* isolate) { | 569 static inline void DisableDebugger(v8::Isolate* isolate) { |
| (...skipping 28 matching lines...) |
| 716 HandleAndZoneScope() {} | 598 HandleAndZoneScope() {} |
| 717 | 599 |
| 718 // Prefixing the below with main_ reduces a lot of naming clashes. | 600 // Prefixing the below with main_ reduces a lot of naming clashes. |
| 719 i::Zone* main_zone() { return &main_zone_; } | 601 i::Zone* main_zone() { return &main_zone_; } |
| 720 | 602 |
| 721 private: | 603 private: |
| 722 i::Zone main_zone_; | 604 i::Zone main_zone_; |
| 723 }; | 605 }; |
| 724 | 606 |
| 725 #endif // ifndef CCTEST_H_ | 607 #endif // ifndef CCTEST_H_ |
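
For readers coming to the removed helpers cold: a minimal sketch of how they are typically driven from a cctest. This is illustrative only and not code from this CL; the TEST name and the post-scavenge expectation are assumptions, while CcTest::InitializeVM(), CcTest::heap(), and CcTest::i_isolate() come from the surrounding cctest.h.

    // Illustrative sketch, not code from this CL: exercises the new-space
    // helpers that this change removes from cctest.h.
    TEST(SimulateFullNewSpaceSketch) {
      CcTest::InitializeVM();
      v8::HandleScope handle_scope(CcTest::isolate());
      i::Heap* heap = CcTest::heap();

      // Fill every new-space page up to its allocation limit, so the next
      // new-space allocation has to trigger a scavenge first.
      SimulateFullSpace(heap->new_space());

      // The padding arrays are unreachable once SimulateFullSpace returns,
      // so the scavenge frees them and this allocation is expected to land
      // in new space again.
      i::Handle<i::FixedArray> array =
          CcTest::i_isolate()->factory()->NewFixedArray(16);
      CHECK(heap->InNewSpace(*array));
    }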
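
Relatedly, the arithmetic in LenFromSize() is the floor inverse of FixedArray::SizeFor(): a fixed array of LenFromSize(size) elements occupies at most size bytes, which is what lets CreatePadding() carve a free region into fixed arrays. A standalone sketch of that round-trip, with constants assumed for a 64-bit build rather than taken from this CL:

    #include <cassert>

    int main() {
      // Assumed 64-bit layout: tagged pointers are 8 bytes and a FixedArray
      // header holds two pointer-sized fields (map and length).
      const int kPointerSize = 8;
      const int kHeaderSize = 2 * kPointerSize;

      // FixedArray::SizeFor(len): header plus one slot per element.
      auto size_for = [&](int len) { return kHeaderSize + len * kPointerSize; };
      // LenFromSize(size): how many elements fit in a block of size bytes.
      auto len_from_size = [&](int size) {
        return (size - kHeaderSize) / kPointerSize;
      };

      // len_from_size() floors, so the resulting array never overflows the
      // block it is meant to pad.
      for (int size = kHeaderSize; size <= 4096; size += 4) {
        assert(size_for(len_from_size(size)) <= size);
      }
      return 0;
    }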