Index: src/heap/mark-compact.cc
diff --git a/src/heap/mark-compact.cc b/src/heap/mark-compact.cc
index c7e4d8f29a6cd9c60f1882bb945f0be518a6f256..d7ae8babc2aa67e101e31ec3b374e0ce9e596e35 100644
--- a/src/heap/mark-compact.cc
+++ b/src/heap/mark-compact.cc
@@ -600,7 +600,7 @@ void MarkCompactCollector::ComputeEvacuationHeuristics(
   // For memory reducing and optimize for memory mode we directly define both
   // constants.
   const int kTargetFragmentationPercentForReduceMemory = 20;
-  const int kMaxEvacuatedBytesForReduceMemory = 12 * Page::kPageSize;
+  const int kMaxEvacuatedBytesForReduceMemory = 12 * MB;
   const int kTargetFragmentationPercentForOptimizeMemory = 20;
   const int kMaxEvacuatedBytesForOptimizeMemory = 6 * MB;
 
@@ -608,10 +608,10 @@ void MarkCompactCollector::ComputeEvacuationHeuristics(
   // defaults to start and switch to a trace-based (using compaction speed)
   // approach as soon as we have enough samples.
   const int kTargetFragmentationPercent = 70;
-  const int kMaxEvacuatedBytes = 4 * Page::kPageSize;
+  const int kMaxEvacuatedBytes = 4 * MB;
   // Time to take for a single area (=payload of page). Used as soon as there
   // exist enough compaction speed samples.
-  const int kTargetMsPerArea = 1;
+  const float kTargetMsPerArea = 0.5;
 
   if (heap()->ShouldReduceMemory()) {
     *target_fragmentation_percent = kTargetFragmentationPercentForReduceMemory;
@@ -3218,7 +3218,7 @@ int MarkCompactCollector::NumberOfParallelCompactionTasks(int pages,
   // The number of parallel compaction tasks is limited by:
   // - #evacuation pages
   // - (#cores - 1)
-  const double kTargetCompactionTimeInMs = 1;
+  const double kTargetCompactionTimeInMs = .5;
   const int kNumSweepingTasks = 3;
 
   double compaction_speed =
@@ -3918,6 +3918,11 @@ void MarkCompactCollector::StartSweepSpace(PagedSpace* space) {
      continue;
    }
 
+    if (p->IsFlagSet(Page::NEVER_SWEEP)) {
+      p->ClearLiveness();
+      continue;
+    }
+
    if (p->IsFlagSet(Page::NEVER_ALLOCATE_ON_PAGE)) {
      // We need to sweep the page to get it into an iterable state again. Note
      // that this adds unusable memory into the free list that is later on