| OLD | NEW |
| 1 // Copyright 2009 the V8 project authors. All rights reserved. | 1 // Copyright 2009 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 203 matching lines...) |
| 214 } | 214 } |
| 215 | 215 |
| 216 | 216 |
| 217 // TODO(1238405): Combine the infrastructure for --heap-stats and | 217 // TODO(1238405): Combine the infrastructure for --heap-stats and |
| 218 // --log-gc to avoid the complicated preprocessor and flag testing. | 218 // --log-gc to avoid the complicated preprocessor and flag testing. |
| 219 void Heap::ReportStatisticsAfterGC() { | 219 void Heap::ReportStatisticsAfterGC() { |
| 220 // Similar to the before GC, we use some complicated logic to ensure that | 220 // Similar to the before GC, we use some complicated logic to ensure that |
| 221 // NewSpace statistics are logged exactly once when --log-gc is turned on. | 221 // NewSpace statistics are logged exactly once when --log-gc is turned on. |
| 222 #if defined(DEBUG) && defined(ENABLE_LOGGING_AND_PROFILING) | 222 #if defined(DEBUG) && defined(ENABLE_LOGGING_AND_PROFILING) |
| 223 if (FLAG_heap_stats) { | 223 if (FLAG_heap_stats) { |
| | 224 new_space_.CollectStatistics(); |
| 224 ReportHeapStatistics("After GC"); | 225 ReportHeapStatistics("After GC"); |
| 225 } else if (FLAG_log_gc) { | 226 } else if (FLAG_log_gc) { |
| 226 new_space_.ReportStatistics(); | 227 new_space_.ReportStatistics(); |
| 227 } | 228 } |
| 228 #elif defined(DEBUG) | 229 #elif defined(DEBUG) |
| 229 if (FLAG_heap_stats) ReportHeapStatistics("After GC"); | 230 if (FLAG_heap_stats) ReportHeapStatistics("After GC"); |
| 230 #elif defined(ENABLE_LOGGING_AND_PROFILING) | 231 #elif defined(ENABLE_LOGGING_AND_PROFILING) |
| 231 if (FLAG_log_gc) new_space_.ReportStatistics(); | 232 if (FLAG_log_gc) new_space_.ReportStatistics(); |
| 232 #endif | 233 #endif |
| 233 } | 234 } |
| (...skipping 187 matching lines...) |
| 421 | 422 |
| 422 if (collector == MARK_COMPACTOR) { | 423 if (collector == MARK_COMPACTOR) { |
| 423 MarkCompact(tracer); | 424 MarkCompact(tracer); |
| 424 | 425 |
| 425 int old_gen_size = PromotedSpaceSize(); | 426 int old_gen_size = PromotedSpaceSize(); |
| 426 old_gen_promotion_limit_ = | 427 old_gen_promotion_limit_ = |
| 427 old_gen_size + Max(kMinimumPromotionLimit, old_gen_size / 3); | 428 old_gen_size + Max(kMinimumPromotionLimit, old_gen_size / 3); |
| 428 old_gen_allocation_limit_ = | 429 old_gen_allocation_limit_ = |
| 429 old_gen_size + Max(kMinimumAllocationLimit, old_gen_size / 2); | 430 old_gen_size + Max(kMinimumAllocationLimit, old_gen_size / 2); |
| 430 old_gen_exhausted_ = false; | 431 old_gen_exhausted_ = false; |
| 431 | |
| 432 // If we have used the mark-compact collector to collect the new | |
| 433 // space, and it has not compacted the new space, we force a | |
| 434 // separate scavenge collection. This is a hack. It covers the | |
| 435 // case where (1) a new space collection was requested, (2) the | |
| 436 // collector selection policy selected the mark-compact collector, | |
| 437 // and (3) the mark-compact collector policy selected not to | |
| 438 // compact the new space. In that case, there is no more (usable) | |
| 439 // free space in the new space after the collection compared to | |
| 440 // before. | |
| 441 if (space == NEW_SPACE && !MarkCompactCollector::HasCompacted()) { | |
| 442 Scavenge(); | |
| 443 } | |
| 444 } else { | |
| 445 Scavenge(); | |
| 446 } | 432 } |
| | 433 Scavenge(); |
| 447 Counters::objs_since_last_young.Set(0); | 434 Counters::objs_since_last_young.Set(0); |
| 448 | 435 |
| 449 PostGarbageCollectionProcessing(); | 436 PostGarbageCollectionProcessing(); |
| 450 | 437 |
| 451 if (collector == MARK_COMPACTOR) { | 438 if (collector == MARK_COMPACTOR) { |
| 452 // Register the amount of external allocated memory. | 439 // Register the amount of external allocated memory. |
| 453 amount_of_external_allocated_memory_at_last_global_gc_ = | 440 amount_of_external_allocated_memory_at_last_global_gc_ = |
| 454 amount_of_external_allocated_memory_; | 441 amount_of_external_allocated_memory_; |
| 455 } | 442 } |
| 456 | 443 |
| (...skipping 3213 matching lines...) |
| 3670 #ifdef DEBUG | 3657 #ifdef DEBUG |
| 3671 bool Heap::GarbageCollectionGreedyCheck() { | 3658 bool Heap::GarbageCollectionGreedyCheck() { |
| 3672 ASSERT(FLAG_gc_greedy); | 3659 ASSERT(FLAG_gc_greedy); |
| 3673 if (Bootstrapper::IsActive()) return true; | 3660 if (Bootstrapper::IsActive()) return true; |
| 3674 if (disallow_allocation_failure()) return true; | 3661 if (disallow_allocation_failure()) return true; |
| 3675 return CollectGarbage(0, NEW_SPACE); | 3662 return CollectGarbage(0, NEW_SPACE); |
| 3676 } | 3663 } |
| 3677 #endif | 3664 #endif |
| 3678 | 3665 |
| 3679 } } // namespace v8::internal | 3666 } } // namespace v8::internal |
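For context, the behavioral change in the middle hunk is that `Scavenge()` now runs unconditionally after every collection, replacing the old special case that forced a follow-up scavenge only when a new-space collection was routed to a mark-compact that did not compact new space; the first hunk additionally calls `new_space_.CollectStatistics()` before `ReportHeapStatistics("After GC")` under `--heap-stats`. The sketch below is a minimal, standalone illustration of that control-flow simplification in `Heap::PerformGarbageCollection`; the stub functions, enums, and `main` are hypothetical placeholders that only mirror the shape of the old versus new logic shown in the diff, not V8's actual declarations.

```cpp
// Standalone sketch of the control-flow change in this patch.
// All names below are illustrative stubs, not V8's real API.
#include <iostream>

enum AllocationSpace { NEW_SPACE, OLD_SPACE };
enum GarbageCollector { SCAVENGER, MARK_COMPACTOR };

void MarkCompact() { std::cout << "mark-compact\n"; }
void Scavenge() { std::cout << "scavenge\n"; }
bool HasCompacted() { return false; }  // pretend the mark-compactor skipped compaction

// Old logic: scavenge for the scavenger, or as a forced follow-up when a
// new-space request fell through to a non-compacting mark-compact.
void PerformGarbageCollectionOld(AllocationSpace space, GarbageCollector collector) {
  if (collector == MARK_COMPACTOR) {
    MarkCompact();
    if (space == NEW_SPACE && !HasCompacted()) Scavenge();
  } else {
    Scavenge();
  }
}

// New logic (as in this patch): the scavenge runs after any collection,
// whichever collector was selected.
void PerformGarbageCollectionNew(AllocationSpace /*space*/, GarbageCollector collector) {
  if (collector == MARK_COMPACTOR) {
    MarkCompact();
  }
  Scavenge();
}

int main() {
  PerformGarbageCollectionOld(NEW_SPACE, MARK_COMPACTOR);  // mark-compact, scavenge
  PerformGarbageCollectionNew(NEW_SPACE, MARK_COMPACTOR);  // mark-compact, scavenge
  return 0;
}
```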