| OLD | NEW |
| 1 // Copyright 2009 the V8 project authors. All rights reserved. | 1 // Copyright 2009 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 97 matching lines...) |
| 108 int Heap::survived_since_last_expansion_ = 0; | 108 int Heap::survived_since_last_expansion_ = 0; |
| 109 int Heap::external_allocation_limit_ = 0; | 109 int Heap::external_allocation_limit_ = 0; |
| 110 | 110 |
| 111 Heap::HeapState Heap::gc_state_ = NOT_IN_GC; | 111 Heap::HeapState Heap::gc_state_ = NOT_IN_GC; |
| 112 | 112 |
| 113 int Heap::mc_count_ = 0; | 113 int Heap::mc_count_ = 0; |
| 114 int Heap::gc_count_ = 0; | 114 int Heap::gc_count_ = 0; |
| 115 | 115 |
| 116 int Heap::always_allocate_scope_depth_ = 0; | 116 int Heap::always_allocate_scope_depth_ = 0; |
| 117 int Heap::linear_allocation_scope_depth_ = 0; | 117 int Heap::linear_allocation_scope_depth_ = 0; |
| 118 bool Heap::context_disposed_pending_ = false; | 118 |
| 119 int Heap::contexts_disposed_ = 0; |
| 120 bool Heap::context_disposed_use_deprecated_heuristic_ = true; |
| 121 bool Heap::context_disposed_deprecated_pending_ = false; |
| 119 | 122 |
| 120 #ifdef DEBUG | 123 #ifdef DEBUG |
| 121 bool Heap::allocation_allowed_ = true; | 124 bool Heap::allocation_allowed_ = true; |
| 122 | 125 |
| 123 int Heap::allocation_timeout_ = 0; | 126 int Heap::allocation_timeout_ = 0; |
| 124 bool Heap::disallow_allocation_failure_ = false; | 127 bool Heap::disallow_allocation_failure_ = false; |
| 125 #endif // DEBUG | 128 #endif // DEBUG |
| 126 | 129 |
| 127 | 130 |
| 128 int Heap::Capacity() { | 131 int Heap::Capacity() { |
| (...skipping 235 matching lines...) |
| 364 void Heap::CollectAllGarbage(bool force_compaction) { | 367 void Heap::CollectAllGarbage(bool force_compaction) { |
| 365 // Since we are ignoring the return value, the exact choice of space does | 368 // Since we are ignoring the return value, the exact choice of space does |
| 366 // not matter, so long as we do not specify NEW_SPACE, which would not | 369 // not matter, so long as we do not specify NEW_SPACE, which would not |
| 367 // cause a full GC. | 370 // cause a full GC. |
| 368 MarkCompactCollector::SetForceCompaction(force_compaction); | 371 MarkCompactCollector::SetForceCompaction(force_compaction); |
| 369 CollectGarbage(0, OLD_POINTER_SPACE); | 372 CollectGarbage(0, OLD_POINTER_SPACE); |
| 370 MarkCompactCollector::SetForceCompaction(false); | 373 MarkCompactCollector::SetForceCompaction(false); |
| 371 } | 374 } |
| 372 | 375 |
| 373 | 376 |
| 374 void Heap::CollectAllGarbageIfContextDisposed() { | 377 void Heap::CollectAllGarbageIfContextDisposedDeprecated() { |
| 378 if (!context_disposed_use_deprecated_heuristic_) return; |
| 375 // If the garbage collector interface is exposed through the global | 379 // If the garbage collector interface is exposed through the global |
| 376 // gc() function, we avoid being clever about forcing GCs when | 380 // gc() function, we avoid being clever about forcing GCs when |
| 377 // contexts are disposed and leave it to the embedder to make | 381 // contexts are disposed and leave it to the embedder to make |
| 378 // informed decisions about when to force a collection. | 382 // informed decisions about when to force a collection. |
| 379 if (!FLAG_expose_gc && context_disposed_pending_) { | 383 if (!FLAG_expose_gc && context_disposed_deprecated_pending_) { |
| 380 HistogramTimerScope scope(&Counters::gc_context); | 384 HistogramTimerScope scope(&Counters::gc_context); |
| 381 CollectAllGarbage(false); | 385 CollectAllGarbage(false); |
| 382 } | 386 } |
| 383 context_disposed_pending_ = false; | 387 context_disposed_deprecated_pending_ = false; |
| 384 } | 388 } |
| 385 | 389 |
| 386 | 390 |
| 387 void Heap::NotifyContextDisposed() { | 391 void Heap::NotifyContextDisposed() { |
| 388 context_disposed_pending_ = true; | 392 context_disposed_use_deprecated_heuristic_ = false; |
| 393 contexts_disposed_++; |
| 394 } |
| 395 |
| 396 |
| 397 void Heap::NotifyContextDisposedDeprecated() { |
| 398 if (!context_disposed_use_deprecated_heuristic_) return; |
| 399 context_disposed_deprecated_pending_ = true; |
| 389 } | 400 } |
| 390 | 401 |
| 391 | 402 |
| 392 bool Heap::CollectGarbage(int requested_size, AllocationSpace space) { | 403 bool Heap::CollectGarbage(int requested_size, AllocationSpace space) { |
| 393 // The VM is in the GC state until exiting this function. | 404 // The VM is in the GC state until exiting this function. |
| 394 VMState state(GC); | 405 VMState state(GC); |
| 395 | 406 |
| 396 #ifdef DEBUG | 407 #ifdef DEBUG |
| 397 // Reset the allocation timeout to the GC interval, but make sure to | 408 // Reset the allocation timeout to the GC interval, but make sure to |
| 398 // allow at least a few allocations after a collection. The reason | 409 // allow at least a few allocations after a collection. The reason |
| (...skipping 214 matching lines...) |
| 613 | 624 |
| 614 MarkCompactEpilogue(is_compacting); | 625 MarkCompactEpilogue(is_compacting); |
| 615 | 626 |
| 616 LOG(ResourceEvent("markcompact", "end")); | 627 LOG(ResourceEvent("markcompact", "end")); |
| 617 | 628 |
| 618 gc_state_ = NOT_IN_GC; | 629 gc_state_ = NOT_IN_GC; |
| 619 | 630 |
| 620 Shrink(); | 631 Shrink(); |
| 621 | 632 |
| 622 Counters::objs_since_last_full.Set(0); | 633 Counters::objs_since_last_full.Set(0); |
| 623 context_disposed_pending_ = false; | 634 |
| 635 contexts_disposed_ = 0; |
| 636 context_disposed_deprecated_pending_ = false; |
| 624 } | 637 } |
| 625 | 638 |
| 626 | 639 |
| 627 void Heap::MarkCompactPrologue(bool is_compacting) { | 640 void Heap::MarkCompactPrologue(bool is_compacting) { |
| 628 // At any old GC clear the keyed lookup cache to enable collection of unused | 641 // At any old GC clear the keyed lookup cache to enable collection of unused |
| 629 // maps. | 642 // maps. |
| 630 KeyedLookupCache::Clear(); | 643 KeyedLookupCache::Clear(); |
| 631 ContextSlotCache::Clear(); | 644 ContextSlotCache::Clear(); |
| 632 DescriptorLookupCache::Clear(); | 645 DescriptorLookupCache::Clear(); |
| 633 | 646 |
| (...skipping 2431 matching lines...) |
| 3065 } | 3078 } |
| 3066 | 3079 |
| 3067 | 3080 |
| 3068 bool Heap::IdleNotification() { | 3081 bool Heap::IdleNotification() { |
| 3069 static const int kIdlesBeforeScavenge = 4; | 3082 static const int kIdlesBeforeScavenge = 4; |
| 3070 static const int kIdlesBeforeMarkSweep = 7; | 3083 static const int kIdlesBeforeMarkSweep = 7; |
| 3071 static const int kIdlesBeforeMarkCompact = 8; | 3084 static const int kIdlesBeforeMarkCompact = 8; |
| 3072 static int number_idle_notifications = 0; | 3085 static int number_idle_notifications = 0; |
| 3073 static int last_gc_count = gc_count_; | 3086 static int last_gc_count = gc_count_; |
| 3074 | 3087 |
| 3088 if (!FLAG_expose_gc && (contexts_disposed_ > 0)) { |
| 3089 HistogramTimerScope scope(&Counters::gc_context); |
| 3090 CollectAllGarbage(false); |
| 3091 ASSERT(contexts_disposed_ == 0); |
| 3092 return false; |
| 3093 } |
| 3094 |
| 3075 bool finished = false; | 3095 bool finished = false; |
| 3076 | 3096 |
| 3077 if (last_gc_count == gc_count_) { | 3097 if (last_gc_count == gc_count_) { |
| 3078 number_idle_notifications++; | 3098 number_idle_notifications++; |
| 3079 } else { | 3099 } else { |
| 3080 number_idle_notifications = 0; | 3100 number_idle_notifications = 0; |
| 3081 last_gc_count = gc_count_; | 3101 last_gc_count = gc_count_; |
| 3082 } | 3102 } |
| 3083 | 3103 |
| 3084 if (number_idle_notifications == kIdlesBeforeScavenge) { | 3104 if (number_idle_notifications == kIdlesBeforeScavenge) { |
| (...skipping 1134 matching lines...) |
| 4219 void ExternalStringTable::TearDown() { | 4239 void ExternalStringTable::TearDown() { |
| 4220 new_space_strings_.Free(); | 4240 new_space_strings_.Free(); |
| 4221 old_space_strings_.Free(); | 4241 old_space_strings_.Free(); |
| 4222 } | 4242 } |
| 4223 | 4243 |
| 4224 | 4244 |
| 4225 List<Object*> ExternalStringTable::new_space_strings_; | 4245 List<Object*> ExternalStringTable::new_space_strings_; |
| 4226 List<Object*> ExternalStringTable::old_space_strings_; | 4246 List<Object*> ExternalStringTable::old_space_strings_; |
| 4227 | 4247 |
| 4228 } } // namespace v8::internal | 4248 } } // namespace v8::internal |
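
Taken together, the hunks above move the context-disposal GC trigger from the notification itself to the idle hook: NotifyContextDisposed() now only bumps a counter (and switches off the deprecated pending-flag path), and IdleNotification() performs the full collection when gc() is not exposed and at least one context has been disposed. The sketch below is a minimal, self-contained C++ model of that flow under those assumptions; HeapModel, its constructor argument, and the printf are illustrative stand-ins, not V8's actual Heap interface.

// A minimal model of the heuristic this patch introduces. Member names
// mirror the patch for readability; this is a sketch, not V8 code.
#include <cstdio>

class HeapModel {
 public:
  explicit HeapModel(bool expose_gc) : expose_gc_(expose_gc) {}

  // New path: the embedder reports each disposed context; disposal only
  // bumps a counter, and no GC is forced at notification time.
  void NotifyContextDisposed() {
    use_deprecated_heuristic_ = false;
    contexts_disposed_++;
  }

  // Deprecated path: disposal just sets a pending flag that a later
  // CollectAllGarbageIfContextDisposedDeprecated() call may act on.
  void NotifyContextDisposedDeprecated() {
    if (!use_deprecated_heuristic_) return;
    deprecated_pending_ = true;
  }

  // The idle hook now fronts the heuristic: if gc() is not exposed to
  // scripts and a context was disposed since the last full GC, collect
  // immediately and report "not finished".
  bool IdleNotification() {
    if (!expose_gc_ && contexts_disposed_ > 0) {
      CollectAllGarbage(/*force_compaction=*/false);
      return false;
    }
    // ... the usual idle scavenge / mark-sweep staging would follow here ...
    return true;
  }

 private:
  void CollectAllGarbage(bool force_compaction) {
    (void)force_compaction;
    std::printf("full GC (contexts_disposed_ was %d)\n", contexts_disposed_);
    // Mirrors the mark-compact epilogue in the patch: both bookkeeping
    // fields are cleared once a full collection has run.
    contexts_disposed_ = 0;
    deprecated_pending_ = false;
  }

  bool expose_gc_;
  int contexts_disposed_ = 0;
  bool use_deprecated_heuristic_ = true;
  bool deprecated_pending_ = false;
};

int main() {
  HeapModel heap(/*expose_gc=*/false);
  heap.NotifyContextDisposed();  // e.g. an iframe's context goes away
  heap.IdleNotification();       // idle time arrives; the full GC runs here
}

The apparent design point, reading only this diff, is deferral: the expensive full collection no longer runs synchronously on the disposal path but waits until the embedder signals idleness, with the mark-compact epilogue resetting both the counter and the deprecated pending flag.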