OLD | NEW |
1 // Copyright 2009 the V8 project authors. All rights reserved. | 1 // Copyright 2009 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 97 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
// New-space survival counter; reset to 0 here at startup.
int Heap::survived_since_last_expansion_ = 0;
// Limit used when accounting externally allocated memory.
int Heap::external_allocation_limit_ = 0;

// Current collector phase; NOT_IN_GC outside of a collection.
Heap::HeapState Heap::gc_state_ = NOT_IN_GC;

// mc_count_ counts mark-compact collections; gc_count_ counts all GCs.
int Heap::mc_count_ = 0;
int Heap::gc_count_ = 0;

// Nesting depths of the corresponding allocation scopes.
int Heap::always_allocate_scope_depth_ = 0;
int Heap::linear_allocation_scope_depth_ = 0;

// Number of contexts disposed since the last full GC (incremented in
// NotifyContextDisposed, consumed/reset when a full GC runs).
int Heap::contexts_disposed_ = 0;
// True until the first call to NotifyContextDisposed; while true, the
// old pending-flag heuristic below is still honored.
bool Heap::context_disposed_use_deprecated_heuristic_ = true;
// Set by NotifyContextDisposedDeprecated; cleared once the deprecated
// context-disposal GC has been considered.
bool Heap::context_disposed_deprecated_pending_ = false;

#ifdef DEBUG
bool Heap::allocation_allowed_ = true;

// GC interval countdown used to stress-test allocation failure paths.
int Heap::allocation_timeout_ = 0;
bool Heap::disallow_allocation_failure_ = false;
#endif  // DEBUG
126 | 129 |
127 | 130 |
128 int Heap::Capacity() { | 131 int Heap::Capacity() { |
(...skipping 235 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
364 void Heap::CollectAllGarbage(bool force_compaction) { | 367 void Heap::CollectAllGarbage(bool force_compaction) { |
365 // Since we are ignoring the return value, the exact choice of space does | 368 // Since we are ignoring the return value, the exact choice of space does |
366 // not matter, so long as we do not specify NEW_SPACE, which would not | 369 // not matter, so long as we do not specify NEW_SPACE, which would not |
367 // cause a full GC. | 370 // cause a full GC. |
368 MarkCompactCollector::SetForceCompaction(force_compaction); | 371 MarkCompactCollector::SetForceCompaction(force_compaction); |
369 CollectGarbage(0, OLD_POINTER_SPACE); | 372 CollectGarbage(0, OLD_POINTER_SPACE); |
370 MarkCompactCollector::SetForceCompaction(false); | 373 MarkCompactCollector::SetForceCompaction(false); |
371 } | 374 } |
372 | 375 |
373 | 376 |
374 void Heap::CollectAllGarbageIfContextDisposed(bool notified) { | 377 void Heap::CollectAllGarbageIfContextDisposedDeprecated() { |
375 // If the request has ever been the result of an explicit | 378 if (!context_disposed_use_deprecated_heuristic_) return; |
376 // notification, we ignore non-notified requests. This is a | |
377 // temporary solution to let the two ways of achieving GC at | |
378 // context disposal time co-exist. | |
379 static bool ever_notified = false; | |
380 if (notified) ever_notified = true; | |
381 if (ever_notified && !notified) return; | |
382 | |
383 // If the garbage collector interface is exposed through the global | 379 // If the garbage collector interface is exposed through the global |
384 // gc() function, we avoid being clever about forcing GCs when | 380 // gc() function, we avoid being clever about forcing GCs when |
385 // contexts are disposed and leave it to the embedder to make | 381 // contexts are disposed and leave it to the embedder to make |
386 // informed decisions about when to force a collection. | 382 // informed decisions about when to force a collection. |
387 if (!FLAG_expose_gc && (notified || context_disposed_pending_)) { | 383 if (!FLAG_expose_gc && context_disposed_deprecated_pending_) { |
388 HistogramTimerScope scope(&Counters::gc_context); | 384 HistogramTimerScope scope(&Counters::gc_context); |
389 CollectAllGarbage(false); | 385 CollectAllGarbage(false); |
390 } | 386 } |
391 context_disposed_pending_ = false; | 387 context_disposed_deprecated_pending_ = false; |
392 } | 388 } |
393 | 389 |
394 | 390 |
395 void Heap::NotifyContextDisposed() { | 391 void Heap::NotifyContextDisposed() { |
396 context_disposed_pending_ = true; | 392 context_disposed_use_deprecated_heuristic_ = false; |
| 393 contexts_disposed_++; |
| 394 } |
| 395 |
| 396 |
| 397 void Heap::NotifyContextDisposedDeprecated() { |
| 398 if (!context_disposed_use_deprecated_heuristic_) return; |
| 399 context_disposed_deprecated_pending_ = true; |
397 } | 400 } |
398 | 401 |
399 | 402 |
400 bool Heap::CollectGarbage(int requested_size, AllocationSpace space) { | 403 bool Heap::CollectGarbage(int requested_size, AllocationSpace space) { |
401 // The VM is in the GC state until exiting this function. | 404 // The VM is in the GC state until exiting this function. |
402 VMState state(GC); | 405 VMState state(GC); |
403 | 406 |
404 #ifdef DEBUG | 407 #ifdef DEBUG |
405 // Reset the allocation timeout to the GC interval, but make sure to | 408 // Reset the allocation timeout to the GC interval, but make sure to |
406 // allow at least a few allocations after a collection. The reason | 409 // allow at least a few allocations after a collection. The reason |
(...skipping 225 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
632 | 635 |
633 MarkCompactEpilogue(is_compacting); | 636 MarkCompactEpilogue(is_compacting); |
634 | 637 |
635 LOG(ResourceEvent("markcompact", "end")); | 638 LOG(ResourceEvent("markcompact", "end")); |
636 | 639 |
637 gc_state_ = NOT_IN_GC; | 640 gc_state_ = NOT_IN_GC; |
638 | 641 |
639 Shrink(); | 642 Shrink(); |
640 | 643 |
641 Counters::objs_since_last_full.Set(0); | 644 Counters::objs_since_last_full.Set(0); |
642 context_disposed_pending_ = false; | 645 |
| 646 contexts_disposed_ = 0; |
| 647 context_disposed_deprecated_pending_ = false; |
643 } | 648 } |
644 | 649 |
645 | 650 |
646 void Heap::MarkCompactPrologue(bool is_compacting) { | 651 void Heap::MarkCompactPrologue(bool is_compacting) { |
647 // At any old GC clear the keyed lookup cache to enable collection of unused | 652 // At any old GC clear the keyed lookup cache to enable collection of unused |
648 // maps. | 653 // maps. |
649 KeyedLookupCache::Clear(); | 654 KeyedLookupCache::Clear(); |
650 ContextSlotCache::Clear(); | 655 ContextSlotCache::Clear(); |
651 DescriptorLookupCache::Clear(); | 656 DescriptorLookupCache::Clear(); |
652 | 657 |
(...skipping 2428 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3081 } | 3086 } |
3082 | 3087 |
3083 | 3088 |
3084 bool Heap::IdleNotification() { | 3089 bool Heap::IdleNotification() { |
3085 static const int kIdlesBeforeScavenge = 4; | 3090 static const int kIdlesBeforeScavenge = 4; |
3086 static const int kIdlesBeforeMarkSweep = 7; | 3091 static const int kIdlesBeforeMarkSweep = 7; |
3087 static const int kIdlesBeforeMarkCompact = 8; | 3092 static const int kIdlesBeforeMarkCompact = 8; |
3088 static int number_idle_notifications = 0; | 3093 static int number_idle_notifications = 0; |
3089 static int last_gc_count = gc_count_; | 3094 static int last_gc_count = gc_count_; |
3090 | 3095 |
| 3096 if (!FLAG_expose_gc && (contexts_disposed_ > 0)) { |
| 3097 HistogramTimerScope scope(&Counters::gc_context); |
| 3098 CollectAllGarbage(false); |
| 3099 ASSERT(contexts_disposed_ == 0); |
| 3100 return false; |
| 3101 } |
| 3102 |
3091 bool finished = false; | 3103 bool finished = false; |
3092 | 3104 |
3093 if (last_gc_count == gc_count_) { | 3105 if (last_gc_count == gc_count_) { |
3094 number_idle_notifications++; | 3106 number_idle_notifications++; |
3095 } else { | 3107 } else { |
3096 number_idle_notifications = 0; | 3108 number_idle_notifications = 0; |
3097 last_gc_count = gc_count_; | 3109 last_gc_count = gc_count_; |
3098 } | 3110 } |
3099 | 3111 |
3100 if (number_idle_notifications == kIdlesBeforeScavenge) { | 3112 if (number_idle_notifications == kIdlesBeforeScavenge) { |
(...skipping 1137 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4238 void ExternalStringTable::TearDown() { | 4250 void ExternalStringTable::TearDown() { |
4239 new_space_strings_.Free(); | 4251 new_space_strings_.Free(); |
4240 old_space_strings_.Free(); | 4252 old_space_strings_.Free(); |
4241 } | 4253 } |
4242 | 4254 |
4243 | 4255 |
// Storage for the external string table, partitioned by the space the
// strings are recorded in (new space vs. old spaces).
List<Object*> ExternalStringTable::new_space_strings_;
List<Object*> ExternalStringTable::old_space_strings_;
4246 | 4258 |
4247 } } // namespace v8::internal | 4259 } } // namespace v8::internal |
OLD | NEW |