OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_HEAP_HEAP_H_ | 5 #ifndef V8_HEAP_HEAP_H_ |
6 #define V8_HEAP_HEAP_H_ | 6 #define V8_HEAP_HEAP_H_ |
7 | 7 |
8 #include <cmath> | 8 #include <cmath> |
9 | 9 |
10 #include "src/allocation.h" | 10 #include "src/allocation.h" |
(...skipping 741 matching lines...)
752 AllocationSpace space, const char* gc_reason = NULL, | 752 AllocationSpace space, const char* gc_reason = NULL, |
753 const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags); | 753 const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags); |
754 | 754 |
755 static const int kNoGCFlags = 0; | 755 static const int kNoGCFlags = 0; |
756 static const int kReduceMemoryFootprintMask = 1; | 756 static const int kReduceMemoryFootprintMask = 1; |
757 static const int kAbortIncrementalMarkingMask = 2; | 757 static const int kAbortIncrementalMarkingMask = 2; |
758 | 758 |
759 // Making the heap iterable requires us to abort incremental marking. | 759 // Making the heap iterable requires us to abort incremental marking. |
760 static const int kMakeHeapIterableMask = kAbortIncrementalMarkingMask; | 760 static const int kMakeHeapIterableMask = kAbortIncrementalMarkingMask; |
761 | 761 |
| 762 // Invoked when GC was requested via the stack guard. |
| 763 void HandleGCRequest(); |
| 764 |
| 765 // Attempt to over-approximate the weak closure by marking object groups and |
| 766 // implicit references from global handles, but don't atomically complete |
| 767 // marking. If we continue to mark incrementally, we might have marked |
| 768 // objects that die later. |
| 769 void OverApproximateWeakClosure(const char* gc_reason); |
| 770 |
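A minimal caller-side sketch, assuming the stack guard's GC interrupt dispatches to these two new entry points; the dispatch condition below is an illustration only, not the implementation from this CL (which would live in heap.cc, not in this header):

    // Illustrative only: one plausible shape for servicing a stack-guard
    // GC request using the declarations above. The branch condition is an
    // assumption for this sketch, not code from the CL.
    void Heap::HandleGCRequest() {
      if (incremental_marking()->IsMarking()) {
        // Mark object groups and implicit references from global handles
        // without atomically completing marking.
        OverApproximateWeakClosure("GC interrupt");
      } else {
        CollectAllGarbage(kNoGCFlags, "GC interrupt");
      }
    }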
762 // Performs a full garbage collection. If (flags & kMakeHeapIterableMask) is | 771 // Performs a full garbage collection. If (flags & kMakeHeapIterableMask) is |
763 // non-zero, then the slower precise sweeper is used, which leaves the heap | 772 // non-zero, then the slower precise sweeper is used, which leaves the heap |
764 // in a state where we can iterate over the heap visiting all objects. | 773 // in a state where we can iterate over the heap visiting all objects. |
765 void CollectAllGarbage( | 774 void CollectAllGarbage( |
766 int flags, const char* gc_reason = NULL, | 775 int flags, const char* gc_reason = NULL, |
767 const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags); | 776 const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags); |
768 | 777 |
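For illustration, hypothetical call sites combining the GC flags declared above with CollectAllGarbage; the reason strings are made up, but the flags and the signature are the ones declared in this header:

    // Full GC that aborts incremental marking so the heap can be iterated
    // afterwards (kMakeHeapIterableMask == kAbortIncrementalMarkingMask):
    heap->CollectAllGarbage(Heap::kMakeHeapIterableMask, "heap iteration");

    // Full GC biased toward shrinking the heap's memory footprint:
    heap->CollectAllGarbage(
        Heap::kReduceMemoryFootprintMask | Heap::kAbortIncrementalMarkingMask,
        "memory pressure");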
769 // Last-resort GC; tries to reclaim as much memory as possible. | 778 // Last-resort GC; tries to reclaim as much memory as possible. |
770 void CollectAllAvailableGarbage(const char* gc_reason = NULL); | 779 void CollectAllAvailableGarbage(const char* gc_reason = NULL); |
771 | 780 |
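As a usage sketch, the last-resort collector takes only an optional reason string; the low-memory context here is an assumed example, not taken from this diff:

    // Hypothetical: invoked when the embedder signals severe memory pressure.
    heap->CollectAllAvailableGarbage("low memory notification");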
(...skipping 1813 matching lines...)
2585 DisallowHeapAllocation no_allocation; // i.e. no gc allowed. | 2594 DisallowHeapAllocation no_allocation; // i.e. no gc allowed. |
2586 | 2595 |
2587 private: | 2596 private: |
2588 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer); | 2597 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer); |
2589 }; | 2598 }; |
2590 #endif // DEBUG | 2599 #endif // DEBUG |
2591 } | 2600 } |
2592 } // namespace v8::internal | 2601 } // namespace v8::internal |
2593 | 2602 |
2594 #endif // V8_HEAP_HEAP_H_ | 2603 #endif // V8_HEAP_HEAP_H_ |