OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_HEAP_HEAP_H_ | 5 #ifndef V8_HEAP_HEAP_H_ |
6 #define V8_HEAP_HEAP_H_ | 6 #define V8_HEAP_HEAP_H_ |
7 | 7 |
8 #include <cmath> | 8 #include <cmath> |
9 | 9 |
10 #include "src/allocation.h" | 10 #include "src/allocation.h" |
(...skipping 736 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
747 AllocationSpace space, const char* gc_reason = NULL, | 747 AllocationSpace space, const char* gc_reason = NULL, |
748 const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags); | 748 const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags); |
749 | 749 |
750 static const int kNoGCFlags = 0; | 750 static const int kNoGCFlags = 0; |
751 static const int kReduceMemoryFootprintMask = 1; | 751 static const int kReduceMemoryFootprintMask = 1; |
752 static const int kAbortIncrementalMarkingMask = 2; | 752 static const int kAbortIncrementalMarkingMask = 2; |
753 | 753 |
754 // Making the heap iterable requires us to abort incremental marking. | 754 // Making the heap iterable requires us to abort incremental marking. |
755 static const int kMakeHeapIterableMask = kAbortIncrementalMarkingMask; | 755 static const int kMakeHeapIterableMask = kAbortIncrementalMarkingMask; |
756 | 756 |
757 // Invoked when GC was requested via the stack guard. | |
758 void HandleGCRequest(); | |
759 | |
760 void OverApproximateWeakClosure(const char* gc_reason); | |
Hannes Payer (out of office)
2015/02/03 17:40:11
Please add a comment here how the over approximation works.
jochen (gone - plz use gerrit)
2015/02/09 16:03:43
done
| |
761 | |
757 // Performs a full garbage collection. If (flags & kMakeHeapIterableMask) is | 762 // Performs a full garbage collection. If (flags & kMakeHeapIterableMask) is |
758 // non-zero, then the slower precise sweeper is used, which leaves the heap | 763 // non-zero, then the slower precise sweeper is used, which leaves the heap |
759 // in a state where we can iterate over the heap visiting all objects. | 764 // in a state where we can iterate over the heap visiting all objects. |
760 void CollectAllGarbage( | 765 void CollectAllGarbage( |
761 int flags, const char* gc_reason = NULL, | 766 int flags, const char* gc_reason = NULL, |
762 const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags); | 767 const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags); |
763 | 768 |
764 // Last hope GC, should try to squeeze as much as possible. | 769 // Last hope GC, should try to squeeze as much as possible. |
765 void CollectAllAvailableGarbage(const char* gc_reason = NULL); | 770 void CollectAllAvailableGarbage(const char* gc_reason = NULL); |
766 | 771 |
(...skipping 1817 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
2584 DisallowHeapAllocation no_allocation; // i.e. no gc allowed. | 2589 DisallowHeapAllocation no_allocation; // i.e. no gc allowed. |
2585 | 2590 |
2586 private: | 2591 private: |
2587 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer); | 2592 DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer); |
2588 }; | 2593 }; |
2589 #endif // DEBUG | 2594 #endif // DEBUG |
2590 } | 2595 } |
2591 } // namespace v8::internal | 2596 } // namespace v8::internal |
2592 | 2597 |
2593 #endif // V8_HEAP_HEAP_H_ | 2598 #endif // V8_HEAP_HEAP_H_ |
OLD | NEW |