| Index: src/heap/heap.h
|
| diff --git a/src/heap/heap.h b/src/heap/heap.h
|
| index f3830dd18bdc55bd04423728947b82f7d521085a..3eb3e1b5ea144cc3a0bb3bb9f1cbe60ffc50b024 100644
|
| --- a/src/heap/heap.h
|
| +++ b/src/heap/heap.h
|
| @@ -736,6 +736,9 @@ class Heap {
|
| static const int kReduceMemoryFootprintMask = 1;
|
| static const int kAbortIncrementalMarkingMask = 2;
|
|
|
| + // Maximum number of GCs we perform in response to a low memory notification.
|
| + static const int kMaxEmergencyGCs = 7;
|
| +
|
| // Making the heap iterable requires us to abort incremental marking.
|
| static const int kMakeHeapIterableMask = kAbortIncrementalMarkingMask;
|
|
|
| @@ -747,7 +750,8 @@ class Heap {
|
| const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
|
|
|
| // Last hope GC, should try to squeeze as much as possible.
|
| - void CollectAllAvailableGarbage(const char* gc_reason = NULL);
|
| + void CollectAllAvailableGarbage(const char* gc_reason = NULL,
|
| + int maxGCs = kMaxEmergencyGCs);
|
|
|
| // Check whether the heap is currently iterable.
|
| bool IsHeapIterable();
|
| @@ -985,6 +989,9 @@ class Heap {
|
|
|
| inline bool IsInGCPostProcessing() { return gc_post_processing_depth_ > 0; }
|
|
|
| + bool flush_eagerly() { return flush_eagerly_; }
|
| + void set_flush_eagerly(bool to) { flush_eagerly_ = to; }
|
| +
|
| #ifdef DEBUG
|
| void set_allocation_timeout(int timeout) { allocation_timeout_ = timeout; }
|
|
|
| @@ -1551,6 +1558,9 @@ class Heap {
|
| int allocation_timeout_;
|
| #endif // DEBUG
|
|
|
| + // When true, requests eager flushing; set during emergency memory-freeing GCs.
|
| + bool flush_eagerly_;
|
| +
|
| // Limit that triggers a global GC on the next (normally caused) GC. This
|
| // is checked when we have already decided to do a GC to help determine
|
| // which collector to invoke, before expanding a paged space in the old
|
| @@ -2129,6 +2139,17 @@ class HeapStats {
|
| };
|
|
|
|
|
| +class FlushEagerly {
|
| + public:
|
| + explicit inline FlushEagerly(Heap* heap);
|
| + inline ~FlushEagerly();
|
| +
|
| + private:
|
| + Heap* heap_;
|
| + bool old_state_;
|
| +};
|
| +
|
| +
|
| class AlwaysAllocateScope {
|
| public:
|
| explicit inline AlwaysAllocateScope(Isolate* isolate);
|
|
|