Index: src/heap/heap.cc
diff --git a/src/heap/heap.cc b/src/heap/heap.cc
index 54a8915788237436b72762d6cb303ed9f189725b..2ba75d0f27c6b40eeaec90ca3c2e25c2f0a95cb1 100644
--- a/src/heap/heap.cc
+++ b/src/heap/heap.cc
@@ -395,6 +395,20 @@ void Heap::ReportStatisticsAfterGC() {
 #else
   if (FLAG_log_gc) new_space_.ReportStatistics();
 #endif  // DEBUG
+  for (int i = 0; i < static_cast<int>(v8::Isolate::kUseCounterFeatureCount);
+       ++i) {
+    int count = deferred_counters_[i];
+    deferred_counters_[i] = 0;
+    while (count > 0) {
+      count--;
+      isolate()->CountUsage(static_cast<v8::Isolate::UseCounterFeature>(i));
+    }
+  }
+}
+
+
+void Heap::IncrementDeferredCount(v8::Isolate::UseCounterFeature feature) {
+  deferred_counters_[feature]++;
 }
 
 
@@ -925,6 +939,11 @@ bool Heap::CollectGarbage(GarbageCollector collector, const char* gc_reason,
     tracer()->Stop(collector);
   }
 
+  if (collector == MARK_COMPACTOR &&
+      (gc_callback_flags & kGCCallbackFlagForced) != 0) {
+    isolate()->CountUsage(v8::Isolate::kForcedGC);
+  }
+
   // Start incremental marking for the next cycle. The heap snapshot
   // generator needs incremental marking to stay off after it aborted.
   if (!mark_compact_collector()->abort_incremental_marking() &&
@@ -5422,6 +5441,12 @@ bool Heap::SetUp() {
     }
   }
 
+  for (int i = 0; i < static_cast<int>(v8::Isolate::kUseCounterFeatureCount);
+       i++) {
+    deferred_counters_[i] = 0;
+  }
+
+
   LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity()));
   LOG(isolate_, IntPtrTEvent("heap-available", Available()));
 
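
The first and third hunks above add a small deferred use-counter mechanism to Heap. Instead of reporting a feature the moment it is detected, Heap::IncrementDeferredCount() only bumps that feature's slot in deferred_counters_ (an array zero-initialised in Heap::SetUp()), and Heap::ReportStatisticsAfterGC() later replays the accumulated counts through Isolate::CountUsage(), so the embedder's use-counter callback runs after the collection rather than in the middle of it. The standalone sketch below shows the same pattern in isolation; it is an illustration only, not V8 code, and every name in it (Feature, DeferredUseCounters, Increment, Flush, the report callback) is invented for the example.

// deferred_counters_sketch.cc - standalone illustration of the pattern the
// patch adds to Heap; none of these names are real V8 APIs.
#include <array>
#include <cstdio>
#include <functional>

namespace sketch {

// Stand-in for v8::Isolate::UseCounterFeature.
enum Feature { kFeatureA = 0, kFeatureB, kFeatureCount };

class DeferredUseCounters {
 public:
  using ReportFn = std::function<void(Feature)>;

  // Mirrors the zero-initialisation loop added to Heap::SetUp().
  explicit DeferredUseCounters(ReportFn report) : report_(std::move(report)) {
    counts_.fill(0);
  }

  // Mirrors Heap::IncrementDeferredCount(): only touches plain memory, so it
  // is safe to call at a point where reporting is not.
  void Increment(Feature feature) { counts_[feature]++; }

  // Mirrors the loop added to Heap::ReportStatisticsAfterGC(): drain each
  // per-feature slot and report it count times.
  void Flush() {
    for (int i = 0; i < kFeatureCount; ++i) {
      int count = counts_[i];
      counts_[i] = 0;
      while (count > 0) {
        count--;
        report_(static_cast<Feature>(i));  // analogous to Isolate::CountUsage()
      }
    }
  }

 private:
  std::array<int, kFeatureCount> counts_;
  ReportFn report_;
};

}  // namespace sketch

int main() {
  sketch::DeferredUseCounters counters([](sketch::Feature f) {
    std::printf("use counter %d reported\n", static_cast<int>(f));
  });

  // While reporting is unsafe (in V8: during the collection), only defer.
  counters.Increment(sketch::kFeatureA);
  counters.Increment(sketch::kFeatureA);
  counters.Increment(sketch::kFeatureB);

  // Once it is safe again (in V8: ReportStatisticsAfterGC), flush.
  counters.Flush();
  return 0;
}

As in the patch, each slot is read and zeroed before the reporting loop runs, so a count added while reporting is in progress is kept for the next flush rather than lost.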
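
The second hunk is the externally visible part of the change: when a mark-compact collection was requested with kGCCallbackFlagForced, CollectGarbage() now reports the v8::Isolate::kForcedGC use counter once the collection has finished. Embedders receive such counts through the use-counter callback. The fragment below is a minimal sketch of the observer side under a few stated assumptions: the include path, the helper name InstallUseCounterObserver, and the idea that platform and isolate setup happen elsewhere are all assumptions of the example; only Isolate::SetUseCounterCallback() and the kForcedGC enumerator come from the V8 API itself.

// use_counter_observer.cc - fragment, not a full embedder. Assumes an already
// initialised v8::Isolate* and a V8 build that has the kForcedGC counter.
#include <atomic>
#include <cstdio>

#include "v8.h"  // include path is an assumption; adjust to your V8 checkout

namespace {

std::atomic<int> g_forced_gc_count{0};

// Signature matches v8::Isolate::UseCounterCallback; V8 invokes it whenever
// Isolate::CountUsage() fires for any UseCounterFeature.
void OnUseCounter(v8::Isolate* isolate,
                  v8::Isolate::UseCounterFeature feature) {
  (void)isolate;
  if (feature == v8::Isolate::kForcedGC) {
    int n = ++g_forced_gc_count;
    std::fprintf(stderr, "forced mark-compact GC observed (%d so far)\n", n);
  }
}

}  // namespace

// Call once after the isolate has been created.
void InstallUseCounterObserver(v8::Isolate* isolate) {
  isolate->SetUseCounterCallback(OnUseCounter);
}

One way to exercise the counter during manual testing is to run with --expose-gc and call the gc() builtin, which requests a forced collection; treat that as an assumption about the test setup rather than part of the patch.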