Index: src/counters.h
diff --git a/src/counters.h b/src/counters.h
index f778f556be6118a5657f5e785ce0d2c7cacfd374..d2da19c37aca432383d6830fffe010043e19c5a5 100644
--- a/src/counters.h
+++ b/src/counters.h
@@ -425,130 +425,128 @@ class HistogramTimerScope BASE_EMBEDDED {
   SC(store_buffer_overflows, V8.StoreBufferOverflows)
-#define STATS_COUNTER_LIST_2(SC) \
-  /* Number of code stubs. */ \
-  SC(code_stubs, V8.CodeStubs) \
-  /* Amount of stub code. */ \
-  SC(total_stubs_code_size, V8.TotalStubsCodeSize) \
-  /* Amount of (JS) compiled code. */ \
-  SC(total_compiled_code_size, V8.TotalCompiledCodeSize) \
-  SC(gc_compactor_caused_by_request, V8.GCCompactorCausedByRequest) \
-  SC(gc_compactor_caused_by_promoted_data, \
-      V8.GCCompactorCausedByPromotedData) \
-  SC(gc_compactor_caused_by_oldspace_exhaustion, \
-      V8.GCCompactorCausedByOldspaceExhaustion) \
-  SC(gc_last_resort_from_js, V8.GCLastResortFromJS) \
-  SC(gc_last_resort_from_handles, V8.GCLastResortFromHandles) \
-  /* How is the generic keyed-load stub used? */ \
-  SC(keyed_load_generic_smi, V8.KeyedLoadGenericSmi) \
-  SC(keyed_load_generic_symbol, V8.KeyedLoadGenericSymbol) \
-  SC(keyed_load_generic_lookup_cache, V8.KeyedLoadGenericLookupCache) \
-  SC(keyed_load_generic_slow, V8.KeyedLoadGenericSlow) \
-  SC(keyed_load_polymorphic_stubs, V8.KeyedLoadPolymorphicStubs) \
-  SC(keyed_load_external_array_slow, V8.KeyedLoadExternalArraySlow) \
-  /* How is the generic keyed-call stub used? */ \
-  SC(keyed_call_generic_smi_fast, V8.KeyedCallGenericSmiFast) \
-  SC(keyed_call_generic_smi_dict, V8.KeyedCallGenericSmiDict) \
-  SC(keyed_call_generic_lookup_cache, V8.KeyedCallGenericLookupCache) \
-  SC(keyed_call_generic_lookup_dict, V8.KeyedCallGenericLookupDict) \
-  SC(keyed_call_generic_slow, V8.KeyedCallGenericSlow) \
-  SC(keyed_call_generic_slow_load, V8.KeyedCallGenericSlowLoad) \
-  SC(named_load_global_stub, V8.NamedLoadGlobalStub) \
-  SC(named_store_global_inline, V8.NamedStoreGlobalInline) \
-  SC(named_store_global_inline_miss, V8.NamedStoreGlobalInlineMiss) \
-  SC(keyed_store_polymorphic_stubs, V8.KeyedStorePolymorphicStubs) \
-  SC(keyed_store_external_array_slow, V8.KeyedStoreExternalArraySlow) \
-  SC(store_normal_miss, V8.StoreNormalMiss) \
-  SC(store_normal_hit, V8.StoreNormalHit) \
-  SC(cow_arrays_created_stub, V8.COWArraysCreatedStub) \
-  SC(cow_arrays_created_runtime, V8.COWArraysCreatedRuntime) \
-  SC(cow_arrays_converted, V8.COWArraysConverted) \
-  SC(call_miss, V8.CallMiss) \
-  SC(keyed_call_miss, V8.KeyedCallMiss) \
-  SC(load_miss, V8.LoadMiss) \
-  SC(keyed_load_miss, V8.KeyedLoadMiss) \
-  SC(call_const, V8.CallConst) \
-  SC(call_const_fast_api, V8.CallConstFastApi) \
-  SC(call_const_interceptor, V8.CallConstInterceptor) \
-  SC(call_const_interceptor_fast_api, V8.CallConstInterceptorFastApi) \
-  SC(call_global_inline, V8.CallGlobalInline) \
-  SC(call_global_inline_miss, V8.CallGlobalInlineMiss) \
-  SC(constructed_objects, V8.ConstructedObjects) \
-  SC(constructed_objects_runtime, V8.ConstructedObjectsRuntime) \
-  SC(negative_lookups, V8.NegativeLookups) \
-  SC(negative_lookups_miss, V8.NegativeLookupsMiss) \
-  SC(megamorphic_stub_cache_probes, V8.MegamorphicStubCacheProbes) \
-  SC(megamorphic_stub_cache_misses, V8.MegamorphicStubCacheMisses) \
-  SC(megamorphic_stub_cache_updates, V8.MegamorphicStubCacheUpdates) \
-  SC(array_function_runtime, V8.ArrayFunctionRuntime) \
-  SC(array_function_native, V8.ArrayFunctionNative) \
-  SC(for_in, V8.ForIn) \
-  SC(enum_cache_hits, V8.EnumCacheHits) \
-  SC(enum_cache_misses, V8.EnumCacheMisses) \
-  SC(zone_segment_bytes, V8.ZoneSegmentBytes) \
-  SC(fast_new_closure_total, V8.FastNewClosureTotal) \
-  SC(fast_new_closure_try_optimized, V8.FastNewClosureTryOptimized) \
-  SC(fast_new_closure_install_optimized, V8.FastNewClosureInstallOptimized) \
-  SC(string_add_runtime, V8.StringAddRuntime) \
-  SC(string_add_native, V8.StringAddNative) \
-  SC(string_add_runtime_ext_to_ascii, V8.StringAddRuntimeExtToAscii) \
-  SC(sub_string_runtime, V8.SubStringRuntime) \
-  SC(sub_string_native, V8.SubStringNative) \
-  SC(string_add_make_two_char, V8.StringAddMakeTwoChar) \
-  SC(string_compare_native, V8.StringCompareNative) \
-  SC(string_compare_runtime, V8.StringCompareRuntime) \
-  SC(regexp_entry_runtime, V8.RegExpEntryRuntime) \
-  SC(regexp_entry_native, V8.RegExpEntryNative) \
-  SC(number_to_string_native, V8.NumberToStringNative) \
-  SC(number_to_string_runtime, V8.NumberToStringRuntime) \
-  SC(math_acos, V8.MathAcos) \
-  SC(math_asin, V8.MathAsin) \
-  SC(math_atan, V8.MathAtan) \
-  SC(math_atan2, V8.MathAtan2) \
-  SC(math_exp, V8.MathExp) \
-  SC(math_floor, V8.MathFloor) \
-  SC(math_log, V8.MathLog) \
-  SC(math_pow, V8.MathPow) \
-  SC(math_round, V8.MathRound) \
-  SC(math_sqrt, V8.MathSqrt) \
-  SC(stack_interrupts, V8.StackInterrupts) \
-  SC(runtime_profiler_ticks, V8.RuntimeProfilerTicks) \
-  SC(bounds_checks_eliminated, V8.BoundsChecksEliminated) \
-  SC(bounds_checks_hoisted, V8.BoundsChecksHoisted) \
-  SC(soft_deopts_requested, V8.SoftDeoptsRequested) \
-  SC(soft_deopts_inserted, V8.SoftDeoptsInserted) \
-  SC(soft_deopts_executed, V8.SoftDeoptsExecuted) \
-  /* Number of write barriers in generated code. */ \
-  SC(write_barriers_dynamic, V8.WriteBarriersDynamic) \
-  SC(write_barriers_static, V8.WriteBarriersStatic) \
-  SC(new_space_bytes_available, V8.MemoryNewSpaceBytesAvailable) \
-  SC(new_space_bytes_committed, V8.MemoryNewSpaceBytesCommitted) \
-  SC(new_space_bytes_used, V8.MemoryNewSpaceBytesUsed) \
-  SC(old_pointer_space_bytes_available, \
-      V8.MemoryOldPointerSpaceBytesAvailable) \
-  SC(old_pointer_space_bytes_committed, \
-      V8.MemoryOldPointerSpaceBytesCommitted) \
-  SC(old_pointer_space_bytes_used, V8.MemoryOldPointerSpaceBytesUsed) \
-  SC(old_data_space_bytes_available, V8.MemoryOldDataSpaceBytesAvailable) \
-  SC(old_data_space_bytes_committed, V8.MemoryOldDataSpaceBytesCommitted) \
-  SC(old_data_space_bytes_used, V8.MemoryOldDataSpaceBytesUsed) \
-  SC(code_space_bytes_available, V8.MemoryCodeSpaceBytesAvailable) \
-  SC(code_space_bytes_committed, V8.MemoryCodeSpaceBytesCommitted) \
-  SC(code_space_bytes_used, V8.MemoryCodeSpaceBytesUsed) \
-  SC(map_space_bytes_available, V8.MemoryMapSpaceBytesAvailable) \
-  SC(map_space_bytes_committed, V8.MemoryMapSpaceBytesCommitted) \
-  SC(map_space_bytes_used, V8.MemoryMapSpaceBytesUsed) \
-  SC(cell_space_bytes_available, V8.MemoryCellSpaceBytesAvailable) \
-  SC(cell_space_bytes_committed, V8.MemoryCellSpaceBytesCommitted) \
-  SC(cell_space_bytes_used, V8.MemoryCellSpaceBytesUsed) \
-  SC(property_cell_space_bytes_available, \
-      V8.MemoryPropertyCellSpaceBytesAvailable) \
-  SC(property_cell_space_bytes_committed, \
-      V8.MemoryPropertyCellSpaceBytesCommitted) \
-  SC(property_cell_space_bytes_used, \
-      V8.MemoryPropertyCellSpaceBytesUsed) \
-  SC(lo_space_bytes_available, V8.MemoryLoSpaceBytesAvailable) \
-  SC(lo_space_bytes_committed, V8.MemoryLoSpaceBytesCommitted) \
+#define STATS_COUNTER_LIST_2(SC) \
+  /* Number of code stubs. */ \
+  SC(code_stubs, V8.CodeStubs) \
+  /* Amount of stub code. */ \
+  SC(total_stubs_code_size, V8.TotalStubsCodeSize) \
+  /* Amount of (JS) compiled code. */ \
+  SC(total_compiled_code_size, V8.TotalCompiledCodeSize) \
+  SC(gc_compactor_caused_by_request, V8.GCCompactorCausedByRequest) \
+  SC(gc_compactor_caused_by_promoted_data, V8.GCCompactorCausedByPromotedData) \
+  SC(gc_compactor_caused_by_oldspace_exhaustion, \
+      V8.GCCompactorCausedByOldspaceExhaustion) \
+  SC(gc_last_resort_from_js, V8.GCLastResortFromJS) \
+  SC(gc_last_resort_from_handles, V8.GCLastResortFromHandles) \
+  /* How is the generic keyed-load stub used? */ \
+  SC(keyed_load_generic_smi, V8.KeyedLoadGenericSmi) \
+  SC(keyed_load_generic_symbol, V8.KeyedLoadGenericSymbol) \
+  SC(keyed_load_generic_lookup_cache, V8.KeyedLoadGenericLookupCache) \
+  SC(keyed_load_generic_slow, V8.KeyedLoadGenericSlow) \
+  SC(keyed_load_polymorphic_stubs, V8.KeyedLoadPolymorphicStubs) \
+  SC(keyed_load_external_array_slow, V8.KeyedLoadExternalArraySlow) \
+  /* How is the generic keyed-call stub used? */ \
+  SC(keyed_call_generic_smi_fast, V8.KeyedCallGenericSmiFast) \
+  SC(keyed_call_generic_smi_dict, V8.KeyedCallGenericSmiDict) \
+  SC(keyed_call_generic_lookup_cache, V8.KeyedCallGenericLookupCache) \
+  SC(keyed_call_generic_lookup_dict, V8.KeyedCallGenericLookupDict) \
+  SC(keyed_call_generic_slow, V8.KeyedCallGenericSlow) \
+  SC(keyed_call_generic_slow_load, V8.KeyedCallGenericSlowLoad) \
+  SC(named_load_global_stub, V8.NamedLoadGlobalStub) \
+  SC(named_store_global_inline, V8.NamedStoreGlobalInline) \
+  SC(named_store_global_inline_miss, V8.NamedStoreGlobalInlineMiss) \
+  SC(keyed_store_polymorphic_stubs, V8.KeyedStorePolymorphicStubs) \
+  SC(keyed_store_external_array_slow, V8.KeyedStoreExternalArraySlow) \
+  SC(store_normal_miss, V8.StoreNormalMiss) \
+  SC(store_normal_hit, V8.StoreNormalHit) \
+  SC(cow_arrays_created_stub, V8.COWArraysCreatedStub) \
+  SC(cow_arrays_created_runtime, V8.COWArraysCreatedRuntime) \
+  SC(cow_arrays_converted, V8.COWArraysConverted) \
+  SC(call_miss, V8.CallMiss) \
+  SC(keyed_call_miss, V8.KeyedCallMiss) \
+  SC(load_miss, V8.LoadMiss) \
+  SC(keyed_load_miss, V8.KeyedLoadMiss) \
+  SC(call_const, V8.CallConst) \
+  SC(call_const_fast_api, V8.CallConstFastApi) \
+  SC(call_const_interceptor, V8.CallConstInterceptor) \
+  SC(call_const_interceptor_fast_api, V8.CallConstInterceptorFastApi) \
+  SC(call_global_inline, V8.CallGlobalInline) \
+  SC(call_global_inline_miss, V8.CallGlobalInlineMiss) \
+  SC(constructed_objects, V8.ConstructedObjects) \
+  SC(constructed_objects_runtime, V8.ConstructedObjectsRuntime) \
+  SC(negative_lookups, V8.NegativeLookups) \
+  SC(negative_lookups_miss, V8.NegativeLookupsMiss) \
+  SC(megamorphic_stub_cache_probes, V8.MegamorphicStubCacheProbes) \
+  SC(megamorphic_stub_cache_misses, V8.MegamorphicStubCacheMisses) \
+  SC(megamorphic_stub_cache_updates, V8.MegamorphicStubCacheUpdates) \
+  SC(array_function_runtime, V8.ArrayFunctionRuntime) \
+  SC(array_function_native, V8.ArrayFunctionNative) \
+  SC(for_in, V8.ForIn) \
+  SC(enum_cache_hits, V8.EnumCacheHits) \
+  SC(enum_cache_misses, V8.EnumCacheMisses) \
+  SC(zone_segment_bytes, V8.ZoneSegmentBytes) \
+  SC(fast_new_closure_total, V8.FastNewClosureTotal) \
+  SC(fast_new_closure_try_optimized, V8.FastNewClosureTryOptimized) \
+  SC(fast_new_closure_install_optimized, V8.FastNewClosureInstallOptimized) \
+  SC(string_add_runtime, V8.StringAddRuntime) \
+  SC(string_add_native, V8.StringAddNative) \
+  SC(string_add_runtime_ext_to_one_byte, V8.StringAddRuntimeExtToOneByte) \
+  SC(sub_string_runtime, V8.SubStringRuntime) \
+  SC(sub_string_native, V8.SubStringNative) \
+  SC(string_add_make_two_char, V8.StringAddMakeTwoChar) \
+  SC(string_compare_native, V8.StringCompareNative) \
+  SC(string_compare_runtime, V8.StringCompareRuntime) \
+  SC(regexp_entry_runtime, V8.RegExpEntryRuntime) \
+  SC(regexp_entry_native, V8.RegExpEntryNative) \
+  SC(number_to_string_native, V8.NumberToStringNative) \
+  SC(number_to_string_runtime, V8.NumberToStringRuntime) \
+  SC(math_acos, V8.MathAcos) \
+  SC(math_asin, V8.MathAsin) \
+  SC(math_atan, V8.MathAtan) \
+  SC(math_atan2, V8.MathAtan2) \
+  SC(math_exp, V8.MathExp) \
+  SC(math_floor, V8.MathFloor) \
+  SC(math_log, V8.MathLog) \
+  SC(math_pow, V8.MathPow) \
+  SC(math_round, V8.MathRound) \
+  SC(math_sqrt, V8.MathSqrt) \
+  SC(stack_interrupts, V8.StackInterrupts) \
+  SC(runtime_profiler_ticks, V8.RuntimeProfilerTicks) \
+  SC(bounds_checks_eliminated, V8.BoundsChecksEliminated) \
+  SC(bounds_checks_hoisted, V8.BoundsChecksHoisted) \
+  SC(soft_deopts_requested, V8.SoftDeoptsRequested) \
+  SC(soft_deopts_inserted, V8.SoftDeoptsInserted) \
+  SC(soft_deopts_executed, V8.SoftDeoptsExecuted) \
+  /* Number of write barriers in generated code. */ \
+  SC(write_barriers_dynamic, V8.WriteBarriersDynamic) \
+  SC(write_barriers_static, V8.WriteBarriersStatic) \
+  SC(new_space_bytes_available, V8.MemoryNewSpaceBytesAvailable) \
+  SC(new_space_bytes_committed, V8.MemoryNewSpaceBytesCommitted) \
+  SC(new_space_bytes_used, V8.MemoryNewSpaceBytesUsed) \
+  SC(old_pointer_space_bytes_available, \
+      V8.MemoryOldPointerSpaceBytesAvailable) \
+  SC(old_pointer_space_bytes_committed, \
+      V8.MemoryOldPointerSpaceBytesCommitted) \
+  SC(old_pointer_space_bytes_used, V8.MemoryOldPointerSpaceBytesUsed) \
+  SC(old_data_space_bytes_available, V8.MemoryOldDataSpaceBytesAvailable) \
+  SC(old_data_space_bytes_committed, V8.MemoryOldDataSpaceBytesCommitted) \
+  SC(old_data_space_bytes_used, V8.MemoryOldDataSpaceBytesUsed) \
+  SC(code_space_bytes_available, V8.MemoryCodeSpaceBytesAvailable) \
+  SC(code_space_bytes_committed, V8.MemoryCodeSpaceBytesCommitted) \
+  SC(code_space_bytes_used, V8.MemoryCodeSpaceBytesUsed) \
+  SC(map_space_bytes_available, V8.MemoryMapSpaceBytesAvailable) \
+  SC(map_space_bytes_committed, V8.MemoryMapSpaceBytesCommitted) \
+  SC(map_space_bytes_used, V8.MemoryMapSpaceBytesUsed) \
+  SC(cell_space_bytes_available, V8.MemoryCellSpaceBytesAvailable) \
+  SC(cell_space_bytes_committed, V8.MemoryCellSpaceBytesCommitted) \
+  SC(cell_space_bytes_used, V8.MemoryCellSpaceBytesUsed) \
+  SC(property_cell_space_bytes_available, \
+      V8.MemoryPropertyCellSpaceBytesAvailable) \
+  SC(property_cell_space_bytes_committed, \
+      V8.MemoryPropertyCellSpaceBytesCommitted) \
+  SC(property_cell_space_bytes_used, V8.MemoryPropertyCellSpaceBytesUsed) \
+  SC(lo_space_bytes_available, V8.MemoryLoSpaceBytesAvailable) \
+  SC(lo_space_bytes_committed, V8.MemoryLoSpaceBytesCommitted) \
SC(lo_space_bytes_used, V8.MemoryLoSpaceBytesUsed) |
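For context on the macro being reformatted above: STATS_COUNTER_LIST_2 is an X-macro list, so each SC(name, caption) entry is expanded by whatever definition of SC is in force at the point where the list is instantiated. Below is a minimal, self-contained sketch of that expansion technique only; TOY_COUNTER_LIST, ToyCounters, and the plain int fields are hypothetical stand-ins for illustration and do not appear in counters.h, which expands its lists into StatsCounter members and accessors instead.

// Sketch of the X-macro expansion technique used by STATS_COUNTER_LIST_2.
// TOY_COUNTER_LIST and ToyCounters are illustrative, not code from counters.h.
#include <cstdio>

#define TOY_COUNTER_LIST(SC)                       \
  SC(code_stubs, V8.CodeStubs)                     \
  SC(total_stubs_code_size, V8.TotalStubsCodeSize)

class ToyCounters {
 public:
  // First expansion: declare one int field per list entry.
#define SC(name, caption) int name##_ = 0;
  TOY_COUNTER_LIST(SC)
#undef SC

  void Dump() const {
    // Second expansion: print every counter, stringizing the caption token.
#define SC(name, caption) std::printf("%s = %d\n", #caption, name##_);
    TOY_COUNTER_LIST(SC)
#undef SC
  }
};

int main() {
  ToyCounters counters;
  counters.code_stubs_ += 3;
  counters.Dump();  // Walks the whole list; prints both captions and values.
  return 0;
}

Because the single list drives every expansion, adding, removing, or renaming a counter (for example the ascii to one_byte rename in this patch) only has to happen in one place.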