OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_COUNTERS_H_ | 5 #ifndef V8_COUNTERS_H_ |
6 #define V8_COUNTERS_H_ | 6 #define V8_COUNTERS_H_ |
7 | 7 |
8 #include "include/v8.h" | 8 #include "include/v8.h" |
9 #include "src/allocation.h" | 9 #include "src/allocation.h" |
10 #include "src/base/platform/elapsed-timer.h" | 10 #include "src/base/platform/elapsed-timer.h" |
(...skipping 792 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
803 std::unique_ptr<char[]> buffer_c_str_; | 803 std::unique_ptr<char[]> buffer_c_str_; |
804 size_t len_ = 0; | 804 size_t len_ = 0; |
805 // Counter to track recursive time events. | 805 // Counter to track recursive time events. |
806 RuntimeCallTimer* current_timer_ = NULL; | 806 RuntimeCallTimer* current_timer_ = NULL; |
807 // Used to track nested tracing scopes. | 807 // Used to track nested tracing scopes. |
808 bool in_use_; | 808 bool in_use_; |
809 }; | 809 }; |
810 | 810 |
811 #define TRACE_RUNTIME_CALL_STATS(isolate, counter_name) \ | 811 #define TRACE_RUNTIME_CALL_STATS(isolate, counter_name) \ |
812 do { \ | 812 do { \ |
813 if (FLAG_runtime_call_stats) { \ | 813 if (V8_UNLIKELY(TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_ENABLED())) { \ |
| 814 RuntimeCallStats::CorrectCurrentCounterId( \ |
| 815 isolate->counters()->tracing_runtime_call_stats(), \ |
| 816 &RuntimeCallStats::counter_name); \ |
| 817 } else if (FLAG_runtime_call_stats) { \ |
814 RuntimeCallStats::CorrectCurrentCounterId( \ | 818 RuntimeCallStats::CorrectCurrentCounterId( \ |
815 isolate->counters()->runtime_call_stats(), \ | 819 isolate->counters()->runtime_call_stats(), \ |
816 &RuntimeCallStats::counter_name); \ | 820 &RuntimeCallStats::counter_name); \ |
817 } \ | 821 } \ |
818 if (V8_UNLIKELY(TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_ENABLED())) { \ | |
819 RuntimeCallStats::CorrectCurrentCounterId( \ | |
820 isolate->counters()->tracing_runtime_call_stats(), \ | |
821 &RuntimeCallStats::counter_name); \ | |
822 } \ | |
823 } while (false) | 822 } while (false) |
824 | 823 |
825 #define TRACE_HANDLER_STATS(isolate, counter_name) \ | 824 #define TRACE_HANDLER_STATS(isolate, counter_name) \ |
826 TRACE_RUNTIME_CALL_STATS(isolate, Handler_##counter_name) | 825 TRACE_RUNTIME_CALL_STATS(isolate, Handler_##counter_name) |
827 | 826 |
828 #define HISTOGRAM_RANGE_LIST(HR) \ | 827 #define HISTOGRAM_RANGE_LIST(HR) \ |
829 /* Generic range histograms */ \ | 828 /* Generic range histograms */ \ |
830 HR(detached_context_age_in_gc, V8.DetachedContextAgeInGC, 0, 20, 21) \ | 829 HR(detached_context_age_in_gc, V8.DetachedContextAgeInGC, 0, 20, 21) \ |
831 HR(gc_idle_time_allotted_in_ms, V8.GCIdleTimeAllottedInMS, 0, 10000, 101) \ | 830 HR(gc_idle_time_allotted_in_ms, V8.GCIdleTimeAllottedInMS, 0, 10000, 101) \ |
832 HR(gc_idle_time_limit_overshot, V8.GCIdleTimeLimit.Overshot, 0, 10000, 101) \ | 831 HR(gc_idle_time_limit_overshot, V8.GCIdleTimeLimit.Overshot, 0, 10000, 101) \ |
(...skipping 409 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1242 explicit Counters(Isolate* isolate); | 1241 explicit Counters(Isolate* isolate); |
1243 | 1242 |
1244 DISALLOW_IMPLICIT_CONSTRUCTORS(Counters); | 1243 DISALLOW_IMPLICIT_CONSTRUCTORS(Counters); |
1245 }; | 1244 }; |
1246 | 1245 |
1247 // A RuntimeCallTimerScope wraps around a RuntimeCallTimer to measure the | 1246 // A RuntimeCallTimerScope wraps around a RuntimeCallTimer to measure the |
1248 // time of a C++ scope. | 1247 // time of a C++ scope. |
1249 class RuntimeCallTimerScope { | 1248 class RuntimeCallTimerScope { |
1250 public: | 1249 public: |
1251 inline RuntimeCallTimerScope(Isolate* isolate, | 1250 inline RuntimeCallTimerScope(Isolate* isolate, |
1252 RuntimeCallStats::CounterId counter_id) { | 1251 RuntimeCallStats::CounterId counter_id); |
1253 if (V8_UNLIKELY(FLAG_runtime_call_stats)) { | |
1254 isolate_ = isolate; | |
1255 RuntimeCallStats::Enter(isolate_->counters()->runtime_call_stats(), | |
1256 &timer_, counter_id); | |
1257 } | |
1258 if (V8_UNLIKELY(TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_ENABLED())) { | |
1259 isolate_for_tracing_ = isolate; | |
1260 RuntimeCallStats::Enter( | |
1261 isolate_for_tracing_->counters()->tracing_runtime_call_stats(), | |
1262 &trace_event_timer_, counter_id); | |
1263 } | |
1264 } | |
1265 // This constructor is here just to avoid calling GetIsolate() when the | 1252 // This constructor is here just to avoid calling GetIsolate() when the |
1266 // stats are disabled and the isolate is not directly available. | 1253 // stats are disabled and the isolate is not directly available. |
1267 inline RuntimeCallTimerScope(HeapObject* heap_object, | 1254 inline RuntimeCallTimerScope(HeapObject* heap_object, |
1268 RuntimeCallStats::CounterId counter_id); | 1255 RuntimeCallStats::CounterId counter_id); |
1269 | 1256 |
1270 inline ~RuntimeCallTimerScope() { | 1257 inline ~RuntimeCallTimerScope() { |
1271 if (V8_UNLIKELY(FLAG_runtime_call_stats)) { | 1258 if (V8_UNLIKELY(isolate_ != nullptr)) { |
1272 RuntimeCallStats::Leave(isolate_->counters()->runtime_call_stats(), | 1259 if (is_tracing_) { |
1273 &timer_); | 1260 RuntimeCallStats::Leave( |
1274 } | 1261 isolate_->counters()->tracing_runtime_call_stats(), &timer_); |
1275 if (V8_UNLIKELY(isolate_for_tracing_ != nullptr)) { | 1262 } else { |
1276 RuntimeCallStats::Leave( | 1263 RuntimeCallStats::Leave(isolate_->counters()->runtime_call_stats(), |
1277 isolate_for_tracing_->counters()->tracing_runtime_call_stats(), | 1264 &timer_); |
1278 &trace_event_timer_); | 1265 } |
1279 isolate_for_tracing_ = nullptr; | |
1280 } | 1266 } |
1281 } | 1267 } |
1282 | 1268 |
1283 private: | 1269 private: |
1284 Isolate* isolate_; | 1270 Isolate* isolate_ = nullptr; |
1285 // TODO(lpy): --runtime-call-stats and tracing should be mutually exclusive | 1271 bool is_tracing_ = false; |
1286 // with tracing taking precedence. We need to add checks, and use a single | |
1287 // isolate reference and a timer for both. | |
1288 Isolate* isolate_for_tracing_ = nullptr; | |
1289 RuntimeCallTimer timer_; | 1272 RuntimeCallTimer timer_; |
1290 RuntimeCallTimer trace_event_timer_; | |
1291 }; | 1273 }; |
1292 | 1274 |
1293 } // namespace internal | 1275 } // namespace internal |
1294 } // namespace v8 | 1276 } // namespace v8 |
1295 | 1277 |
1296 #endif // V8_COUNTERS_H_ | 1278 #endif // V8_COUNTERS_H_ |
OLD | NEW |