OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_COUNTERS_H_ | 5 #ifndef V8_COUNTERS_H_ |
6 #define V8_COUNTERS_H_ | 6 #define V8_COUNTERS_H_ |
7 | 7 |
8 #include "include/v8.h" | 8 #include "include/v8.h" |
9 #include "src/allocation.h" | 9 #include "src/allocation.h" |
10 #include "src/base/platform/elapsed-timer.h" | 10 #include "src/base/platform/elapsed-timer.h" |
(...skipping 792 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
803 std::unique_ptr<char[]> buffer_c_str_; | 803 std::unique_ptr<char[]> buffer_c_str_; |
804 size_t len_ = 0; | 804 size_t len_ = 0; |
805 // Counter to track recursive time events. | 805 // Counter to track recursive time events. |
806 RuntimeCallTimer* current_timer_ = NULL; | 806 RuntimeCallTimer* current_timer_ = NULL; |
807 // Used to track nested tracing scopes. | 807 // Used to track nested tracing scopes. |
808 bool in_use_; | 808 bool in_use_; |
809 }; | 809 }; |
810 | 810 |
811 #define TRACE_RUNTIME_CALL_STATS(isolate, counter_name) \ | 811 #define TRACE_RUNTIME_CALL_STATS(isolate, counter_name) \ |
812 do { \ | 812 do { \ |
813 if (FLAG_runtime_call_stats) { \ | 813 if (V8_UNLIKELY(TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_ENABLED())) { \ |
814 RuntimeCallStats::CorrectCurrentCounterId( \ | |
815 isolate->counters()->tracing_runtime_call_stats(), \ | |
816 &RuntimeCallStats::counter_name); \ | |
817 } else if (FLAG_runtime_call_stats) { \ | |
814 RuntimeCallStats::CorrectCurrentCounterId( \ | 818 RuntimeCallStats::CorrectCurrentCounterId( \ |
815 isolate->counters()->runtime_call_stats(), \ | 819 isolate->counters()->runtime_call_stats(), \ |
816 &RuntimeCallStats::counter_name); \ | 820 &RuntimeCallStats::counter_name); \ |
817 } \ | 821 } \ |
818 if (V8_UNLIKELY(TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_ENABLED())) { \ | |
819 RuntimeCallStats::CorrectCurrentCounterId( \ | |
820 isolate->counters()->tracing_runtime_call_stats(), \ | |
821 &RuntimeCallStats::counter_name); \ | |
822 } \ | |
823 } while (false) | 822 } while (false) |
824 | 823 |
825 #define TRACE_HANDLER_STATS(isolate, counter_name) \ | 824 #define TRACE_HANDLER_STATS(isolate, counter_name) \ |
826 TRACE_RUNTIME_CALL_STATS(isolate, Handler_##counter_name) | 825 TRACE_RUNTIME_CALL_STATS(isolate, Handler_##counter_name) |
827 | 826 |
828 #define HISTOGRAM_RANGE_LIST(HR) \ | 827 #define HISTOGRAM_RANGE_LIST(HR) \ |
829 /* Generic range histograms */ \ | 828 /* Generic range histograms */ \ |
830 HR(detached_context_age_in_gc, V8.DetachedContextAgeInGC, 0, 20, 21) \ | 829 HR(detached_context_age_in_gc, V8.DetachedContextAgeInGC, 0, 20, 21) \ |
831 HR(gc_idle_time_allotted_in_ms, V8.GCIdleTimeAllottedInMS, 0, 10000, 101) \ | 830 HR(gc_idle_time_allotted_in_ms, V8.GCIdleTimeAllottedInMS, 0, 10000, 101) \ |
832 HR(gc_idle_time_limit_overshot, V8.GCIdleTimeLimit.Overshot, 0, 10000, 101) \ | 831 HR(gc_idle_time_limit_overshot, V8.GCIdleTimeLimit.Overshot, 0, 10000, 101) \ |
(...skipping 407 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1240 | 1239 |
1241 DISALLOW_IMPLICIT_CONSTRUCTORS(Counters); | 1240 DISALLOW_IMPLICIT_CONSTRUCTORS(Counters); |
1242 }; | 1241 }; |
1243 | 1242 |
1244 // A RuntimeCallTimerScope wraps around a RuntimeCallTimer to measure the | 1243 // A RuntimeCallTimerScope wraps around a RuntimeCallTimer to measure the |
1245 // time of a C++ scope. | 1244 // time of a C++ scope. |
1246 class RuntimeCallTimerScope { | 1245 class RuntimeCallTimerScope { |
1247 public: | 1246 public: |
1248 inline RuntimeCallTimerScope(Isolate* isolate, | 1247 inline RuntimeCallTimerScope(Isolate* isolate, |
1249 RuntimeCallStats::CounterId counter_id) { | 1248 RuntimeCallStats::CounterId counter_id) { |
1250 if (V8_UNLIKELY(FLAG_runtime_call_stats)) { | 1249 if (V8_UNLIKELY(TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_ENABLED())) { |
1250 isolate_ = isolate; | |
1251 is_tracing_ = true; | |
1252 RuntimeCallStats::Enter( | |
1253 isolate_->counters()->tracing_runtime_call_stats(), &timer_, | |
1254 counter_id); | |
1255 } else if (V8_UNLIKELY(FLAG_runtime_call_stats)) { | |
1251 isolate_ = isolate; | 1256 isolate_ = isolate; |
1252 RuntimeCallStats::Enter(isolate_->counters()->runtime_call_stats(), | 1257 RuntimeCallStats::Enter(isolate_->counters()->runtime_call_stats(), |
1253 &timer_, counter_id); | 1258 &timer_, counter_id); |
1254 } | 1259 } |
1255 if (V8_UNLIKELY(TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_ENABLED())) { | |
1256 isolate_for_tracing_ = isolate; | |
1257 RuntimeCallStats::Enter( | |
1258 isolate_for_tracing_->counters()->tracing_runtime_call_stats(), | |
1259 &trace_event_timer_, counter_id); | |
1260 } | |
1261 } | 1260 } |
1262 // This constructor is here just to avoid calling GetIsolate() when the | 1261 // This constructor is here just to avoid calling GetIsolate() when the |
1263 // stats are disabled and the isolate is not directly available. | 1262 // stats are disabled and the isolate is not directly available. |
1264 inline RuntimeCallTimerScope(HeapObject* heap_object, | 1263 inline RuntimeCallTimerScope(HeapObject* heap_object, |
1265 RuntimeCallStats::CounterId counter_id); | 1264 RuntimeCallStats::CounterId counter_id); |
Camillo Bruni
2016/09/07 02:00:29
nit: could you declare both constructors in the same place?
lpy
2016/09/07 16:48:09
Done.
| |
1266 | 1265 |
1267 inline ~RuntimeCallTimerScope() { | 1266 inline ~RuntimeCallTimerScope() { |
1268 if (V8_UNLIKELY(FLAG_runtime_call_stats)) { | 1267 if (V8_UNLIKELY(isolate_ != nullptr)) { |
1269 RuntimeCallStats::Leave(isolate_->counters()->runtime_call_stats(), | 1268 if (is_tracing_) { |
1270 &timer_); | 1269 RuntimeCallStats::Leave( |
1271 } | 1270 isolate_->counters()->tracing_runtime_call_stats(), &timer_); |
1272 if (V8_UNLIKELY(isolate_for_tracing_ != nullptr)) { | 1271 } else { |
1273 RuntimeCallStats::Leave( | 1272 RuntimeCallStats::Leave(isolate_->counters()->runtime_call_stats(), |
1274 isolate_for_tracing_->counters()->tracing_runtime_call_stats(), | 1273 &timer_); |
1275 &trace_event_timer_); | 1274 } |
1276 isolate_for_tracing_ = nullptr; | |
1277 } | 1275 } |
1278 } | 1276 } |
1279 | 1277 |
1280 private: | 1278 private: |
1281 Isolate* isolate_; | 1279 Isolate* isolate_ = nullptr; |
1282 // TODO(lpy): --runtime-call-stats and tracing should be mutually exclusive | 1280 bool is_tracing_ = false; |
1283 // with tracing taking precedence. We need to add checks, and use a single | |
1284 // isolate reference and a timer for both. | |
1285 Isolate* isolate_for_tracing_ = nullptr; | |
1286 RuntimeCallTimer timer_; | 1281 RuntimeCallTimer timer_; |
1287 RuntimeCallTimer trace_event_timer_; | |
1288 }; | 1282 }; |
1289 | 1283 |
1290 } // namespace internal | 1284 } // namespace internal |
1291 } // namespace v8 | 1285 } // namespace v8 |
1292 | 1286 |
1293 #endif // V8_COUNTERS_H_ | 1287 #endif // V8_COUNTERS_H_ |
OLD | NEW |