Index: tools/perf/benchmarks/metric_dependencies.py
diff --git a/tools/perf/benchmarks/metric_dependencies.py b/tools/perf/benchmarks/metric_dependencies.py
new file mode 100644
index 0000000000000000000000000000000000000000..ba667deba8d1d3dec1cae32d6c3615b4bda82b92
--- /dev/null
+++ b/tools/perf/benchmarks/metric_dependencies.py
@@ -0,0 +1,49 @@
+# Copyright 2017 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+_PREFIX_FOR_EXCLUDED_CATEGORY = '-'
+_PREFIX_FOR_DISABLED_BY_DEFAULT_CATEGORY = 'disabled-by-default-'
+
+# If you add a new metric, register its trace category dependencies in this
+# table and call AugmentOptionsForMetrics from the benchmark configuration.
+# TODO(ulan): Add all existing metrics to this table.
+_CATEGORIES_FOR_METRICS = {
+  'expectedQueueingTimeMetric': [
+    'blink.console',  # For ranges in cache_temperature.MarkTelemetryInternal.
+    'blink.user_timing',  # For core navigation events.
+    'loading',  # For first-meaningful-paint computation.
+    'navigation',  # For core navigation events.
+    'toplevel',  # For time-to-interactive computation.
+  ],
+  'loadingMetric': [
+    'blink.console',  # For ranges in cache_temperature.MarkTelemetryInternal.
+    'blink.user_timing',  # For core navigation events.
+    'loading',  # For first-meaningful-paint computation.
+    'navigation',  # For core navigation events.
+    'toplevel',  # For time-to-interactive computation.
+  ],
+}
+
+def AugmentOptionsForMetrics(tbm_options, metrics):
+  """For each metric in the given list, adds its category dependencies to
+  the tracing config of the given tbm_options.
+  Args:
+    tbm_options: An instance of timeline_based_measurement.Options.
+    metrics: A list of timeline-based metric names.
+  """
+  category_filter = tbm_options.config.chrome_trace_config.category_filter
+  for metric in metrics:
+    assert metric in _CATEGORIES_FOR_METRICS, 'Unknown metric: %s' % metric
+    categories = _CATEGORIES_FOR_METRICS[metric]
+    for category in categories:
+      if category.startswith(_PREFIX_FOR_EXCLUDED_CATEGORY):
+        category_filter.AddExcludedCategory(category)
+      elif category.startswith(_PREFIX_FOR_DISABLED_BY_DEFAULT_CATEGORY):
+        category_filter.AddDisabledByDefault(category)
+      else:
+        category_filter.AddIncludedCategory(category)
+
+def SetMetricDependenciesForTesting(categories_for_metrics):
+  global _CATEGORIES_FOR_METRICS
+ _CATEGORIES_FOR_METRICS = categories_for_metrics |
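
Usage sketch (not part of the patch): a benchmark that wants these category
dependencies would build its TBM options roughly as below. The
CreateTimelineBasedOptions helper and the import paths are illustrative
assumptions; SetTimelineBasedMetrics is assumed to be the Telemetry call that
selects which metrics run.

  # Illustrative sketch only, not part of this patch.
  from benchmarks import metric_dependencies
  from telemetry.web_perf import timeline_based_measurement

  def CreateTimelineBasedOptions():
    metrics = ['loadingMetric', 'expectedQueueingTimeMetric']
    options = timeline_based_measurement.Options()
    # Assumed Telemetry API for choosing which TBMv2 metrics to compute.
    options.SetTimelineBasedMetrics(metrics)
    # Adds the trace categories those metrics depend on.
    metric_dependencies.AugmentOptionsForMetrics(options, metrics)
    return options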
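
SetMetricDependenciesForTesting lets unit tests swap in a fake dependency
table. A minimal test sketch, with the fake table and test class name
invented here:

  # Hypothetical unit-test sketch, not part of this patch.
  import unittest
  from benchmarks import metric_dependencies
  from telemetry.web_perf import timeline_based_measurement

  class MetricDependenciesTest(unittest.TestCase):
    def testAugmentOptionsForMetrics(self):
      # Install a fake table so the test does not depend on the real one.
      metric_dependencies.SetMetricDependenciesForTesting(
          {'fakeMetric': ['cat1', 'disabled-by-default-cat2']})
      options = timeline_based_measurement.Options()
      # A known metric adds its categories without raising.
      metric_dependencies.AugmentOptionsForMetrics(options, ['fakeMetric'])
      # An unknown metric trips the assert in AugmentOptionsForMetrics.
      with self.assertRaises(AssertionError):
        metric_dependencies.AugmentOptionsForMetrics(options, ['unknown'])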