Chromium Code Reviews

Index: tools/perf/benchmarks/blink_perf.py
diff --git a/tools/perf/benchmarks/blink_perf.py b/tools/perf/benchmarks/blink_perf.py
index fc2b6f82fe36699ff6153893456c90f8d2870711..97b8f471f36f1a99fdb6486ef04bcab68181f6d0 100644
--- a/tools/perf/benchmarks/blink_perf.py
+++ b/tools/perf/benchmarks/blink_perf.py
@@ -12,8 +12,14 @@ from telemetry import page as page_module
 from telemetry.page import legacy_page_test
 from telemetry.page import shared_page_state
 from telemetry import story
+from telemetry.timeline import bounds
+from telemetry.timeline import model as model_module
+from telemetry.timeline import tracing_config
+
 from telemetry.value import list_of_scalar_values
 from telemetry.value import scalar
+from telemetry.value import trace
+
 from benchmarks import pywebsocket_server
 from measurements import timeline_controller
@@ -98,8 +104,83 @@ class _BlinkPerfMeasurement(legacy_page_test.LegacyPageTest):
     if 'content-shell' in options.browser_type:
       options.AppendExtraBrowserArgs('--expose-internals-for-testing')
 
+  def _ContinueTestRunWithTracing(self, tab):
+    tracing_categories = tab.EvaluateJavaScript(
+        'testRunner.tracingCategories')
+    config = tracing_config.TracingConfig()
+    config.enable_chrome_trace = True
+    config.chrome_trace_config.category_filter.AddFilterString(
+        'blink.console')  # This is always required for js land trace event
+    config.chrome_trace_config.category_filter.AddFilterString(
+        tracing_categories)
+    tab.browser.platform.tracing_controller.StartTracing(config)
+    tab.EvaluateJavaScript('testRunner.scheduleTestRun()')
+    tab.WaitForJavaScriptCondition('testRunner.isDone')
+    return tab.browser.platform.tracing_controller.StopTracing()
+
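[Editor's note] The recorded trace is the union of the always-added 'blink.console' category (which carries the JS-side blink_perf.runTest markers) and whatever the test requested via testRunner.tracingCategories. A rough sketch of that composition, assuming AddFilterString accepts comma-separated category names; the requested value here is hypothetical:

    # Illustrative only, not part of the patch.
    requested = 'blink,cc,v8'  # hypothetical testRunner.tracingCategories
    recorded = ['blink.console'] + requested.split(',')
    assert recorded == ['blink.console', 'blink', 'cc', 'v8']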
+  def _ComputeTraceEventsThreadTime(self, trace_results,
+                                    trace_events_to_measure, tab_id):
+    trace_cpu_time_metrics = {}
+
+    # Collect the bounds of "blink_perf.runTest" events.
+    model = model_module.TimelineModel(trace_results)
+    renderer_thread = model.GetRendererThreadFromTabId(tab_id)
+    test_runs_bounds = []
+    for event in renderer_thread.async_slices:
+      if event.name == "blink_perf.runTest":
+        test_runs_bounds.append(bounds.Bounds.CreateFromEvent(event))
+    test_runs_bounds.sort(key=lambda b: b.min)
+
+    for t in trace_events_to_measure:
+      trace_cpu_time_metrics[t] = [0.0] * len(test_runs_bounds)
+
+    for event_name in trace_events_to_measure:
+      curr_test_runs_bound_index = 0
+      for event in renderer_thread.IterAllSlicesOfName(event_name):
+        while (curr_test_runs_bound_index < len(test_runs_bounds) and
+               event.start > test_runs_bounds[curr_test_runs_bound_index].max):
+          curr_test_runs_bound_index += 1
|
Xianzhu (2017/04/21 01:31:51): Nit: remove extra space before '1'.

nednguyen (2017/04/24 20:55:21): Done.
+        if curr_test_runs_bound_index >= len(test_runs_bounds):
+          break
+        curr_test_bound = test_runs_bounds[curr_test_runs_bound_index]
+        intersect_wall_time = bounds.Bounds.GetOverlapBetweenBounds(
+            curr_test_bound, bounds.Bounds.CreateFromEvent(event))
+        intersect_cpu_time = (
+            intersect_wall_time * event.thread_duration / event.duration)
+        trace_cpu_time_metrics[event_name][curr_test_runs_bound_index] += (
+            intersect_cpu_time)
+
+    return trace_cpu_time_metrics
+
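[Editor's note] The attribution above assumes an event's thread (CPU) time accrues uniformly across its wall-clock duration, so each test run is credited in proportion to its wall-clock overlap with the event. A self-contained sketch of that split; the function name and numbers are illustrative, not from the patch:

    def proportional_cpu_time(overlap_wall, event_wall, event_thread):
      # Mirrors intersect_wall_time * event.thread_duration / event.duration,
      # assuming thread time is spread evenly over the event's wall time.
      return overlap_wall * event_thread / event_wall

    # An event with 10ms wall time and 6ms thread time that overlaps a test
    # run for 5ms contributes 5 * 6 / 10 = 3.0ms of CPU time to that run.
    assert proportional_cpu_time(5.0, 10.0, 6.0) == 3.0

Note also that the sweep keeps a single advancing index, which relies on test_runs_bounds being sorted (done above) and on IterAllSlicesOfName yielding slices in increasing start order.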
+  def PrintAndCollectTraceEventMetrics(self, trace_cpu_time_metrics, results):
+    unit = 'ms'
+    for trace_event_name, cpu_times in trace_cpu_time_metrics.iteritems():
+      print 'CPU times of trace event "%s":' % trace_event_name
+      cpu_times_string = ', '.join(['{0:.10f}'.format(t) for t in cpu_times])
+      print 'values %s %s' % (cpu_times_string, unit)
+      avg = 0.0
+      if cpu_times:
+        avg = sum(cpu_times) / len(cpu_times)
+      print 'avg', '{0:.10f}'.format(avg), unit
+      results.AddValue(list_of_scalar_values.ListOfScalarValues(
+          results.current_page, name=trace_event_name, units=unit,
+          values=cpu_times))
+      print '\n'
+
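[Editor's note] For reference, the print statements above emit output of roughly this shape for each traced event; the event name and values here are invented for illustration:

    CPU times of trace event "HitTest":
    values 1.2345678901, 1.0987654321 ms
    avg 1.1666666611 ms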
   def ValidateAndMeasurePage(self, page, tab, results):
-    tab.WaitForJavaScriptCondition('testRunner.isDone', timeout=600)
+    tab.WaitForJavaScriptCondition(
+        'testRunner.isDone || testRunner.isWaitingForTracingStart', timeout=600)
Xianzhu (2017/04/21 01:31:51): Nit: indent

nednguyen (2017/04/24 20:55:21): Done.
+    trace_cpu_time_metrics = {}
+    if tab.EvaluateJavaScript('testRunner.isWaitingForTracingStart'):
+      trace_result = self._ContinueTestRunWithTracing(tab)
+      trace_value = trace.TraceValue(page, trace_result)
+      results.AddValue(trace_value)
+      trace_events_to_measure = tab.EvaluateJavaScript(
+          'window.testRunner.traceEventsToMeasure')
+      trace_cpu_time_metrics = self._ComputeTraceEventsThreadTime(
+          trace_result, trace_events_to_measure, tab.id)
     log = tab.EvaluateJavaScript('document.getElementById("log").innerHTML')
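[Editor's note] Taken together, ValidateAndMeasurePage now implements a two-phase handshake with the test page. A condensed restatement of the sequencing in this patch (comments only, no new behavior):

    # 1. Wait until the page either finishes ('testRunner.isDone') or parks
    #    itself in 'testRunner.isWaitingForTracingStart'.
    # 2. If it is waiting, _ContinueTestRunWithTracing() starts tracing with
    #    the requested categories, calls testRunner.scheduleTestRun(), waits
    #    for isDone, and returns the recorded trace.
    # 3. _ComputeTraceEventsThreadTime() turns each name in
    #    window.testRunner.traceEventsToMeasure into per-run CPU times.
    # 4. PrintAndCollectTraceEventMetrics() logs them and adds a
    #    ListOfScalarValues per event to the results.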
@@ -120,6 +201,8 @@ class _BlinkPerfMeasurement(legacy_page_test.LegacyPageTest):
     print log
 
+    self.PrintAndCollectTraceEventMetrics(trace_cpu_time_metrics, results)
+
 
 # TODO(wangxianzhu): Convert the paint benchmarks to use the new blink_perf
 # tracing once it's ready.
@@ -200,6 +283,12 @@ class BlinkPerfBindings(_BlinkPerfBenchmark):
             possible_browser.platform.GetOSName() == 'android'))
 
+# TODO(nednguyen): remove this before landing code
+class BlinkPerfTest(_BlinkPerfBenchmark):
+  tag = 'testing'
+  subdir = 'TestData'
+
+
 @benchmark.Enabled('content-shell')
 class BlinkPerfBlinkGC(_BlinkPerfBenchmark):
   tag = 'blink_gc'