OLD | NEW |
1 # Copyright 2013 The Chromium Authors. All rights reserved. | 1 # Copyright 2013 The Chromium Authors. All rights reserved. |
2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
4 | 4 |
5 import os | 5 import os |
6 | 6 |
7 from core import path_util | 7 from core import path_util |
8 from core import perf_benchmark | 8 from core import perf_benchmark |
9 | 9 |
10 from telemetry import benchmark | 10 from telemetry import benchmark |
11 from telemetry import page as page_module | 11 from telemetry import page as page_module |
12 from telemetry.page import legacy_page_test | 12 from telemetry.page import legacy_page_test |
13 from telemetry.page import shared_page_state | 13 from telemetry.page import shared_page_state |
14 from telemetry import story | 14 from telemetry import story |
| 15 from telemetry.timeline import bounds |
| 16 from telemetry.timeline import model as model_module |
| 17 from telemetry.timeline import tracing_config |
| 18 |
15 from telemetry.value import list_of_scalar_values | 19 from telemetry.value import list_of_scalar_values |
16 from telemetry.value import scalar | 20 from telemetry.value import scalar |
| 21 from telemetry.value import trace |
| 22 |
17 | 23 |
18 from benchmarks import pywebsocket_server | 24 from benchmarks import pywebsocket_server |
19 from measurements import timeline_controller | 25 from measurements import timeline_controller |
20 from page_sets import webgl_supported_shared_state | 26 from page_sets import webgl_supported_shared_state |
21 | 27 |
22 | 28 |
# Root directory of the Blink performance test suites in a Chromium checkout.
BLINK_PERF_BASE_DIR = os.path.join(path_util.GetChromiumSrcDir(),
                                   'third_party', 'WebKit', 'PerformanceTests')
# File passed to CreateStorySetFromPath as the skip list; presumably names
# tests to exclude from the story set — parsing code is outside this view.
SKIPPED_FILE = os.path.join(BLINK_PERF_BASE_DIR, 'Skipped')
26 | 32 |
(...skipping 39 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
66 _AddPage(path) | 72 _AddPage(path) |
67 ps = story.StorySet(base_dir=os.getcwd() + os.sep, | 73 ps = story.StorySet(base_dir=os.getcwd() + os.sep, |
68 serving_dirs=serving_dirs) | 74 serving_dirs=serving_dirs) |
69 for url in page_urls: | 75 for url in page_urls: |
70 ps.AddStory(page_module.Page( | 76 ps.AddStory(page_module.Page( |
71 url, ps, ps.base_dir, | 77 url, ps, ps.base_dir, |
72 shared_page_state_class=shared_page_state_class)) | 78 shared_page_state_class=shared_page_state_class)) |
73 return ps | 79 return ps |
74 | 80 |
75 | 81 |
def _ComputeTraceEventsThreadTimeForBlinkPerf(
    renderer_thread, trace_events_to_measure):
  """Computes the CPU duration of |trace_events_to_measure| per blink_perf run.

  Args:
    renderer_thread: the renderer thread which runs the blink_perf test.
    trace_events_to_measure: a list of string names of trace events to measure
      CPU duration for.

  Returns:
    A dictionary in which each key is a trace event's name (from the
    |trace_events_to_measure| list), and each value is a list of numbers
    representing the total CPU time of that trace event in each blink_perf
    test run.
  """
  trace_cpu_time_metrics = {}

  # Collect the wall-time bounds of the "blink_perf.runTest" async events;
  # each one delimits a single test run.
  test_runs_bounds = []
  for event in renderer_thread.async_slices:
    if event.name == "blink_perf.runTest":
      test_runs_bounds.append(bounds.Bounds.CreateFromEvent(event))
  test_runs_bounds.sort(key=lambda b: b.min)

  for trace_event_name in trace_events_to_measure:
    trace_cpu_time_metrics[trace_event_name] = [0.0] * len(test_runs_bounds)

  for event_name in trace_events_to_measure:
    curr_test_runs_bound_index = 0
    for event in renderer_thread.IterAllSlicesOfName(event_name):
      # Advance past test runs that ended before this event started.
      # NOTE(review): this assumes IterAllSlicesOfName yields slices in
      # ascending start order — confirm against the timeline model.
      while (curr_test_runs_bound_index < len(test_runs_bounds) and
             event.start > test_runs_bounds[curr_test_runs_bound_index].max):
        curr_test_runs_bound_index += 1
      if curr_test_runs_bound_index >= len(test_runs_bounds):
        break
      # A zero-duration slice contributes no CPU time; skip it rather than
      # dividing by zero below.
      if not event.duration:
        continue
      curr_test_bound = test_runs_bounds[curr_test_runs_bound_index]
      intersect_wall_time = bounds.Bounds.GetOverlapBetweenBounds(
          curr_test_bound, bounds.Bounds.CreateFromEvent(event))
      # Scale the event's CPU time by the fraction of its wall time that
      # overlaps the current test run.
      intersect_cpu_time = (
          intersect_wall_time * event.thread_duration / event.duration)
      trace_cpu_time_metrics[event_name][curr_test_runs_bound_index] += (
          intersect_cpu_time)

  return trace_cpu_time_metrics
| 126 |
| 127 |
| 128 |
class _BlinkPerfMeasurement(legacy_page_test.LegacyPageTest):
  """Runs a blink performance test and reports the results."""

  def __init__(self):
    super(_BlinkPerfMeasurement, self).__init__()
    # Read the JS test harness once; it is injected into every page before
    # the page's own scripts run (see WillNavigateToPage).
    with open(os.path.join(os.path.dirname(__file__),
                           'blink_perf.js'), 'r') as f:
      self._blink_perf_js = f.read()

  def WillNavigateToPage(self, page, tab):
    del tab  # unused
    # Evaluate the harness on commit so it is present before the test page's
    # own scripts execute.
    page.script_to_evaluate_on_commit = self._blink_perf_js

  def CustomizeBrowserOptions(self, options):
    # Browser flags the blink performance tests rely on.
    options.AppendExtraBrowserArgs([
        '--js-flags=--expose_gc',
        '--enable-experimental-web-platform-features',
        '--disable-gesture-requirement-for-media-playback',
        '--enable-experimental-canvas-features',
        # TODO(qinmin): After fixing crbug.com/592017, remove this command line.
        '--reduce-security-for-testing'
    ])
    if 'content-shell' in options.browser_type:
      options.AppendExtraBrowserArgs('--expose-internals-for-testing')

  def _ContinueTestRunWithTracing(self, tab):
    """Starts tracing, resumes the paused test, and returns the trace data.

    Called when the JS harness has signalled (via isWaitingForTracingStart)
    that it wants tracing enabled before the test runs.
    """
    tracing_categories = tab.EvaluateJavaScript(
        'testRunner.tracingCategories')
    config = tracing_config.TracingConfig()
    config.enable_chrome_trace = True
    config.chrome_trace_config.category_filter.AddFilterString(
        'blink.console')  # This is always required for js land trace event
    config.chrome_trace_config.category_filter.AddFilterString(
        tracing_categories)
    tab.browser.platform.tracing_controller.StartTracing(config)
    # Tracing is on; let the harness run the test, then wait for completion.
    tab.EvaluateJavaScript('testRunner.scheduleTestRun()')
    tab.WaitForJavaScriptCondition('testRunner.isDone')
    return tab.browser.platform.tracing_controller.StopTracing()


  def PrintAndCollectTraceEventMetrics(self, trace_cpu_time_metrics, results):
    """Prints per-run CPU times of each measured trace event and adds them
    to |results| as ListOfScalarValues (one value per test run)."""
    unit = 'ms'
    print
    for trace_event_name, cpu_times in trace_cpu_time_metrics.iteritems():
      print 'CPU times of trace event "%s":' % trace_event_name
      cpu_times_string = ', '.join(['{0:.10f}'.format(t) for t in cpu_times])
      print 'values %s %s' % (cpu_times_string, unit)
      avg = 0.0
      if cpu_times:  # guard against an empty list (no test runs observed)
        avg = sum(cpu_times)/len(cpu_times)
      print 'avg', '{0:.10f}'.format(avg), unit
      results.AddValue(list_of_scalar_values.ListOfScalarValues(
          results.current_page, name=trace_event_name, units=unit,
          values=cpu_times))
      print
    print '\n'

  def ValidateAndMeasurePage(self, page, tab, results):
    # The harness either finishes outright (isDone) or pauses and asks for
    # tracing to be started first (isWaitingForTracingStart).
    tab.WaitForJavaScriptCondition(
        'testRunner.isDone || testRunner.isWaitingForTracingStart', timeout=600)
    trace_cpu_time_metrics = {}
    if tab.EvaluateJavaScript('testRunner.isWaitingForTracingStart'):
      trace_data = self._ContinueTestRunWithTracing(tab)
      trace_value = trace.TraceValue(page, trace_data)
      results.AddValue(trace_value)

      trace_events_to_measure = tab.EvaluateJavaScript(
          'window.testRunner.traceEventsToMeasure')
      model = model_module.TimelineModel(trace_data)
      renderer_thread = model.GetRendererThreadFromTabId(tab.id)
      trace_cpu_time_metrics = _ComputeTraceEventsThreadTimeForBlinkPerf(
          renderer_thread, trace_events_to_measure)

    log = tab.EvaluateJavaScript('document.getElementById("log").innerHTML')

    # The harness writes results into the #log element; pick out the first
    # "values <n, n, ...> <unit>" line and report it as a list-of-scalars.
    for line in log.splitlines():
      if line.startswith("FATAL: "):
        print line
        continue
      if not line.startswith('values '):
        continue
      parts = line.split()
      values = [float(v.replace(',', '')) for v in parts[1:-1]]
      units = parts[-1]
      # e.g. display_name "foo/bar.html" -> metric "foo_bar".
      metric = page.display_name.split('.')[0].replace('/', '_')
      results.AddValue(list_of_scalar_values.ListOfScalarValues(
          results.current_page, metric, units, values))

      break

    print log

    self.PrintAndCollectTraceEventMetrics(trace_cpu_time_metrics, results)
123 | 223 |
124 # TODO(wangxianzhu): Convert the paint benchmarks to use the new blink_perf | 224 # TODO(wangxianzhu): Convert the paint benchmarks to use the new blink_perf |
125 # tracing once it's ready. | 225 # tracing once it's ready. |
126 class _BlinkPerfPaintMeasurement(_BlinkPerfMeasurement): | 226 class _BlinkPerfPaintMeasurement(_BlinkPerfMeasurement): |
127 """Also collects prePaint and paint timing from traces.""" | 227 """Also collects prePaint and paint timing from traces.""" |
128 | 228 |
  def __init__(self):
    super(_BlinkPerfPaintMeasurement, self).__init__()
    # Timeline controller used to collect paint timing from traces;
    # presumably assigned by setup code outside this view — starts as None.
    self._controller = None
132 | 232 |
(...skipping 189 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
322 | 422 |
  def CreateStorySet(self, options):
    """Builds the story set from pages under this benchmark's |subdir|,
    excluding entries from SKIPPED_FILE; stories share the pywebsocket
    page state so a pywebsocket server is available to each test."""
    path = os.path.join(BLINK_PERF_BASE_DIR, self.subdir)
    return CreateStorySetFromPath(
        path, SKIPPED_FILE,
        shared_page_state_class=_SharedPywebsocketPageState)
328 | 428 |
  @classmethod
  def ShouldDisable(cls, possible_browser):
    # Disabled on svelte (low-end Android) builds.
    return cls.IsSvelte(possible_browser)  # http://crbug.com/551950
OLD | NEW |