OLD | NEW |
1 # Copyright 2013 The Chromium Authors. All rights reserved. | 1 # Copyright 2013 The Chromium Authors. All rights reserved. |
2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
4 | 4 |
5 import os | 5 import os |
6 | 6 |
7 from core import path_util | 7 from core import path_util |
8 from core import perf_benchmark | 8 from core import perf_benchmark |
9 | 9 |
10 from telemetry import benchmark | 10 from telemetry import benchmark |
11 from telemetry import page as page_module | 11 from telemetry import page as page_module |
12 from telemetry.page import legacy_page_test | 12 from telemetry.page import legacy_page_test |
13 from telemetry.page import shared_page_state | 13 from telemetry.page import shared_page_state |
14 from telemetry import story | 14 from telemetry import story |
| 15 from telemetry.timeline import bounds |
| 16 from telemetry.timeline import model as model_module |
| 17 from telemetry.timeline import tracing_config |
| 18 |
15 from telemetry.value import list_of_scalar_values | 19 from telemetry.value import list_of_scalar_values |
16 from telemetry.value import scalar | 20 from telemetry.value import scalar |
| 21 from telemetry.value import trace |
| 22 |
17 | 23 |
18 from benchmarks import pywebsocket_server | 24 from benchmarks import pywebsocket_server |
19 from measurements import timeline_controller | 25 from measurements import timeline_controller |
20 from page_sets import webgl_supported_shared_state | 26 from page_sets import webgl_supported_shared_state |
21 | 27 |
22 | 28 |
# Root directory of the Blink performance test suites in a Chromium checkout.
BLINK_PERF_BASE_DIR = os.path.join(path_util.GetChromiumSrcDir(),
                                   'third_party', 'WebKit', 'PerformanceTests')
# File listing tests to exclude when building story sets from these suites.
SKIPPED_FILE = os.path.join(BLINK_PERF_BASE_DIR, 'Skipped')
26 | 32 |
(...skipping 39 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
66 _AddPage(path) | 72 _AddPage(path) |
67 ps = story.StorySet(base_dir=os.getcwd() + os.sep, | 73 ps = story.StorySet(base_dir=os.getcwd() + os.sep, |
68 serving_dirs=serving_dirs) | 74 serving_dirs=serving_dirs) |
69 for url in page_urls: | 75 for url in page_urls: |
70 ps.AddStory(page_module.Page( | 76 ps.AddStory(page_module.Page( |
71 url, ps, ps.base_dir, | 77 url, ps, ps.base_dir, |
72 shared_page_state_class=shared_page_state_class)) | 78 shared_page_state_class=shared_page_state_class)) |
73 return ps | 79 return ps |
74 | 80 |
75 | 81 |
def _ComputeTraceEventsThreadTimeForBlinkPerf(
    renderer_thread, trace_events_to_measure):
  """ Compute the CPU duration for each of |trace_events_to_measure| during
  blink_perf test.

  Args:
    renderer_thread: the renderer thread which run blink_perf test.
    trace_events_to_measure: a list of string names of trace events to measure
      CPU duration for.

  Returns:
    a dictionary in which each key is a trace event' name (from
    |trace_events_to_measure| list), and value is a list of numbers that
    represents to total cpu time of that trace events in each blink_perf test.
  """
  # One "blink_perf.runTest" async slice is emitted per test run; collect
  # their wall-time bounds in ascending start order.
  run_bounds = sorted(
      (bounds.Bounds.CreateFromEvent(s)
       for s in renderer_thread.async_slices
       if s.name == "blink_perf.runTest"),
      key=lambda b: b.min)

  # Pre-fill every measured event with one zero accumulator per test run.
  trace_cpu_time_metrics = dict(
      (name, [0.0] * len(run_bounds)) for name in trace_events_to_measure)

  for event_name in trace_events_to_measure:
    run_index = 0
    for event in renderer_thread.IterAllSlicesOfName(event_name):
      # Slices are iterated in ascending start order, so advance past every
      # test-run bound that ends before this event starts.
      while (run_index < len(run_bounds) and
             event.start > run_bounds[run_index].max):
        run_index += 1
      if run_index == len(run_bounds):
        break
      overlap_wall_time = bounds.Bounds.GetOverlapBetweenBounds(
          run_bounds[run_index], bounds.Bounds.CreateFromEvent(event))
      # Scale the wall-clock overlap by the event's CPU/wall ratio when both
      # durations are known and non-zero; otherwise fall back to wall time.
      if event.thread_duration and event.duration:
        overlap_cpu_time = (
            overlap_wall_time * event.thread_duration / event.duration)
      else:
        overlap_cpu_time = overlap_wall_time
      trace_cpu_time_metrics[event_name][run_index] += overlap_cpu_time

  return trace_cpu_time_metrics
| 130 |
| 131 |
class _BlinkPerfMeasurement(legacy_page_test.LegacyPageTest):
  """Runs a blink performance test and reports the results."""

  def __init__(self):
    super(_BlinkPerfMeasurement, self).__init__()
    # Load the blink_perf JS harness once; it is injected into every page
    # at commit time (see WillNavigateToPage).
    with open(os.path.join(os.path.dirname(__file__),
                           'blink_perf.js'), 'r') as f:
      self._blink_perf_js = f.read()

  def WillNavigateToPage(self, page, tab):
    """Injects the blink_perf harness so it runs when the page commits."""
    del tab  # unused
    page.script_to_evaluate_on_commit = self._blink_perf_js

  def CustomizeBrowserOptions(self, options):
    """Adds the browser flags the blink_perf harness depends on."""
    options.AppendExtraBrowserArgs([
        '--js-flags=--expose_gc',
        '--enable-experimental-web-platform-features',
        '--disable-gesture-requirement-for-media-playback',
        '--enable-experimental-canvas-features',
        # TODO(qinmin): After fixing crbug.com/592017, remove this command line.
        '--reduce-security-for-testing'
    ])
    if 'content-shell' in options.browser_type:
      options.AppendExtraBrowserArgs('--expose-internals-for-testing')

  def _ContinueTestRunWithTracing(self, tab):
    """Starts tracing with the categories the test requested, resumes the
    paused test run, and returns the collected trace data once it is done.

    Called when the harness paused in the isWaitingForTracingStart state
    (see ValidateAndMeasurePage).
    """
    tracing_categories = tab.EvaluateJavaScript(
        'testRunner.tracingCategories')
    config = tracing_config.TracingConfig()
    config.enable_chrome_trace = True
    config.chrome_trace_config.category_filter.AddFilterString(
        'blink.console')  # This is always required for js land trace event
    config.chrome_trace_config.category_filter.AddFilterString(
        tracing_categories)
    tab.browser.platform.tracing_controller.StartTracing(config)
    tab.EvaluateJavaScript('testRunner.scheduleTestRun()')
    tab.WaitForJavaScriptCondition('testRunner.isDone')
    return tab.browser.platform.tracing_controller.StopTracing()


  def PrintAndCollectTraceEventMetrics(self, trace_cpu_time_metrics, results):
    """Prints per-trace-event CPU times and adds them to |results|.

    Args:
      trace_cpu_time_metrics: dict mapping a trace event name to a list of
          CPU times (one entry per test run), as produced by
          _ComputeTraceEventsThreadTimeForBlinkPerf.
      results: the page test results object to add values to.
    """
    unit = 'ms'
    print
    for trace_event_name, cpu_times in trace_cpu_time_metrics.iteritems():
      print 'CPU times of trace event "%s":' % trace_event_name
      cpu_times_string = ', '.join(['{0:.10f}'.format(t) for t in cpu_times])
      print 'values %s %s' % (cpu_times_string, unit)
      avg = 0.0
      if cpu_times:
        avg = sum(cpu_times)/len(cpu_times)
      print 'avg', '{0:.10f}'.format(avg), unit
      results.AddValue(list_of_scalar_values.ListOfScalarValues(
          results.current_page, name=trace_event_name, units=unit,
          values=cpu_times))
      print
    print '\n'

  def ValidateAndMeasurePage(self, page, tab, results):
    """Waits for the test to finish (running it under tracing if the harness
    asks for it), then parses the in-page log into result values."""
    # The harness either finishes outright or pauses to request tracing.
    tab.WaitForJavaScriptCondition(
        'testRunner.isDone || testRunner.isWaitingForTracingStart', timeout=600)
    trace_cpu_time_metrics = {}
    if tab.EvaluateJavaScript('testRunner.isWaitingForTracingStart'):
      trace_data = self._ContinueTestRunWithTracing(tab)
      trace_value = trace.TraceValue(page, trace_data)
      results.AddValue(trace_value)

      trace_events_to_measure = tab.EvaluateJavaScript(
          'window.testRunner.traceEventsToMeasure')
      model = model_module.TimelineModel(trace_data)
      renderer_thread = model.GetRendererThreadFromTabId(tab.id)
      trace_cpu_time_metrics = _ComputeTraceEventsThreadTimeForBlinkPerf(
          renderer_thread, trace_events_to_measure)

    log = tab.EvaluateJavaScript('document.getElementById("log").innerHTML')

    # Only the first "values ..." line is parsed; FATAL lines are echoed.
    for line in log.splitlines():
      if line.startswith("FATAL: "):
        print line
        continue
      if not line.startswith('values '):
        continue
      parts = line.split()
      # Values may contain thousands separators, e.g. "1,234.5".
      values = [float(v.replace(',', '')) for v in parts[1:-1]]
      units = parts[-1]
      metric = page.display_name.split('.')[0].replace('/', '_')
      results.AddValue(list_of_scalar_values.ListOfScalarValues(
          results.current_page, metric, units, values))

      break

    print log

    self.PrintAndCollectTraceEventMetrics(trace_cpu_time_metrics, results)
123 | 226 |
124 # TODO(wangxianzhu): Convert the paint benchmarks to use the new blink_perf | 227 # TODO(wangxianzhu): Convert the paint benchmarks to use the new blink_perf |
125 # tracing once it's ready. | 228 # tracing once it's ready. |
126 class _BlinkPerfPaintMeasurement(_BlinkPerfMeasurement): | 229 class _BlinkPerfPaintMeasurement(_BlinkPerfMeasurement): |
127 """Also collects prePaint and paint timing from traces.""" | 230 """Also collects prePaint and paint timing from traces.""" |
128 | 231 |
129 def __init__(self): | 232 def __init__(self): |
130 super(_BlinkPerfPaintMeasurement, self).__init__() | 233 super(_BlinkPerfPaintMeasurement, self).__init__() |
131 self._controller = None | 234 self._controller = None |
132 | 235 |
(...skipping 182 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
315 | 418 |
316 def CreateStorySet(self, options): | 419 def CreateStorySet(self, options): |
317 path = os.path.join(BLINK_PERF_BASE_DIR, self.subdir) | 420 path = os.path.join(BLINK_PERF_BASE_DIR, self.subdir) |
318 return CreateStorySetFromPath( | 421 return CreateStorySetFromPath( |
319 path, SKIPPED_FILE, | 422 path, SKIPPED_FILE, |
320 shared_page_state_class=_SharedPywebsocketPageState) | 423 shared_page_state_class=_SharedPywebsocketPageState) |
321 | 424 |
322 @classmethod | 425 @classmethod |
323 def ShouldDisable(cls, possible_browser): | 426 def ShouldDisable(cls, possible_browser): |
324 return cls.IsSvelte(possible_browser) # http://crbug.com/551950 | 427 return cls.IsSvelte(possible_browser) # http://crbug.com/551950 |
OLD | NEW |