OLD | NEW |
1 # Copyright 2013 The Chromium Authors. All rights reserved. | 1 # Copyright 2013 The Chromium Authors. All rights reserved. |
2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
4 | 4 |
5 import os | 5 import os |
6 | 6 |
7 from core import path_util | 7 from core import path_util |
8 from core import perf_benchmark | 8 from core import perf_benchmark |
9 | 9 |
10 from telemetry import benchmark | 10 from telemetry import benchmark |
11 from telemetry import page as page_module | 11 from telemetry import page as page_module |
12 from telemetry.page import legacy_page_test | 12 from telemetry.page import legacy_page_test |
13 from telemetry.page import shared_page_state | 13 from telemetry.page import shared_page_state |
14 from telemetry import story | 14 from telemetry import story |
15 from telemetry.timeline import bounds | |
16 from telemetry.timeline import model as model_module | |
17 from telemetry.timeline import tracing_config | |
18 | |
19 from telemetry.value import list_of_scalar_values | 15 from telemetry.value import list_of_scalar_values |
20 from telemetry.value import scalar | 16 from telemetry.value import scalar |
21 from telemetry.value import trace | |
22 | |
23 | 17 |
24 from benchmarks import pywebsocket_server | 18 from benchmarks import pywebsocket_server |
25 from measurements import timeline_controller | 19 from measurements import timeline_controller |
26 from page_sets import webgl_supported_shared_state | 20 from page_sets import webgl_supported_shared_state |
27 | 21 |
28 | 22 |
29 BLINK_PERF_BASE_DIR = os.path.join(path_util.GetChromiumSrcDir(), | 23 BLINK_PERF_BASE_DIR = os.path.join(path_util.GetChromiumSrcDir(), |
30 'third_party', 'WebKit', 'PerformanceTests') | 24 'third_party', 'WebKit', 'PerformanceTests') |
31 SKIPPED_FILE = os.path.join(BLINK_PERF_BASE_DIR, 'Skipped') | 25 SKIPPED_FILE = os.path.join(BLINK_PERF_BASE_DIR, 'Skipped') |
32 | 26 |
(...skipping 39 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
72 _AddPage(path) | 66 _AddPage(path) |
73 ps = story.StorySet(base_dir=os.getcwd() + os.sep, | 67 ps = story.StorySet(base_dir=os.getcwd() + os.sep, |
74 serving_dirs=serving_dirs) | 68 serving_dirs=serving_dirs) |
75 for url in page_urls: | 69 for url in page_urls: |
76 ps.AddStory(page_module.Page( | 70 ps.AddStory(page_module.Page( |
77 url, ps, ps.base_dir, | 71 url, ps, ps.base_dir, |
78 shared_page_state_class=shared_page_state_class)) | 72 shared_page_state_class=shared_page_state_class)) |
79 return ps | 73 return ps |
80 | 74 |
81 | 75 |
82 def _ComputeTraceEventsThreadTimeForBlinkPerf( | |
83 renderer_thread, trace_events_to_measure): | |
84 """ Compute the CPU duration for each of |trace_events_to_measure| during | |
85 blink_perf test. | |
86 | |
87 Args: | |
88 renderer_thread: the renderer thread which runs the blink_perf test. | |
89 trace_events_to_measure: a list of string names of trace events to measure | |
90 CPU duration for. | |
91 | |
92 Returns: | |
93 a dictionary in which each key is a trace event's name (from | |
94 |trace_events_to_measure| list), and each value is a list of numbers that | |
95 represents the total cpu time of that trace event in each blink_perf test. | |
96 """ | |
97 trace_cpu_time_metrics = {} | |
98 | |
99 # Collect the bounds of "blink_perf.runTest" events. | |
100 test_runs_bounds = [] | |
101 for event in renderer_thread.async_slices: | |
102 if event.name == "blink_perf.runTest": | |
103 test_runs_bounds.append(bounds.Bounds.CreateFromEvent(event)) | |
104 test_runs_bounds.sort(key=lambda b: b.min) | |
105 | |
106 for t in trace_events_to_measure: | |
107 trace_cpu_time_metrics[t] = [0.0] * len(test_runs_bounds) | |
108 | |
109 for event_name in trace_events_to_measure: | |
110 curr_test_runs_bound_index = 0 | |
111 for event in renderer_thread.IterAllSlicesOfName(event_name): | |
112 while (curr_test_runs_bound_index < len(test_runs_bounds) and | |
113 event.start > test_runs_bounds[curr_test_runs_bound_index].max): | |
114 curr_test_runs_bound_index += 1 | |
115 if curr_test_runs_bound_index >= len(test_runs_bounds): | |
116 break | |
117 curr_test_bound = test_runs_bounds[curr_test_runs_bound_index] | |
118 intersect_wall_time = bounds.Bounds.GetOverlapBetweenBounds( | |
119 curr_test_bound, bounds.Bounds.CreateFromEvent(event)) | |
120 intersect_cpu_time = ( | |
121 intersect_wall_time * event.thread_duration / event.duration) | |
122 trace_cpu_time_metrics[event_name][curr_test_runs_bound_index] += ( | |
123 intersect_cpu_time) | |
124 | |
125 return trace_cpu_time_metrics | |
126 | |
127 | |
128 | |
129 class _BlinkPerfMeasurement(legacy_page_test.LegacyPageTest): | 76 class _BlinkPerfMeasurement(legacy_page_test.LegacyPageTest): |
130 """Runs a blink performance test and reports the results.""" | 77 """Runs a blink performance test and reports the results.""" |
131 | 78 |
132 def __init__(self): | 79 def __init__(self): |
133 super(_BlinkPerfMeasurement, self).__init__() | 80 super(_BlinkPerfMeasurement, self).__init__() |
134 with open(os.path.join(os.path.dirname(__file__), | 81 with open(os.path.join(os.path.dirname(__file__), |
135 'blink_perf.js'), 'r') as f: | 82 'blink_perf.js'), 'r') as f: |
136 self._blink_perf_js = f.read() | 83 self._blink_perf_js = f.read() |
137 | 84 |
138 def WillNavigateToPage(self, page, tab): | 85 def WillNavigateToPage(self, page, tab): |
139 del tab # unused | 86 del tab # unused |
140 page.script_to_evaluate_on_commit = self._blink_perf_js | 87 page.script_to_evaluate_on_commit = self._blink_perf_js |
141 | 88 |
142 def CustomizeBrowserOptions(self, options): | 89 def CustomizeBrowserOptions(self, options): |
143 options.AppendExtraBrowserArgs([ | 90 options.AppendExtraBrowserArgs([ |
144 '--js-flags=--expose_gc', | 91 '--js-flags=--expose_gc', |
145 '--enable-experimental-web-platform-features', | 92 '--enable-experimental-web-platform-features', |
146 '--disable-gesture-requirement-for-media-playback', | 93 '--disable-gesture-requirement-for-media-playback', |
147 '--enable-experimental-canvas-features', | 94 '--enable-experimental-canvas-features', |
148 # TODO(qinmin): After fixing crbug.com/592017, remove this command line. | 95 # TODO(qinmin): After fixing crbug.com/592017, remove this command line. |
149 '--reduce-security-for-testing' | 96 '--reduce-security-for-testing' |
150 ]) | 97 ]) |
151 if 'content-shell' in options.browser_type: | 98 if 'content-shell' in options.browser_type: |
152 options.AppendExtraBrowserArgs('--expose-internals-for-testing') | 99 options.AppendExtraBrowserArgs('--expose-internals-for-testing') |
153 | 100 |
154 def _ContinueTestRunWithTracing(self, tab): | |
155 tracing_categories = tab.EvaluateJavaScript( | |
156 'testRunner.tracingCategories') | |
157 config = tracing_config.TracingConfig() | |
158 config.enable_chrome_trace = True | |
159 config.chrome_trace_config.category_filter.AddFilterString( | |
160 'blink.console') # This is always required for js land trace event | |
161 config.chrome_trace_config.category_filter.AddFilterString( | |
162 tracing_categories) | |
163 tab.browser.platform.tracing_controller.StartTracing(config) | |
164 tab.EvaluateJavaScript('testRunner.scheduleTestRun()') | |
165 tab.WaitForJavaScriptCondition('testRunner.isDone') | |
166 return tab.browser.platform.tracing_controller.StopTracing() | |
167 | |
168 | |
169 def PrintAndCollectTraceEventMetrics(self, trace_cpu_time_metrics, results): | |
170 unit = 'ms' | |
171 print | |
172 for trace_event_name, cpu_times in trace_cpu_time_metrics.iteritems(): | |
173 print 'CPU times of trace event "%s":' % trace_event_name | |
174 cpu_times_string = ', '.join(['{0:.10f}'.format(t) for t in cpu_times]) | |
175 print 'values %s %s' % (cpu_times_string, unit) | |
176 avg = 0.0 | |
177 if cpu_times: | |
178 avg = sum(cpu_times)/len(cpu_times) | |
179 print 'avg', '{0:.10f}'.format(avg), unit | |
180 results.AddValue(list_of_scalar_values.ListOfScalarValues( | |
181 results.current_page, name=trace_event_name, units=unit, | |
182 values=cpu_times)) | |
183 print | |
184 print '\n' | |
185 | |
186 def ValidateAndMeasurePage(self, page, tab, results): | 101 def ValidateAndMeasurePage(self, page, tab, results): |
187 tab.WaitForJavaScriptCondition( | 102 tab.WaitForJavaScriptCondition('testRunner.isDone', timeout=600) |
188 'testRunner.isDone || testRunner.isWaitingForTracingStart', timeout=600) | |
189 trace_cpu_time_metrics = {} | |
190 if tab.EvaluateJavaScript('testRunner.isWaitingForTracingStart'): | |
191 trace_data = self._ContinueTestRunWithTracing(tab) | |
192 trace_value = trace.TraceValue(page, trace_data) | |
193 results.AddValue(trace_value) | |
194 | |
195 trace_events_to_measure = tab.EvaluateJavaScript( | |
196 'window.testRunner.traceEventsToMeasure') | |
197 model = model_module.TimelineModel(trace_data) | |
198 renderer_thread = model.GetRendererThreadFromTabId(tab.id) | |
199 trace_cpu_time_metrics = _ComputeTraceEventsThreadTimeForBlinkPerf( | |
200 renderer_thread, trace_events_to_measure) | |
201 | 103 |
202 log = tab.EvaluateJavaScript('document.getElementById("log").innerHTML') | 104 log = tab.EvaluateJavaScript('document.getElementById("log").innerHTML') |
203 | 105 |
204 for line in log.splitlines(): | 106 for line in log.splitlines(): |
205 if line.startswith("FATAL: "): | 107 if line.startswith("FATAL: "): |
206 print line | 108 print line |
207 continue | 109 continue |
208 if not line.startswith('values '): | 110 if not line.startswith('values '): |
209 continue | 111 continue |
210 parts = line.split() | 112 parts = line.split() |
211 values = [float(v.replace(',', '')) for v in parts[1:-1]] | 113 values = [float(v.replace(',', '')) for v in parts[1:-1]] |
212 units = parts[-1] | 114 units = parts[-1] |
213 metric = page.display_name.split('.')[0].replace('/', '_') | 115 metric = page.display_name.split('.')[0].replace('/', '_') |
214 results.AddValue(list_of_scalar_values.ListOfScalarValues( | 116 results.AddValue(list_of_scalar_values.ListOfScalarValues( |
215 results.current_page, metric, units, values)) | 117 results.current_page, metric, units, values)) |
216 | 118 |
217 break | 119 break |
218 | 120 |
219 print log | 121 print log |
220 | 122 |
221 self.PrintAndCollectTraceEventMetrics(trace_cpu_time_metrics, results) | |
222 | |
223 | 123 |
224 # TODO(wangxianzhu): Convert the paint benchmarks to use the new blink_perf | 124 # TODO(wangxianzhu): Convert the paint benchmarks to use the new blink_perf |
225 # tracing once it's ready. | 125 # tracing once it's ready. |
226 class _BlinkPerfPaintMeasurement(_BlinkPerfMeasurement): | 126 class _BlinkPerfPaintMeasurement(_BlinkPerfMeasurement): |
227 """Also collects prePaint and paint timing from traces.""" | 127 """Also collects prePaint and paint timing from traces.""" |
228 | 128 |
229 def __init__(self): | 129 def __init__(self): |
230 super(_BlinkPerfPaintMeasurement, self).__init__() | 130 super(_BlinkPerfPaintMeasurement, self).__init__() |
231 self._controller = None | 131 self._controller = None |
232 | 132 |
(...skipping 189 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
422 | 322 |
423 def CreateStorySet(self, options): | 323 def CreateStorySet(self, options): |
424 path = os.path.join(BLINK_PERF_BASE_DIR, self.subdir) | 324 path = os.path.join(BLINK_PERF_BASE_DIR, self.subdir) |
425 return CreateStorySetFromPath( | 325 return CreateStorySetFromPath( |
426 path, SKIPPED_FILE, | 326 path, SKIPPED_FILE, |
427 shared_page_state_class=_SharedPywebsocketPageState) | 327 shared_page_state_class=_SharedPywebsocketPageState) |
428 | 328 |
429 @classmethod | 329 @classmethod |
430 def ShouldDisable(cls, possible_browser): | 330 def ShouldDisable(cls, possible_browser): |
431 return cls.IsSvelte(possible_browser) # http://crbug.com/551950 | 331 return cls.IsSvelte(possible_browser) # http://crbug.com/551950 |
OLD | NEW |