Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(147)

Side by Side Diff: tools/perf/benchmarks/blink_perf.py

Issue 2883603002: Use the new tracing based measurement for blink_perf.paint (Closed)
Patch Set: Created 3 years, 7 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « third_party/WebKit/PerformanceTests/Paint/transform-changes.html ('k') | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 # Copyright 2013 The Chromium Authors. All rights reserved. 1 # Copyright 2013 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be 2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file. 3 # found in the LICENSE file.
4 4
5 import os 5 import os
6 6
7 from core import path_util 7 from core import path_util
8 from core import perf_benchmark 8 from core import perf_benchmark
9 9
10 from page_sets import webgl_supported_shared_state
11
10 from telemetry import benchmark 12 from telemetry import benchmark
11 from telemetry import page as page_module 13 from telemetry import page as page_module
12 from telemetry.page import legacy_page_test 14 from telemetry.page import legacy_page_test
13 from telemetry.page import shared_page_state 15 from telemetry.page import shared_page_state
14 from telemetry import story 16 from telemetry import story
15 from telemetry.timeline import bounds 17 from telemetry.timeline import bounds
16 from telemetry.timeline import model as model_module 18 from telemetry.timeline import model as model_module
17 from telemetry.timeline import tracing_config 19 from telemetry.timeline import tracing_config
18 20
19 from telemetry.value import list_of_scalar_values 21 from telemetry.value import list_of_scalar_values
20 from telemetry.value import scalar
21 from telemetry.value import trace 22 from telemetry.value import trace
22 23
23 24
24 from measurements import timeline_controller
25 from page_sets import webgl_supported_shared_state
26
27
28 BLINK_PERF_BASE_DIR = os.path.join(path_util.GetChromiumSrcDir(), 25 BLINK_PERF_BASE_DIR = os.path.join(path_util.GetChromiumSrcDir(),
29 'third_party', 'WebKit', 'PerformanceTests') 26 'third_party', 'WebKit', 'PerformanceTests')
30 SKIPPED_FILE = os.path.join(BLINK_PERF_BASE_DIR, 'Skipped') 27 SKIPPED_FILE = os.path.join(BLINK_PERF_BASE_DIR, 'Skipped')
31 28
32 29
33 def CreateStorySetFromPath(path, skipped_file, 30 def CreateStorySetFromPath(path, skipped_file,
34 shared_page_state_class=( 31 shared_page_state_class=(
35 shared_page_state.SharedPageState)): 32 shared_page_state.SharedPageState)):
36 assert os.path.exists(path) 33 assert os.path.exists(path)
37 34
(...skipping 94 matching lines...) Expand 10 before | Expand all | Expand 10 after
132 else: 129 else:
133 intersect_cpu_time = intersect_wall_time 130 intersect_cpu_time = intersect_wall_time
134 trace_cpu_time_metrics[event_name][curr_test_runs_bound_index] += ( 131 trace_cpu_time_metrics[event_name][curr_test_runs_bound_index] += (
135 intersect_cpu_time) 132 intersect_cpu_time)
136 return trace_cpu_time_metrics 133 return trace_cpu_time_metrics
137 134
138 135
139 class _BlinkPerfMeasurement(legacy_page_test.LegacyPageTest): 136 class _BlinkPerfMeasurement(legacy_page_test.LegacyPageTest):
140   """Runs a blink performance test and reports the results.""" 137   """Runs a blink performance test and reports the results."""
141 138
139 trace_event_to_metric_map = {}
140
142 def __init__(self): 141 def __init__(self):
143 super(_BlinkPerfMeasurement, self).__init__() 142 super(_BlinkPerfMeasurement, self).__init__()
144 with open(os.path.join(os.path.dirname(__file__), 143 with open(os.path.join(os.path.dirname(__file__),
145 'blink_perf.js'), 'r') as f: 144 'blink_perf.js'), 'r') as f:
146 self._blink_perf_js = f.read() 145 self._blink_perf_js = f.read()
147 146
148 def WillNavigateToPage(self, page, tab): 147 def WillNavigateToPage(self, page, tab):
149 del tab # unused 148 del tab # unused
150 page.script_to_evaluate_on_commit = self._blink_perf_js 149 page.script_to_evaluate_on_commit = self._blink_perf_js
151 150
(...skipping 39 matching lines...) Expand 10 before | Expand all | Expand 10 after
191 unit = 'ms' 190 unit = 'ms'
192 print 191 print
193 for trace_event_name, cpu_times in trace_cpu_time_metrics.iteritems(): 192 for trace_event_name, cpu_times in trace_cpu_time_metrics.iteritems():
194 print 'CPU times of trace event "%s":' % trace_event_name 193 print 'CPU times of trace event "%s":' % trace_event_name
195 cpu_times_string = ', '.join(['{0:.10f}'.format(t) for t in cpu_times]) 194 cpu_times_string = ', '.join(['{0:.10f}'.format(t) for t in cpu_times])
196 print 'values %s %s' % (cpu_times_string, unit) 195 print 'values %s %s' % (cpu_times_string, unit)
197 avg = 0.0 196 avg = 0.0
198 if cpu_times: 197 if cpu_times:
199 avg = sum(cpu_times)/len(cpu_times) 198 avg = sum(cpu_times)/len(cpu_times)
200 print 'avg', '{0:.10f}'.format(avg), unit 199 print 'avg', '{0:.10f}'.format(avg), unit
200 if self.trace_event_to_metric_map:
201 metric_name = self.trace_event_to_metric_map[trace_event_name]
202 else:
203 metric_name = trace_event_name
201 results.AddValue(list_of_scalar_values.ListOfScalarValues( 204 results.AddValue(list_of_scalar_values.ListOfScalarValues(
202 results.current_page, name=trace_event_name, units=unit, 205 results.current_page, name=metric_name, units=unit,
203 values=cpu_times)) 206 values=cpu_times))
204 print 207 print
205 print '\n' 208 print '\n'
206 209
207 def ValidateAndMeasurePage(self, page, tab, results): 210 def ValidateAndMeasurePage(self, page, tab, results):
208 tab.WaitForJavaScriptCondition( 211 tab.WaitForJavaScriptCondition(
209 'testRunner.isDone || testRunner.isWaitingForTracingStart', timeout=600) 212 'testRunner.isDone || testRunner.isWaitingForTracingStart', timeout=600)
210 trace_cpu_time_metrics = {} 213 trace_cpu_time_metrics = {}
211 if tab.EvaluateJavaScript('testRunner.isWaitingForTracingStart'): 214 if tab.EvaluateJavaScript('testRunner.isWaitingForTracingStart'):
212 trace_data = self._ContinueTestRunWithTracing(tab) 215 trace_data = self._ContinueTestRunWithTracing(tab)
(...skipping 22 matching lines...) Expand all
235 results.AddValue(list_of_scalar_values.ListOfScalarValues( 238 results.AddValue(list_of_scalar_values.ListOfScalarValues(
236 results.current_page, metric, units, values)) 239 results.current_page, metric, units, values))
237 240
238 break 241 break
239 242
240 print log 243 print log
241 244
242 self.PrintAndCollectTraceEventMetrics(trace_cpu_time_metrics, results) 245 self.PrintAndCollectTraceEventMetrics(trace_cpu_time_metrics, results)
243 246
244 247
# TODO(wangxianzhu): Convert the paint benchmarks to use the new blink_perf
# tracing once it's ready.
class _BlinkPerfPaintMeasurement(_BlinkPerfMeasurement):
  """Also collects prePaint and paint timing from traces.

  On top of the values reported by the base class, this measurement traces
  the page run and reports the durations of the FrameView::prePaint and
  FrameView::paintTree events that fall inside the 'blink_perf' marker.
  """

  def __init__(self):
    super(_BlinkPerfPaintMeasurement, self).__init__()
    # Created per navigation in WillNavigateToPage; None until then.
    self._controller = None

  def WillNavigateToPage(self, page, tab):
    super(_BlinkPerfPaintMeasurement, self).WillNavigateToPage(page, tab)
    self._controller = timeline_controller.TimelineController()
    # Trace only the blink categories needed below to keep overhead and
    # trace size down.
    self._controller.trace_categories = 'blink,blink.console'
    self._controller.SetUp(page, tab)
    self._controller.Start(tab)

  def DidRunPage(self, platform):
    # _controller is still None if the run failed before navigation.
    if self._controller:
      self._controller.CleanUp(platform)

  def ValidateAndMeasurePage(self, page, tab, results):
    super(_BlinkPerfPaintMeasurement, self).ValidateAndMeasurePage(
        page, tab, results)
    self._controller.Stop(tab, results)
    renderer = self._controller.model.GetRendererThreadFromTabId(tab.id)
    # The marker marks the beginning and ending of the measured runs.
    # Pass a default of None so a missing marker trips the assert below
    # with a clear message instead of raising a bare StopIteration.
    marker = next((event for event in renderer.async_slices
                   if event.name == 'blink_perf'
                   and event.category == 'blink.console'), None)
    assert marker, 'No blink_perf marker found in the trace.'

    for event in renderer.all_slices:
      # Only count events fully inside the measured interval.
      if event.start < marker.start or event.end > marker.end:
        continue
      if event.name == 'FrameView::prePaint':
        results.AddValue(
            scalar.ScalarValue(page, 'prePaint', 'ms', event.duration))
      elif event.name == 'FrameView::paintTree':
        results.AddValue(
            scalar.ScalarValue(page, 'paint', 'ms', event.duration))
285
286
287 class _BlinkPerfBenchmark(perf_benchmark.PerfBenchmark): 248 class _BlinkPerfBenchmark(perf_benchmark.PerfBenchmark):
288 249
289 test = _BlinkPerfMeasurement 250 test = _BlinkPerfMeasurement
290 251
291 @classmethod 252 @classmethod
292 def Name(cls): 253 def Name(cls):
293 return 'blink_perf.' + cls.tag 254 return 'blink_perf.' + cls.tag
294 255
295 def CreateStorySet(self, options): 256 def CreateStorySet(self, options):
296 path = os.path.join(BLINK_PERF_BASE_DIR, self.subdir) 257 path = os.path.join(BLINK_PERF_BASE_DIR, self.subdir)
(...skipping 73 matching lines...) Expand 10 before | Expand all | Expand 10 after
370 tag = 'layout' 331 tag = 'layout'
371 subdir = 'Layout' 332 subdir = 'Layout'
372 333
373 @classmethod 334 @classmethod
374 def ShouldDisable(cls, possible_browser): 335 def ShouldDisable(cls, possible_browser):
375 return cls.IsSvelte(possible_browser) # http://crbug.com/551950 336 return cls.IsSvelte(possible_browser) # http://crbug.com/551950
376 337
377 338
@benchmark.Owner(emails=['wangxianzhu@chromium.org'])
class BlinkPerfPaint(_BlinkPerfBenchmark):
  """Runs the Blink paint performance tests.

  The measurement additionally reports CPU times for the pre-paint and
  paint trace events under the metric names the dashboard has
  historically used for this benchmark.
  """

  class _PaintMeasurement(_BlinkPerfMeasurement):
    # Rename raw trace event names to dashboard-friendly metric names.
    trace_event_to_metric_map = {
        'FrameView::prePaint': 'prePaint',
        'FrameView::paintTree': 'paint',
    }

  test = _PaintMeasurement
  tag = 'paint'
  subdir = 'Paint'

  @classmethod
  def ShouldDisable(cls, possible_browser):
    # Too slow on svelte (low-end Android) devices: http://crbug.com/574483
    return cls.IsSvelte(possible_browser)
387 354
388 355
389 @benchmark.Disabled('win') # crbug.com/488493 356 @benchmark.Disabled('win') # crbug.com/488493
390 @benchmark.Owner(emails=['yukishiino@chromium.org', 357 @benchmark.Owner(emails=['yukishiino@chromium.org',
(...skipping 11 matching lines...) Expand all
402 369
403 370
@benchmark.Owner(emails=['hayato@chromium.org'])
class BlinkPerfShadowDOM(_BlinkPerfBenchmark):
  """Runs the Blink Shadow DOM performance tests."""

  tag = 'shadow_dom'
  subdir = 'ShadowDOM'

  @classmethod
  def ShouldDisable(cls, possible_browser):
    # Disabled on Nexus 5X: http://crbug.com/702319
    return possible_browser.platform.GetDeviceTypeName() == 'Nexus 5X'
OLDNEW
« no previous file with comments | « third_party/WebKit/PerformanceTests/Paint/transform-changes.html ('k') | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698