Index: infra/bots/recipes/swarm_perf.py
diff --git a/infra/bots/recipes/swarm_perf.py b/infra/bots/recipes/swarm_perf.py
index 93332b707fc455e470deb7a0c9208ff3915c10e3..f2ea06ce87806f605a4317a4daa8efe469e290ea 100644
--- a/infra/bots/recipes/swarm_perf.py
+++ b/infra/bots/recipes/swarm_perf.py
@@ -7,6 +7,7 @@


 DEPS = [
+  'build/file',
   'core',
   'recipe_engine/json',
   'recipe_engine/path',
@@ -14,6 +15,8 @@ DEPS = [
   'recipe_engine/properties',
   'recipe_engine/raw_io',
   'run',
+  'flavor',
+  'vars',
 ]


@@ -31,10 +34,85 @@ TEST_BUILDERS = {
 }


+def perf_steps(api):
+  """Run Skia benchmarks."""
+  if api.vars.upload_perf_results:
+    api.flavor.create_clean_device_dir(
+        api.flavor.device_dirs.perf_data_dir)
+
+  # Run nanobench.
+  properties = [
+    '--properties',
+    'gitHash', api.vars.got_revision,
+    'build_number', api.vars.build_number,
+  ]
+  if api.vars.is_trybot:
+    properties.extend([
+      'issue', api.vars.issue,
+      'patchset', api.vars.patchset,
+    ])
+
+  target = 'nanobench'
+  if 'VisualBench' in api.vars.builder_name:
+    target = 'visualbench'
+  args = [
+    target,
+    '--undefok',  # This helps branches that may not know new flags.
+    '-i', api.flavor.device_dirs.resource_dir,
+    '--skps', api.flavor.device_dirs.skp_dir,
+    '--images', api.flavor.device_path_join(
+        api.flavor.device_dirs.images_dir, 'nanobench'),
+  ]
+
+  skip_flag = None
+  if api.vars.builder_cfg.get('cpu_or_gpu') == 'CPU':
+    skip_flag = '--nogpu'
+  elif api.vars.builder_cfg.get('cpu_or_gpu') == 'GPU':
+    skip_flag = '--nocpu'
+  if skip_flag:
+    args.append(skip_flag)
+  args.extend(api.vars.nanobench_flags)
+
+  if api.vars.upload_perf_results:
+    json_path = api.flavor.device_path_join(
+        api.flavor.device_dirs.perf_data_dir,
+        'nanobench_%s.json' % api.vars.got_revision)
+    args.extend(['--outResultsFile', json_path])
+    args.extend(properties)
+
+    keys_blacklist = ['configuration', 'role', 'is_trybot']
+    args.append('--key')
+    for k in sorted(api.vars.builder_cfg.keys()):
+      if not k in keys_blacklist:
+        args.extend([k, api.vars.builder_cfg[k]])
+
+  api.run(api.flavor.step, target, cmd=args,
+          abort_on_failure=False,
+          env=api.vars.default_env)
+
+  # See skia:2789.
+  if ('Valgrind' in api.vars.builder_name and
+      api.vars.builder_cfg.get('cpu_or_gpu') == 'GPU'):
+    abandonGpuContext = list(args)
+    abandonGpuContext.extend(['--abandonGpuContext', '--nocpu'])
+    api.run(api.flavor.step,
+            '%s --abandonGpuContext' % target,
+            cmd=abandonGpuContext, abort_on_failure=False,
+            env=api.vars.default_env)
+
+  # Copy results to swarming out dir.
+  if api.vars.upload_perf_results:
+    api.file.makedirs('perf_dir', api.vars.perf_data_dir)
+    api.flavor.copy_directory_contents_to_host(
+        api.flavor.device_dirs.perf_data_dir,
+        api.vars.perf_data_dir)
+
+
 def RunSteps(api):
   api.core.setup()
-  api.core.perf_steps()
-  api.core.cleanup_steps()
+  api.flavor.install()
+  perf_steps(api)
+  api.flavor.cleanup_steps()
   api.run.check_failure()

