Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(208)

Side by Side Diff: infra/bots/recipes/swarm_trigger.py

Issue 2175373002: Move Skia recipes from build repo (Closed) Base URL: https://skia.googlesource.com/skia.git@master
Patch Set: Fixes, add simulation test to presubmit Created 4 years, 4 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
(Empty)
1 # Copyright 2016 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
4
5
6 # Recipe module for Skia Swarming trigger.
7
8
9 import json
10
11
# Recipe modules this recipe depends on; resolved by the recipe engine.
# 'build/...' and 'depot_tools/...' come from other repos' recipe modules,
# 'recipe_engine/...' are engine built-ins, and 'skia'/'skia_swarming' are
# local modules.
DEPS = [
  'build/file',
  'build/gsutil',
  'depot_tools/depot_tools',
  'depot_tools/gclient',
  'depot_tools/git',
  'depot_tools/tryserver',
  'recipe_engine/path',
  'recipe_engine/properties',
  'recipe_engine/python',
  'recipe_engine/raw_io',
  'recipe_engine/step',
  'recipe_engine/time',
  'skia',
  'skia_swarming',
]
28
29
# Builders exercised by the simulation tests (see GenTests), organized as
# {master name: {slave name: [builder names]}}.
TEST_BUILDERS = {
  'client.skia': {
    'skiabot-linux-swarm-000': [
      'Test-Ubuntu-GCC-ShuttleA-GPU-GTX550Ti-x86_64-Release-Valgrind',
      'Test-Ubuntu-Clang-GCE-CPU-AVX2-x86_64-Coverage-Trybot',
      'Build-Mac-Clang-x86_64-Release',
      'Build-Ubuntu-GCC-Arm64-Debug-Android_Vulkan',
      'Build-Ubuntu-GCC-x86_64-Debug',
      'Build-Ubuntu-GCC-x86_64-Release-RemoteRun',
      'Build-Ubuntu-GCC-x86_64-Release-Trybot',
      'Build-Win-MSVC-x86_64-Release',
      'Build-Win-MSVC-x86_64-Release-Vulkan',
      'Housekeeper-PerCommit',
      'Housekeeper-Nightly-RecreateSKPs_Canary',
      'Infra-PerCommit',
      'Perf-Ubuntu-GCC-GCE-CPU-AVX2-x86_64-Release-Trybot',
      'Test-Android-GCC-Nexus7v2-GPU-Tegra3-Arm7-Release',
      'Test-Android-GCC-NVIDIA_Shield-GPU-TegraX1-Arm64-Debug-Vulkan',
      'Test-iOS-Clang-iPad4-GPU-SGX554-Arm7-Release',
      'Test-Mac-Clang-MacMini6.2-CPU-AVX-x86_64-Release',
      'Test-Ubuntu-GCC-GCE-CPU-AVX2-x86_64-Debug',
      'Test-Ubuntu-GCC-GCE-CPU-AVX2-x86_64-Debug-MSAN',
      'Test-Win8-MSVC-ShuttleA-GPU-HD7770-x86_64-Release',
      'Test-Win8-MSVC-ShuttleB-CPU-AVX2-x86_64-Release',
    ],
  },
}
57
58
def derive_compile_bot_name(builder_name, builder_spec):
  """Return the name of the Build- builder whose output this builder uses.

  Housekeeper bots always map to a fixed shared-library Linux build. Test and
  Perf bots map to the Build bot matching their compiler/arch/configuration,
  with OS-specific adjustments (Android builds happen on Ubuntu, iOS builds
  on Mac, and all Windows flavors collapse to 'Win'). Any other role keeps
  its own name.
  """
  cfg = builder_spec['builder_cfg']
  role = cfg['role']
  if role == 'Housekeeper':
    return 'Build-Ubuntu-GCC-x86_64-Release-Shared'
  if role not in ('Test', 'Perf'):
    return builder_name

  target_os = cfg['os']
  extra_config = cfg.get('extra_config')
  if target_os == 'Android':
    # Android is cross-compiled on Ubuntu; the target OS (and optionally
    # Vulkan) moves into the extra_config component.
    if extra_config == 'Vulkan':
      extra_config = '%s_%s' % (target_os, 'Vulkan')
    else:
      extra_config = target_os
    target_os = 'Ubuntu'
  elif target_os == 'iOS':
    # iOS is cross-compiled on Mac.
    extra_config, target_os = target_os, 'Mac'
  elif 'Win' in target_os:
    target_os = 'Win'

  name = '-'.join(['Build', target_os, cfg['compiler'], cfg['arch'],
                   cfg['configuration']])
  if extra_config:
    name += '-%s' % extra_config
  if cfg['is_trybot']:
    name += '-Trybot'
  return name
88
89
def swarm_dimensions(builder_spec):
  """Return a dict of keys and values to be used as Swarming bot dimensions.

  All tasks run in the 'Skia' pool. Test/Perf bots additionally pin a device
  type (Android), a device plus OS version (iOS), or a CPU/GPU dimension;
  every other role only needs 'gpu': 'none'.
  """
  cpu_by_value = {
      'AVX': 'x86-64',
      'AVX2': 'x86-64-avx2',
      'SSE4': 'x86-64',
  }
  gpu_by_value = {
      'GeForce320M': '10de:08a4',
      'GT610': '10de:104a',
      'GTX550Ti': '10de:1244',
      'GTX660': '10de:11c0',
      'GTX960': '10de:1401',
      'HD4000': '8086:0a2e',
      'HD4600': '8086:0412',
      'HD7770': '1002:683d',
  }
  cfg = builder_spec['builder_cfg']
  os_dim = cfg.get('os', 'Ubuntu')
  if 'Win' in cfg.get('os', ''):
    os_dim = 'Windows'
  dimensions = {'pool': 'Skia', 'os': os_dim}

  if cfg['role'] not in ('Test', 'Perf'):
    dimensions['gpu'] = 'none'
    return dimensions

  if 'Android' in cfg['os']:
    # For Android, the device type is a better dimension than CPU or GPU.
    dimensions['device_type'] = builder_spec['product.board']
  elif 'iOS' in cfg['os']:
    # For iOS, the device type is a better dimension than CPU or GPU.
    dimensions['device'] = builder_spec['device_cfg']
    # TODO(borenet): Replace this hack with something better.
    dimensions['os'] = 'iOS-9.2'
  elif cfg['cpu_or_gpu'] == 'CPU':
    dimensions['gpu'] = 'none'
    dimensions['cpu'] = cpu_by_value[cfg['cpu_or_gpu_value']]
    if 'Win' in cfg['os'] and cfg['cpu_or_gpu_value'] == 'AVX2':
      # AVX2 is not correctly detected on Windows. Fall back on other
      # dimensions to ensure that we correctly target machines which we know
      # have AVX2 support.
      dimensions['cpu'] = 'x86-64'
      dimensions['os'] = 'Windows-2008ServerR2-SP1'
  else:
    dimensions['gpu'] = gpu_by_value[cfg['cpu_or_gpu_value']]
  return dimensions
136
137
def fix_filemodes(api, path):
  """Set all filemodes to 644 or 755 in the given directory path.

  Runs an inline Python script on the bot which walks the directory and
  normalizes every regular file to 0755 (if currently executable) or 0644.
  """
  fix_script = '''import os
for r, _, files in os.walk(os.getcwd()):
  for fname in files:
    f = os.path.join(r, fname)
    if os.path.isfile(f):
      if os.access(f, os.X_OK):
        os.chmod(f, 0755)
      else:
        os.chmod(f, 0644)
'''
  api.python.inline(program=fix_script, name='fix filemodes', cwd=path)
153
154
def trigger_task(api, task_name, builder, master, slave, buildnumber,
                 builder_spec, got_revision, infrabots_dir, idempotent=False,
                 store_output=True, extra_isolate_hashes=None, expiration=None,
                 hard_timeout=None, io_timeout=None, cipd_packages=None):
  """Trigger the given bot to run as a Swarming task.

  Isolates the appropriate .isolate file for task_name and hands it off to
  the skia_swarming module, passing the build properties through as recipe
  arguments. Returns the triggered Swarming task object.
  """
  # TODO(borenet): We're using Swarming directly to run the recipe through
  # recipes.py. Once it's possible to track the state of a Buildbucket build,
  # we should switch to use the trigger recipe module instead.
  builder_cfg = builder_spec['builder_cfg']
  properties = {
      'buildername': builder,
      'mastername': master,
      'buildnumber': buildnumber,
      'reason': 'Triggered by Skia swarm_trigger Recipe',
      'revision': got_revision,
      'slavename': slave,
      'swarm_out_dir': '${ISOLATED_OUTDIR}',
  }
  if builder_cfg['is_trybot']:
    # Trybots additionally need the codereview location so they can apply
    # the patch.
    properties.update({
        'issue': str(api.properties['issue']),
        'patchset': str(api.properties['patchset']),
        'rietveld': api.properties['rietveld'],
    })

  extra_args = ['--workdir', '../../..', 'swarm_%s' % task_name]
  extra_args.extend('%s=%s' % (k, v) for k, v in properties.iteritems())

  # Pick the isolate file; RecreateSKPs compiles, Coverage has its own, and
  # everything else uses the per-task default.
  if 'RecreateSKPs' in builder:
    isolate_file = 'compile_skia.isolate'
  elif builder_cfg.get('configuration') == 'Coverage':
    isolate_file = 'coverage_skia.isolate'
  else:
    isolate_file = '%s_skia.isolate' % task_name

  return api.skia_swarming.isolate_and_trigger_task(
      infrabots_dir.join(isolate_file),
      api.path['slave_build'],
      '%s_skia' % task_name,
      {'WORKDIR': api.path['slave_build']},
      swarm_dimensions(builder_spec),
      isolate_blacklist=['.git', 'out', '*.pyc', '.recipe_deps'],
      extra_isolate_hashes=extra_isolate_hashes,
      idempotent=idempotent,
      store_output=store_output,
      extra_args=extra_args,
      expiration=expiration,
      hard_timeout=hard_timeout,
      io_timeout=io_timeout,
      cipd_packages=cipd_packages)
213
214
def checkout_steps(api):
  """Run the steps to obtain a checkout of Skia.

  Returns:
    The currently-checked-out Skia commit hash as a string.
  """
  # In this case, we're already running inside a checkout of Skia, so just
  # report the currently-checked-out commit.
  checkout_path = api.path['root'].join('skia')
  got_revision = api.git(
      'rev-parse', 'HEAD', cwd=checkout_path,
      stdout=api.raw_io.output(),
      step_test_data=lambda: api.raw_io.test_api.stream_output('abc123\n'),
  ).stdout.rstrip()
  # Surface got_revision as a step property via a trivial python step.
  # NOTE(review): the -c argument embeds literal double quotes
  # ('"print ...\'"'), which would be passed through to python verbatim --
  # confirm this is intended and not leftover shell quoting.
  cmd = ['python', '-c', '"print \'%s\'"' % got_revision]
  res = api.step('got_revision', cmd=cmd)
  res.presentation.properties['got_revision'] = got_revision
  api.path['checkout'] = checkout_path
  fix_filemodes(api, api.path['checkout'])
  return got_revision
231
232
def housekeeper_swarm(api, builder_spec, got_revision, infrabots_dir,
                      extra_isolate_hashes):
  """Trigger the housekeeper Swarming task and wait for it to complete."""
  props = api.properties
  task = trigger_task(
      api, 'housekeeper', props['buildername'], props['mastername'],
      props['slavename'], props['buildnumber'], builder_spec, got_revision,
      infrabots_dir, idempotent=False, store_output=False,
      extra_isolate_hashes=extra_isolate_hashes)
  return api.skia_swarming.collect_swarming_task(task)
249
250
def recreate_skps_swarm(api, builder_spec, got_revision, infrabots_dir,
                        extra_isolate_hashes):
  """Trigger the RecreateSKPs Swarming task and wait for it to complete."""
  props = api.properties
  task = trigger_task(
      api, 'RecreateSKPs', props['buildername'], props['mastername'],
      props['slavename'], props['buildnumber'], builder_spec, got_revision,
      infrabots_dir, idempotent=False, store_output=False,
      extra_isolate_hashes=extra_isolate_hashes)
  return api.skia_swarming.collect_swarming_task(task)
267
268
def infra_swarm(api, got_revision, infrabots_dir, extra_isolate_hashes):
  """Trigger the infra-tests Swarming task and wait for it to complete."""
  props = api.properties
  # Infra bots have no real builder spec; fake the minimal fields the
  # trigger path reads.
  fake_spec = {
      'builder_cfg': {
          'role': 'Infra',
          'is_trybot': props['buildername'].endswith('-Trybot'),
      }
  }
  task = trigger_task(
      api, 'infra', props['buildername'], props['mastername'],
      props['slavename'], props['buildnumber'], fake_spec, got_revision,
      infrabots_dir, idempotent=False, store_output=False,
      extra_isolate_hashes=extra_isolate_hashes)
  return api.skia_swarming.collect_swarming_task(task)
291
292
def compile_steps_swarm(api, builder_spec, got_revision, infrabots_dir,
                        extra_isolate_hashes, cipd_packages):
  """Trigger a compile task on Swarming and wait for its output.

  Note: appends toolchain packages to the caller's cipd_packages list in
  place.

  Returns:
    The isolate hash of the compile task's output.
  """
  builder_name = derive_compile_bot_name(api.properties['buildername'],
                                         builder_spec)
  compile_builder_spec = builder_spec
  if builder_name != api.properties['buildername']:
    # The compile bot differs from this bot; look up its own spec.
    compile_builder_spec = api.skia.get_builder_spec(
        api.path['slave_build'].join('skia'), builder_name)

  # Copy so we don't mutate the caller's hash list.
  extra_hashes = extra_isolate_hashes[:]

  # Windows bots require a toolchain.
  if 'Win' in builder_name:
    version_file = infrabots_dir.join('assets', 'win_toolchain', 'VERSION')
    version = api.skia._readfile(version_file,
                                 name='read win_toolchain VERSION',
                                 test_data='0').rstrip()
    version = 'version:%s' % version
    # The package name is deliberately the short string 't' rather than the
    # asset name -- presumably to keep install paths under Windows path-length
    # limits; confirm before changing. (This is why cipd_pkg() is not used
    # here.)
    pkg = ('t', 'skia/bots/win_toolchain', version)
    cipd_packages.append(pkg)

    if 'Vulkan' in builder_name:
      # cipd_pkg reads the asset's VERSION file itself; no need to compute
      # the path here. (A previously-unused duplicate assignment was removed.)
      cipd_packages.append(cipd_pkg(api, infrabots_dir, 'win_vulkan_sdk'))

  # Fake these properties for compile tasks so that they can be de-duped.
  master = 'client.skia.compile'
  slave = 'skiabot-dummy-compile-slave'
  buildnumber = 1

  task = trigger_task(
      api,
      'compile',
      builder_name,
      master,
      slave,
      buildnumber,
      compile_builder_spec,
      got_revision,
      infrabots_dir,
      idempotent=True,
      store_output=False,
      extra_isolate_hashes=extra_hashes,
      cipd_packages=cipd_packages)

  # Wait for compile to finish, record the results hash.
  return api.skia_swarming.collect_swarming_task_isolate_hash(task)
340
341
def get_timeouts(builder_cfg):
  """Some builders require longer than the default timeouts.

  Returns tuple of (expiration, hard_timeout, io_timeout) in seconds. All
  three are None when the default timeouts should be used.
  """
  if 'Valgrind' in builder_cfg.get('extra_config', ''):
    # Valgrind runs take far longer: allow two days to be scheduled, nine
    # hours to run, and an hour between I/O.
    return 2*24*60*60, 9*60*60, 60*60
  return None, None, None
356
357
def perf_steps_trigger(api, builder_spec, got_revision, infrabots_dir,
                       extra_hashes, cipd_packages):
  """Trigger perf tests via Swarming."""
  props = api.properties
  expiration, hard_timeout, io_timeout = get_timeouts(
      builder_spec['builder_cfg'])
  return trigger_task(
      api, 'perf', props['buildername'], props['mastername'],
      props['slavename'], props['buildnumber'], builder_spec, got_revision,
      infrabots_dir,
      extra_isolate_hashes=extra_hashes,
      expiration=expiration,
      hard_timeout=hard_timeout,
      io_timeout=io_timeout,
      cipd_packages=cipd_packages)
379
380
def perf_steps_collect(api, task, upload_perf_results, got_revision,
                       is_trybot):
  """Wait for perf steps to finish and upload results.

  Args:
    task: Swarming task object returned by perf_steps_trigger.
    upload_perf_results: whether to upload the nanobench results.
    got_revision: the Skia commit hash being tested.
    is_trybot: whether this run is a trybot (adds the issue to upload args).
  """
  # Wait for nanobench to finish, download the results.
  # (The output dir is cleared first so stale results can't leak through.)
  api.skia.rmtree(task.task_output_dir)
  api.skia_swarming.collect_swarming_task(task)

  # Upload the results.
  if upload_perf_results:
    perf_data_dir = api.path['slave_build'].join(
        'perfdata', api.properties['buildername'], 'data')
    git_timestamp = api.git.get_timestamp(test_data='1408633190',
                                          infra_step=True)
    # Rebuild perf_data_dir from scratch, then copy in the nanobench JSON
    # under the timestamped name that upload_bench_results.py expects.
    api.skia.rmtree(perf_data_dir)
    api.file.makedirs('perf_dir', perf_data_dir, infra_step=True)
    src_results_file = task.task_output_dir.join(
        '0', 'perfdata', api.properties['buildername'], 'data',
        'nanobench_%s.json' % got_revision)
    dst_results_file = perf_data_dir.join(
        'nanobench_%s_%s.json' % (got_revision, git_timestamp))
    api.file.copy('perf_results', src_results_file, dst_results_file,
                  infra_step=True)

    gsutil_path = api.depot_tools.gsutil_py_path
    upload_args = [api.properties['buildername'], api.properties['buildnumber'],
                   perf_data_dir, got_revision, gsutil_path]
    if is_trybot:
      upload_args.append(api.properties['issue'])
    api.python(
        'Upload perf results',
        script=api.skia.resource('upload_bench_results.py'),
        args=upload_args,
        cwd=api.path['checkout'],
        env=api.skia.gsutil_env('chromium-skia-gm.boto'),
        infra_step=True)
416
417
def test_steps_trigger(api, builder_spec, got_revision, infrabots_dir,
                       extra_hashes, cipd_packages):
  """Trigger DM via Swarming."""
  props = api.properties
  expiration, hard_timeout, io_timeout = get_timeouts(
      builder_spec['builder_cfg'])
  return trigger_task(
      api, 'test', props['buildername'], props['mastername'],
      props['slavename'], props['buildnumber'], builder_spec, got_revision,
      infrabots_dir,
      extra_isolate_hashes=extra_hashes,
      expiration=expiration,
      hard_timeout=hard_timeout,
      io_timeout=io_timeout,
      cipd_packages=cipd_packages)
438
439
def test_steps_collect(api, task, upload_dm_results, got_revision, is_trybot,
                       builder_cfg):
  """Collect the test results from Swarming.

  Args:
    task: Swarming task object returned by test_steps_trigger.
    upload_dm_results: whether to upload DM output to Google Storage.
    got_revision: the Skia commit hash being tested.
    is_trybot: whether this run is a trybot (passes the issue when uploading).
    builder_cfg: parsed builder configuration dict; Coverage builders
        additionally upload coverage results.
  """
  # Wait for tests to finish, download the results.
  # (The output dir is cleared first so stale results can't leak through.)
  api.skia.rmtree(task.task_output_dir)
  api.skia_swarming.collect_swarming_task(task)

  # Upload the results.
  if upload_dm_results:
    dm_dir = api.path['slave_build'].join('dm')
    dm_src = task.task_output_dir.join('0', 'dm')
    api.skia.rmtree(dm_dir)
    api.file.copytree('dm_dir', dm_src, dm_dir, infra_step=True)

    # Upload them to Google Storage.
    api.python(
        'Upload DM Results',
        script=api.skia.resource('upload_dm_results.py'),
        args=[
            dm_dir,
            got_revision,
            api.properties['buildername'],
            api.properties['buildnumber'],
            api.properties['issue'] if is_trybot else '',
            api.path['slave_build'].join('skia', 'common', 'py', 'utils'),
        ],
        cwd=api.path['checkout'],
        env=api.skia.gsutil_env('chromium-skia-gm.boto'),
        infra_step=True)

  if builder_cfg['configuration'] == 'Coverage':
    upload_coverage_results(api, task, got_revision, is_trybot)
472
473
def upload_coverage_results(api, task, got_revision, is_trybot):
  """Upload coverage outputs produced by a Coverage test task.

  Uploads three artifacts from the task's output dir: the raw .cov file,
  the nanobench JSON (renamed with a timestamp for upload_bench_results.py),
  and the line-by-line coverage JSON.
  """
  results_dir = task.task_output_dir.join('0')
  git_timestamp = api.git.get_timestamp(test_data='1408633190',
                                        infra_step=True)

  # Upload raw coverage data.
  cov_file_basename = '%s.cov' % got_revision
  cov_file = results_dir.join(cov_file_basename)
  now = api.time.utcnow()
  # GS path is date-bucketed: YYYY/MM/DD/HH/buildername/buildnumber, with a
  # 'trybot/.../<issue>' variant for trybots.
  gs_json_path = '/'.join((
      str(now.year).zfill(4), str(now.month).zfill(2),
      str(now.day).zfill(2), str(now.hour).zfill(2),
      api.properties['buildername'],
      str(api.properties['buildnumber'])))
  if is_trybot:
    gs_json_path = '/'.join(('trybot', gs_json_path,
                             str(api.properties['issue'])))
  api.gsutil.upload(
      name='upload raw coverage data',
      source=cov_file,
      bucket='skia-infra',
      dest='/'.join(('coverage-raw-v1', gs_json_path,
                     cov_file_basename)),
      env={'AWS_CREDENTIAL_FILE': None, 'BOTO_CONFIG': None},
  )

  # Transform the nanobench_${git_hash}.json file received from swarming bot
  # into the nanobench_${git_hash}_${timestamp}.json file
  # upload_bench_results.py expects.
  src_nano_file = results_dir.join('nanobench_%s.json' % got_revision)
  dst_nano_file = results_dir.join(
      'nanobench_%s_%s.json' % (got_revision, git_timestamp))
  api.file.copy('nanobench JSON', src_nano_file, dst_nano_file,
                infra_step=True)
  api.file.remove('old nanobench JSON', src_nano_file)

  # Upload nanobench JSON data.
  gsutil_path = api.depot_tools.gsutil_py_path
  upload_args = [api.properties['buildername'], api.properties['buildnumber'],
                 results_dir, got_revision, gsutil_path]
  if is_trybot:
    upload_args.append(api.properties['issue'])
  api.python(
      'upload nanobench coverage results',
      script=api.skia.resource('upload_bench_results.py'),
      args=upload_args,
      cwd=api.path['checkout'],
      env=api.skia.gsutil_env('chromium-skia-gm.boto'),
      infra_step=True)

  # Transform the coverage_by_line_${git_hash}.json file received from
  # swarming bot into a coverage_by_line_${git_hash}_${timestamp}.json file.
  src_lbl_file = results_dir.join('coverage_by_line_%s.json' % got_revision)
  dst_lbl_file_basename = 'coverage_by_line_%s_%s.json' % (
      got_revision, git_timestamp)
  dst_lbl_file = results_dir.join(dst_lbl_file_basename)
  api.file.copy('Line-by-line coverage JSON', src_lbl_file, dst_lbl_file,
                infra_step=True)
  api.file.remove('old line-by-line coverage JSON', src_lbl_file)

  # Upload line-by-line coverage data.
  api.gsutil.upload(
      name='upload line-by-line coverage data',
      source=dst_lbl_file,
      bucket='skia-infra',
      dest='/'.join(('coverage-json-v1', gs_json_path,
                     dst_lbl_file_basename)),
      env={'AWS_CREDENTIAL_FILE': None, 'BOTO_CONFIG': None},
  )
543
544
def cipd_pkg(api, infrabots_dir, asset_name):
  """Find and return the CIPD package info for the given asset.

  Reads the asset's pinned VERSION file and returns an
  (asset_name, package_path, version_tag) tuple.
  """
  version_file = infrabots_dir.join('assets', asset_name, 'VERSION')
  raw_version = api.skia._readfile(
      version_file,
      name='read %s VERSION' % asset_name,
      test_data='0').rstrip()
  return (asset_name, 'skia/bots/%s' % asset_name,
          'version:%s' % raw_version)
553
554
def RunSteps(api):
  """Top-level entry point: trigger and collect Swarming tasks for this bot.

  Flow: check out Skia, set up Swarming, then depending on the builder name
  and spec run exactly one of the Infra / RecreateSKPs / Housekeeper paths,
  or the general compile + test/perf path.
  """
  got_revision = checkout_steps(api)
  infrabots_dir = api.path['checkout'].join('infra', 'bots')
  api.skia_swarming.setup(
      infrabots_dir.join('tools', 'luci-go'),
      swarming_rev='')

  # Run gsutil.py to ensure that it's installed.
  api.gsutil(['help'])

  # Isolate hashes passed to downstream (test/perf) tasks; the compile
  # output hash is appended below.
  extra_hashes = []

  # Get ready to compile.
  compile_cipd_deps = []
  extra_compile_hashes = []

  # (A redundant second assignment of infrabots_dir, identical to the one
  # above, was removed here.)
  if 'Infra' in api.properties['buildername']:
    return infra_swarm(api, got_revision, infrabots_dir, extra_hashes)

  builder_spec = api.skia.get_builder_spec(api.path['checkout'],
                                           api.properties['buildername'])
  builder_cfg = builder_spec['builder_cfg']

  if 'RecreateSKPs' in api.properties['buildername']:
    recreate_skps_swarm(api, builder_spec, got_revision, infrabots_dir,
                        extra_hashes)
    return

  # Android bots require an SDK.
  if 'Android' in api.properties['buildername']:
    compile_cipd_deps.append(cipd_pkg(api, infrabots_dir, 'android_sdk'))

  # Compile.
  do_compile_steps = builder_spec.get('do_compile_steps', True)
  if do_compile_steps:
    extra_hashes.append(compile_steps_swarm(
        api, builder_spec, got_revision, infrabots_dir, extra_compile_hashes,
        cipd_packages=compile_cipd_deps))

  if builder_cfg['role'] == 'Housekeeper':
    housekeeper_swarm(api, builder_spec, got_revision, infrabots_dir,
                      extra_hashes)
    return

  # Get ready to test/perf.

  # CIPD packages needed by test/perf.
  cipd_packages = []

  do_test_steps = builder_spec['do_test_steps']
  do_perf_steps = builder_spec['do_perf_steps']

  if not (do_test_steps or do_perf_steps):
    return

  # SKPs, SkImages.
  cipd_packages.append(cipd_pkg(api, infrabots_dir, 'skp'))
  cipd_packages.append(cipd_pkg(api, infrabots_dir, 'skimage'))

  # Trigger test and perf tasks.
  test_task = None
  perf_task = None
  if do_test_steps:
    test_task = test_steps_trigger(api, builder_spec, got_revision,
                                   infrabots_dir, extra_hashes, cipd_packages)
  if do_perf_steps:
    perf_task = perf_steps_trigger(api, builder_spec, got_revision,
                                   infrabots_dir, extra_hashes, cipd_packages)
  is_trybot = builder_cfg['is_trybot']
  if test_task:
    test_steps_collect(api, test_task, builder_spec['upload_dm_results'],
                       got_revision, is_trybot, builder_cfg)
  if perf_task:
    perf_steps_collect(api, perf_task, builder_spec['upload_perf_results'],
                       got_revision, is_trybot)
631
632
def test_for_bot(api, builder, mastername, slavename, testname=None):
  """Generate a test for the given bot.

  Builds the simulated properties, step data, and path-existence fixtures
  that the recipe simulation needs for this builder.
  """
  testname = testname or builder
  ret = api.test(testname)
  ret += api.properties(buildername=builder,
                        mastername=mastername,
                        slavename=slavename,
                        buildnumber=5,
                        path_config='kitchen',
                        revision='abc123')
  existing_paths = [
      api.path['slave_build'].join('skia'),
      api.path['slave_build'].join('tmp', 'uninteresting_hashes.txt'),
  ]
  if 'Trybot' in builder:
    ret += api.properties(issue=500,
                          patchset=1,
                          rietveld='https://codereview.chromium.org')
  if 'Android' in builder:
    existing_paths.append(api.path['slave_build'].join(
        'skia', 'infra', 'bots', 'assets', 'android_sdk', 'VERSION'))
  if 'Test' in builder and 'Coverage' not in builder:
    ret += api.step_data(
        'upload new .isolated file for test_skia',
        stdout=api.raw_io.output('def456 XYZ.isolated'))
  # Perf tasks are triggered for Perf bots and for Test bots that are either
  # Debug or Valgrind.
  if 'Perf' in builder or ('Test' in builder and
                           ('Debug' in builder or 'Valgrind' in builder)):
    ret += api.step_data(
        'upload new .isolated file for perf_skia',
        stdout=api.raw_io.output('def456 XYZ.isolated'))
  if 'Housekeeper' in builder and 'RecreateSKPs' not in builder:
    ret += api.step_data(
        'upload new .isolated file for housekeeper_skia',
        stdout=api.raw_io.output('def456 XYZ.isolated'))
  if 'Win' in builder:
    existing_paths.append(api.path['slave_build'].join(
        'skia', 'infra', 'bots', 'assets', 'win_toolchain', 'VERSION'))
    existing_paths.append(api.path['slave_build'].join(
        'skia', 'infra', 'bots', 'assets', 'win_vulkan_sdk', 'VERSION'))
  existing_paths.append(api.path['slave_build'].join(
      'skia', 'infra', 'bots', 'assets', 'skimage', 'VERSION'))
  existing_paths.append(api.path['slave_build'].join(
      'skia', 'infra', 'bots', 'assets', 'skp', 'VERSION'))

  ret += api.path.exists(*existing_paths)

  return ret
682
683
def GenTests(api):
  """Yield a simulation test for every builder listed in TEST_BUILDERS."""
  for master, builders_by_bot in TEST_BUILDERS.iteritems():
    for bot, builder_names in builders_by_bot.iteritems():
      for builder_name in builder_names:
        yield test_for_bot(api, builder_name, master, bot)
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698