Chromium Code Reviews

Side by Side Diff: infra/bots/recipes/swarm_trigger.py

Issue 2175373002: Move Skia recipes from build repo (Closed) Base URL: https://skia.googlesource.com/skia.git@master
Patch Set: Address more comments Created 4 years, 4 months ago
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.


# Recipe module for Skia Swarming trigger.


import json


DEPS = [
  'build/file',
  'build/gsutil',
  'depot_tools/depot_tools',
  'depot_tools/git',
  'depot_tools/tryserver',
  'recipe_engine/path',
  'recipe_engine/properties',
  'recipe_engine/python',
  'recipe_engine/raw_io',
  'recipe_engine/step',
  'recipe_engine/time',
  'skia',
  'skia_swarming',
]


TEST_BUILDERS = {
  'client.skia': {
    'skiabot-linux-swarm-000': [
      'Test-Ubuntu-GCC-ShuttleA-GPU-GTX550Ti-x86_64-Release-Valgrind',
      'Test-Ubuntu-Clang-GCE-CPU-AVX2-x86_64-Coverage-Trybot',
      'Build-Mac-Clang-x86_64-Release',
      'Build-Ubuntu-GCC-Arm64-Debug-Android_Vulkan',
      'Build-Ubuntu-GCC-x86_64-Debug',
      'Build-Ubuntu-GCC-x86_64-Release-RemoteRun',
      'Build-Ubuntu-GCC-x86_64-Release-Trybot',
      'Build-Win-MSVC-x86_64-Release',
      'Build-Win-MSVC-x86_64-Release-Vulkan',
      'Housekeeper-PerCommit',
      'Housekeeper-Nightly-RecreateSKPs_Canary',
      'Infra-PerCommit',
      'Perf-Ubuntu-GCC-GCE-CPU-AVX2-x86_64-Release-Trybot',
      'Test-Android-GCC-Nexus7v2-GPU-Tegra3-Arm7-Release',
      'Test-Android-GCC-NVIDIA_Shield-GPU-TegraX1-Arm64-Debug-Vulkan',
      'Test-iOS-Clang-iPad4-GPU-SGX554-Arm7-Release',
      'Test-Mac-Clang-MacMini6.2-CPU-AVX-x86_64-Release',
      'Test-Ubuntu-GCC-GCE-CPU-AVX2-x86_64-Debug',
      'Test-Ubuntu-GCC-GCE-CPU-AVX2-x86_64-Debug-MSAN',
      'Test-Win8-MSVC-ShuttleA-GPU-HD7770-x86_64-Release',
      'Test-Win8-MSVC-ShuttleB-CPU-AVX2-x86_64-Release',
    ],
  },
}


def derive_compile_bot_name(builder_name, builder_spec):
  builder_cfg = builder_spec['builder_cfg']
  if builder_cfg['role'] == 'Housekeeper':
    return 'Build-Ubuntu-GCC-x86_64-Release-Shared'
  if builder_cfg['role'] in ('Test', 'Perf'):
    os = builder_cfg['os']
    extra_config = builder_cfg.get('extra_config')
    if os == 'Android':
      if extra_config == 'Vulkan':
        extra_config = '%s_%s' % (os, 'Vulkan')
      else:
        extra_config = os
      os = 'Ubuntu'
    elif os == 'iOS':
      extra_config = os
      os = 'Mac'
    elif 'Win' in os:
      os = 'Win'
    builder_name = 'Build-%s-%s-%s-%s' % (
        os,
        builder_cfg['compiler'],
        builder_cfg['arch'],
        builder_cfg['configuration']
    )
    if extra_config:
      builder_name += '-%s' % extra_config
    if builder_cfg['is_trybot']:
      builder_name += '-Trybot'
  return builder_name


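# Worked example for derive_compile_bot_name() above (illustrative; the real
# builder_cfg comes from the skia recipe module): a builder named
# 'Test-Android-GCC-Nexus7v2-GPU-Tegra3-Arm7-Release' with builder_cfg
# {'role': 'Test', 'os': 'Android', 'compiler': 'GCC', 'arch': 'Arm7',
#  'configuration': 'Release', 'is_trybot': False} maps to the compile bot
# 'Build-Ubuntu-GCC-Arm7-Release-Android', since Android targets are compiled
# on Ubuntu build machines.
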
def swarm_dimensions(builder_spec):
  """Return a dict of keys and values to be used as Swarming bot dimensions."""
  dimensions = {
    'pool': 'Skia',
  }
  builder_cfg = builder_spec['builder_cfg']
  dimensions['os'] = builder_cfg.get('os', 'Ubuntu')
  if 'Win' in builder_cfg.get('os', ''):
    dimensions['os'] = 'Windows'
  if builder_cfg['role'] in ('Test', 'Perf'):
    if 'Android' in builder_cfg['os']:
      # For Android, the device type is a better dimension than CPU or GPU.
      dimensions['device_type'] = builder_spec['product.board']
    elif 'iOS' in builder_cfg['os']:
      # For iOS, the device type is a better dimension than CPU or GPU.
      dimensions['device'] = builder_spec['device_cfg']
      # TODO(borenet): Replace this hack with something better.
      dimensions['os'] = 'iOS-9.2'
    elif builder_cfg['cpu_or_gpu'] == 'CPU':
      dimensions['gpu'] = 'none'
      dimensions['cpu'] = {
        'AVX': 'x86-64',
        'AVX2': 'x86-64-avx2',
        'SSE4': 'x86-64',
      }[builder_cfg['cpu_or_gpu_value']]
      if ('Win' in builder_cfg['os'] and
          builder_cfg['cpu_or_gpu_value'] == 'AVX2'):
        # AVX2 is not correctly detected on Windows. Fall back on other
        # dimensions to ensure that we correctly target machines which we know
        # have AVX2 support.
        dimensions['cpu'] = 'x86-64'
        dimensions['os'] = 'Windows-2008ServerR2-SP1'
    else:
      dimensions['gpu'] = {
        'GeForce320M': '10de:08a4',
        'GT610': '10de:104a',
        'GTX550Ti': '10de:1244',
        'GTX660': '10de:11c0',
        'GTX960': '10de:1401',
        'HD4000': '8086:0a2e',
        'HD4600': '8086:0412',
        'HD7770': '1002:683d',
      }[builder_cfg['cpu_or_gpu_value']]
  else:
    dimensions['gpu'] = 'none'
  return dimensions


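# Illustrative example for swarm_dimensions() above: a CPU test builder such as
# 'Test-Ubuntu-GCC-GCE-CPU-AVX2-x86_64-Debug' (role 'Test', os 'Ubuntu',
# cpu_or_gpu 'CPU', cpu_or_gpu_value 'AVX2') yields
# {'pool': 'Skia', 'os': 'Ubuntu', 'gpu': 'none', 'cpu': 'x86-64-avx2'}.
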
def fix_filemodes(api, path):
  """Set all filemodes to 644 or 755 in the given directory path."""
  api.python.inline(
      name='fix filemodes',
      program='''import os
for r, _, files in os.walk(os.getcwd()):
  for fname in files:
    f = os.path.join(r, fname)
    if os.path.isfile(f):
      if os.access(f, os.X_OK):
        os.chmod(f, 0755)
      else:
        os.chmod(f, 0644)
''',
      cwd=path)


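# Note on fix_filemodes(): the inline script uses Python 2 octal literals
# (0755 / 0644); under Python 3 they would have to be written 0o755 / 0o644.
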
def trigger_task(api, task_name, builder, master, slave, buildnumber,
                 builder_spec, got_revision, infrabots_dir, idempotent=False,
                 store_output=True, extra_isolate_hashes=None, expiration=None,
                 hard_timeout=None, io_timeout=None, cipd_packages=None):
  """Trigger the given bot to run as a Swarming task."""
  # TODO(borenet): We're using Swarming directly to run the recipe through
  # recipes.py. Once it's possible to track the state of a Buildbucket build,
  # we should switch to use the trigger recipe module instead.

  properties = {
    'buildername': builder,
    'mastername': master,
    'buildnumber': buildnumber,
    'reason': 'Triggered by Skia swarm_trigger Recipe',
    'revision': got_revision,
    'slavename': slave,
    'swarm_out_dir': '${ISOLATED_OUTDIR}',
  }
  builder_cfg = builder_spec['builder_cfg']
  if builder_cfg['is_trybot']:
    properties['issue'] = str(api.properties['issue'])
    properties['patchset'] = str(api.properties['patchset'])
    properties['rietveld'] = api.properties['rietveld']

  extra_args = [
    '--workdir', '../../..',
    'swarm_%s' % task_name,
  ]
  for k, v in properties.iteritems():
    extra_args.append('%s=%s' % (k, v))

  isolate_base_dir = api.path['slave_build']
  dimensions = swarm_dimensions(builder_spec)
  isolate_blacklist = ['.git', 'out', '*.pyc', '.recipe_deps']
  isolate_vars = {
    'WORKDIR': api.path['slave_build'],
  }

  isolate_file = '%s_skia.isolate' % task_name
  if 'Coverage' == builder_cfg.get('configuration'):
    isolate_file = 'coverage_skia.isolate'
  if 'RecreateSKPs' in builder:
    isolate_file = 'compile_skia.isolate'
  return api.skia_swarming.isolate_and_trigger_task(
      infrabots_dir.join(isolate_file),
      isolate_base_dir,
      '%s_skia' % task_name,
      isolate_vars,
      dimensions,
      isolate_blacklist=isolate_blacklist,
      extra_isolate_hashes=extra_isolate_hashes,
      idempotent=idempotent,
      store_output=store_output,
      extra_args=extra_args,
      expiration=expiration,
      hard_timeout=hard_timeout,
      io_timeout=io_timeout,
      cipd_packages=cipd_packages)


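# For reference (illustrative values): with task_name='test' and the properties
# above, trigger_task() passes recipes.py-style arguments such as
#   ['--workdir', '../../..', 'swarm_test', 'buildername=<builder>',
#    'mastername=<master>', ..., 'swarm_out_dir=${ISOLATED_OUTDIR}']
# to the isolated task, so the swarm_test recipe runs on the Swarming bot with
# the same properties.
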
def checkout_steps(api):
  """Run the steps to obtain a checkout of Skia."""
  # In this case, we're already running inside a checkout of Skia, so just
  # report the currently-checked-out commit.
  checkout_path = api.path['root'].join('skia')
  got_revision = api.git(
      'rev-parse', 'HEAD', cwd=checkout_path,
      stdout=api.raw_io.output(),
      step_test_data=lambda: api.raw_io.test_api.stream_output('abc123\n'),
  ).stdout.rstrip()
  cmd = ['python', '-c', '"print \'%s\'"' % got_revision]
  res = api.step('got_revision', cmd=cmd)
  res.presentation.properties['got_revision'] = got_revision
  api.path['checkout'] = checkout_path
  fix_filemodes(api, api.path['checkout'])
  return got_revision


def housekeeper_swarm(api, builder_spec, got_revision, infrabots_dir,
                      extra_isolate_hashes):
  task = trigger_task(
      api,
      'housekeeper',
      api.properties['buildername'],
      api.properties['mastername'],
      api.properties['slavename'],
      api.properties['buildnumber'],
      builder_spec,
      got_revision,
      infrabots_dir,
      idempotent=False,
      store_output=False,
      extra_isolate_hashes=extra_isolate_hashes)
  return api.skia_swarming.collect_swarming_task(task)


def recreate_skps_swarm(api, builder_spec, got_revision, infrabots_dir,
                        extra_isolate_hashes):
  task = trigger_task(
      api,
      'RecreateSKPs',
      api.properties['buildername'],
      api.properties['mastername'],
      api.properties['slavename'],
      api.properties['buildnumber'],
      builder_spec,
      got_revision,
      infrabots_dir,
      idempotent=False,
      store_output=False,
      extra_isolate_hashes=extra_isolate_hashes)
  return api.skia_swarming.collect_swarming_task(task)


def infra_swarm(api, got_revision, infrabots_dir, extra_isolate_hashes):
  # Fake the builder spec.
  builder_spec = {
    'builder_cfg': {
      'role': 'Infra',
      'is_trybot': api.properties['buildername'].endswith('-Trybot'),
    }
  }
  task = trigger_task(
      api,
      'infra',
      api.properties['buildername'],
      api.properties['mastername'],
      api.properties['slavename'],
      api.properties['buildnumber'],
      builder_spec,
      got_revision,
      infrabots_dir,
      idempotent=False,
      store_output=False,
      extra_isolate_hashes=extra_isolate_hashes)
  return api.skia_swarming.collect_swarming_task(task)


def compile_steps_swarm(api, builder_spec, got_revision, infrabots_dir,
                        extra_isolate_hashes, cipd_packages):
  builder_name = derive_compile_bot_name(api.properties['buildername'],
                                         builder_spec)
  compile_builder_spec = builder_spec
  if builder_name != api.properties['buildername']:
    compile_builder_spec = api.skia.get_builder_spec(
        api.path['slave_build'].join('skia'), builder_name)

  extra_hashes = extra_isolate_hashes[:]

  # Windows bots require a toolchain.
  if 'Win' in builder_name:
    version_file = infrabots_dir.join('assets', 'win_toolchain', 'VERSION')
    version = api.skia._readfile(version_file,
                                 name='read win_toolchain VERSION',
                                 test_data='0').rstrip()
    version = 'version:%s' % version
    pkg = ('t', 'skia/bots/win_toolchain', version)
    cipd_packages.append(pkg)

    if 'Vulkan' in builder_name:
      version_file = infrabots_dir.join('assets', 'win_vulkan_sdk', 'VERSION')
      cipd_packages.append(cipd_pkg(api, infrabots_dir, 'win_vulkan_sdk'))

  # Fake these properties for compile tasks so that they can be de-duped.
  master = 'client.skia.compile'
  slave = 'skiabot-dummy-compile-slave'
  buildnumber = 1

  task = trigger_task(
      api,
      'compile',
      builder_name,
      master,
      slave,
      buildnumber,
      compile_builder_spec,
      got_revision,
      infrabots_dir,
      idempotent=True,
      store_output=False,
      extra_isolate_hashes=extra_hashes,
      cipd_packages=cipd_packages)

  # Wait for compile to finish, record the results hash.
  return api.skia_swarming.collect_swarming_task_isolate_hash(task)


def get_timeouts(builder_cfg):
  """Some builders require longer than the default timeouts.

  Returns a tuple of (expiration, hard_timeout, io_timeout). If those values
  are None, the default timeouts should be used.
  """
  expiration = None
  hard_timeout = None
  io_timeout = None
  if 'Valgrind' in builder_cfg.get('extra_config', ''):
    expiration = 2*24*60*60
    hard_timeout = 9*60*60
    io_timeout = 60*60
  return expiration, hard_timeout, io_timeout


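# In concrete terms: Valgrind builders get expiration = 172800 s (2 days),
# hard_timeout = 32400 s (9 hours) and io_timeout = 3600 s (1 hour); every
# other builder returns (None, None, None) and uses the defaults.
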
def perf_steps_trigger(api, builder_spec, got_revision, infrabots_dir,
                       extra_hashes, cipd_packages):
  """Trigger perf tests via Swarming."""

  expiration, hard_timeout, io_timeout = get_timeouts(
      builder_spec['builder_cfg'])
  return trigger_task(
      api,
      'perf',
      api.properties['buildername'],
      api.properties['mastername'],
      api.properties['slavename'],
      api.properties['buildnumber'],
      builder_spec,
      got_revision,
      infrabots_dir,
      extra_isolate_hashes=extra_hashes,
      expiration=expiration,
      hard_timeout=hard_timeout,
      io_timeout=io_timeout,
      cipd_packages=cipd_packages)


def perf_steps_collect(api, task, upload_perf_results, got_revision,
                       is_trybot):
  """Wait for perf steps to finish and upload results."""
  # Wait for nanobench to finish, download the results.
  api.skia.rmtree(task.task_output_dir)
  api.skia_swarming.collect_swarming_task(task)

  # Upload the results.
  if upload_perf_results:
    perf_data_dir = api.path['slave_build'].join(
        'perfdata', api.properties['buildername'], 'data')
    git_timestamp = api.git.get_timestamp(test_data='1408633190',
                                          infra_step=True)
    api.skia.rmtree(perf_data_dir)
    api.file.makedirs('perf_dir', perf_data_dir, infra_step=True)
    src_results_file = task.task_output_dir.join(
        '0', 'perfdata', api.properties['buildername'], 'data',
        'nanobench_%s.json' % got_revision)
    dst_results_file = perf_data_dir.join(
        'nanobench_%s_%s.json' % (got_revision, git_timestamp))
    api.file.copy('perf_results', src_results_file, dst_results_file,
                  infra_step=True)

    gsutil_path = api.depot_tools.gsutil_py_path
    upload_args = [api.properties['buildername'], api.properties['buildnumber'],
                   perf_data_dir, got_revision, gsutil_path]
    if is_trybot:
      upload_args.append(api.properties['issue'])
    api.python(
        'Upload perf results',
        script=api.skia.resource('upload_bench_results.py'),
        args=upload_args,
        cwd=api.path['checkout'],
        env=api.skia.gsutil_env('chromium-skia-gm.boto'),
        infra_step=True)


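# Example of the renaming done in perf_steps_collect() (using the test data
# above): a task output file 'nanobench_abc123.json' is copied to
# 'nanobench_abc123_1408633190.json', i.e.
# nanobench_<revision>_<timestamp>.json, the name upload_bench_results.py
# expects.
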
def test_steps_trigger(api, builder_spec, got_revision, infrabots_dir,
                       extra_hashes, cipd_packages):
  """Trigger DM via Swarming."""
  expiration, hard_timeout, io_timeout = get_timeouts(
      builder_spec['builder_cfg'])
  return trigger_task(
      api,
      'test',
      api.properties['buildername'],
      api.properties['mastername'],
      api.properties['slavename'],
      api.properties['buildnumber'],
      builder_spec,
      got_revision,
      infrabots_dir,
      extra_isolate_hashes=extra_hashes,
      expiration=expiration,
      hard_timeout=hard_timeout,
      io_timeout=io_timeout,
      cipd_packages=cipd_packages)


def test_steps_collect(api, task, upload_dm_results, got_revision, is_trybot,
                       builder_cfg):
  """Collect the test results from Swarming."""
  # Wait for tests to finish, download the results.
  api.skia.rmtree(task.task_output_dir)
  api.skia_swarming.collect_swarming_task(task)

  # Upload the results.
  if upload_dm_results:
    dm_dir = api.path['slave_build'].join('dm')
    dm_src = task.task_output_dir.join('0', 'dm')
    api.skia.rmtree(dm_dir)
    api.file.copytree('dm_dir', dm_src, dm_dir, infra_step=True)

    # Upload them to Google Storage.
    api.python(
        'Upload DM Results',
        script=api.skia.resource('upload_dm_results.py'),
        args=[
          dm_dir,
          got_revision,
          api.properties['buildername'],
          api.properties['buildnumber'],
          api.properties['issue'] if is_trybot else '',
          api.path['slave_build'].join('skia', 'common', 'py', 'utils'),
        ],
        cwd=api.path['checkout'],
        env=api.skia.gsutil_env('chromium-skia-gm.boto'),
        infra_step=True)

  if builder_cfg['configuration'] == 'Coverage':
    upload_coverage_results(api, task, got_revision, is_trybot)


def upload_coverage_results(api, task, got_revision, is_trybot):
  results_dir = task.task_output_dir.join('0')
  git_timestamp = api.git.get_timestamp(test_data='1408633190',
                                        infra_step=True)

  # Upload raw coverage data.
  cov_file_basename = '%s.cov' % got_revision
  cov_file = results_dir.join(cov_file_basename)
  now = api.time.utcnow()
  gs_json_path = '/'.join((
      str(now.year).zfill(4), str(now.month).zfill(2),
      str(now.day).zfill(2), str(now.hour).zfill(2),
      api.properties['buildername'],
      str(api.properties['buildnumber'])))
  if is_trybot:
    gs_json_path = '/'.join(('trybot', gs_json_path,
                             str(api.properties['issue'])))
  api.gsutil.upload(
      name='upload raw coverage data',
      source=cov_file,
      bucket='skia-infra',
      dest='/'.join(('coverage-raw-v1', gs_json_path,
                     cov_file_basename)),
      env={'AWS_CREDENTIAL_FILE': None, 'BOTO_CONFIG': None},
  )

  # Transform the nanobench_${git_hash}.json file received from the swarming
  # bot into the nanobench_${git_hash}_${timestamp}.json file that
  # upload_bench_results.py expects.
  src_nano_file = results_dir.join('nanobench_%s.json' % got_revision)
  dst_nano_file = results_dir.join(
      'nanobench_%s_%s.json' % (got_revision, git_timestamp))
  api.file.copy('nanobench JSON', src_nano_file, dst_nano_file,
                infra_step=True)
  api.file.remove('old nanobench JSON', src_nano_file)

  # Upload nanobench JSON data.
  gsutil_path = api.depot_tools.gsutil_py_path
  upload_args = [api.properties['buildername'], api.properties['buildnumber'],
                 results_dir, got_revision, gsutil_path]
  if is_trybot:
    upload_args.append(api.properties['issue'])
  api.python(
      'upload nanobench coverage results',
      script=api.skia.resource('upload_bench_results.py'),
      args=upload_args,
      cwd=api.path['checkout'],
      env=api.skia.gsutil_env('chromium-skia-gm.boto'),
      infra_step=True)

  # Transform the coverage_by_line_${git_hash}.json file received from the
  # swarming bot into a coverage_by_line_${git_hash}_${timestamp}.json file.
  src_lbl_file = results_dir.join('coverage_by_line_%s.json' % got_revision)
  dst_lbl_file_basename = 'coverage_by_line_%s_%s.json' % (
      got_revision, git_timestamp)
  dst_lbl_file = results_dir.join(dst_lbl_file_basename)
  api.file.copy('Line-by-line coverage JSON', src_lbl_file, dst_lbl_file,
                infra_step=True)
  api.file.remove('old line-by-line coverage JSON', src_lbl_file)

  # Upload line-by-line coverage data.
  api.gsutil.upload(
      name='upload line-by-line coverage data',
      source=dst_lbl_file,
      bucket='skia-infra',
      dest='/'.join(('coverage-json-v1', gs_json_path,
                     dst_lbl_file_basename)),
      env={'AWS_CREDENTIAL_FILE': None, 'BOTO_CONFIG': None},
  )


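# Illustrative GS layout for upload_coverage_results() above (the date and hour
# are made-up values): a waterfall run uploads the raw coverage file to
#   coverage-raw-v1/2016/07/25/14/<buildername>/<buildnumber>/<revision>.cov
# while a trybot run prefixes 'trybot' and appends the issue number:
#   coverage-raw-v1/trybot/2016/07/25/14/<buildername>/<buildnumber>/<issue>/<revision>.cov
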
def cipd_pkg(api, infrabots_dir, asset_name):
  """Find and return the CIPD package info for the given asset."""
  version_file = infrabots_dir.join('assets', asset_name, 'VERSION')
  version = api.skia._readfile(version_file,
                               name='read %s VERSION' % asset_name,
                               test_data='0').rstrip()
  version = 'version:%s' % version
  return (asset_name, 'skia/bots/%s' % asset_name, version)


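# Example: cipd_pkg(api, infrabots_dir, 'skp') returns a triple like
# ('skp', 'skia/bots/skp', 'version:42'), assuming the asset's VERSION file
# contains '42'. The same helper is used below for skimage, android_sdk and
# win_vulkan_sdk.
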
def RunSteps(api):
  got_revision = checkout_steps(api)
  infrabots_dir = api.path['checkout'].join('infra', 'bots')
  api.skia_swarming.setup(
      infrabots_dir.join('tools', 'luci-go'),
      swarming_rev='')

  # Run gsutil.py to ensure that it's installed.
  api.gsutil(['help'])

  extra_hashes = []

  # Get ready to compile.
  compile_cipd_deps = []
  extra_compile_hashes = []

  infrabots_dir = api.path['checkout'].join('infra', 'bots')
  if 'Infra' in api.properties['buildername']:
    return infra_swarm(api, got_revision, infrabots_dir, extra_hashes)

  builder_spec = api.skia.get_builder_spec(api.path['checkout'],
                                           api.properties['buildername'])
  builder_cfg = builder_spec['builder_cfg']

  if 'RecreateSKPs' in api.properties['buildername']:
    recreate_skps_swarm(api, builder_spec, got_revision, infrabots_dir,
                        extra_hashes)
    return

  # Android bots require an SDK.
  if 'Android' in api.properties['buildername']:
    compile_cipd_deps.append(cipd_pkg(api, infrabots_dir, 'android_sdk'))

  # Compile.
  do_compile_steps = builder_spec.get('do_compile_steps', True)
  if do_compile_steps:
    extra_hashes.append(compile_steps_swarm(
        api, builder_spec, got_revision, infrabots_dir, extra_compile_hashes,
        cipd_packages=compile_cipd_deps))

  if builder_cfg['role'] == 'Housekeeper':
    housekeeper_swarm(api, builder_spec, got_revision, infrabots_dir,
                      extra_hashes)
    return

  # Get ready to test/perf.

  # CIPD packages needed by test/perf.
  cipd_packages = []

  do_test_steps = builder_spec['do_test_steps']
  do_perf_steps = builder_spec['do_perf_steps']

  if not (do_test_steps or do_perf_steps):
    return

  # SKPs, SkImages.
  cipd_packages.append(cipd_pkg(api, infrabots_dir, 'skp'))
  cipd_packages.append(cipd_pkg(api, infrabots_dir, 'skimage'))

  # Trigger test and perf tasks.
  test_task = None
  perf_task = None
  if do_test_steps:
    test_task = test_steps_trigger(api, builder_spec, got_revision,
                                   infrabots_dir, extra_hashes, cipd_packages)
  if do_perf_steps:
    perf_task = perf_steps_trigger(api, builder_spec, got_revision,
                                   infrabots_dir, extra_hashes, cipd_packages)
  is_trybot = builder_cfg['is_trybot']
  if test_task:
    test_steps_collect(api, test_task, builder_spec['upload_dm_results'],
                       got_revision, is_trybot, builder_cfg)
  if perf_task:
    perf_steps_collect(api, perf_task, builder_spec['upload_perf_results'],
                       got_revision, is_trybot)


def test_for_bot(api, builder, mastername, slavename, testname=None):
  """Generate a test for the given bot."""
  testname = testname or builder
  test = (
    api.test(testname) +
    api.properties(buildername=builder,
                   mastername=mastername,
                   slavename=slavename,
                   buildnumber=5,
                   path_config='kitchen',
                   revision='abc123')
  )
  paths = [
    api.path['slave_build'].join('skia'),
    api.path['slave_build'].join('tmp', 'uninteresting_hashes.txt'),
  ]
  if 'Trybot' in builder:
    test += api.properties(issue=500,
                           patchset=1,
                           rietveld='https://codereview.chromium.org')
  if 'Android' in builder:
    paths.append(api.path['slave_build'].join(
        'skia', 'infra', 'bots', 'assets', 'android_sdk', 'VERSION'))
  if 'Test' in builder and 'Coverage' not in builder:
    test += api.step_data(
        'upload new .isolated file for test_skia',
        stdout=api.raw_io.output('def456 XYZ.isolated'))
  if ('Test' in builder and 'Debug' in builder) or 'Perf' in builder or (
      'Valgrind' in builder and 'Test' in builder):
    test += api.step_data(
        'upload new .isolated file for perf_skia',
        stdout=api.raw_io.output('def456 XYZ.isolated'))
  if 'Housekeeper' in builder and 'RecreateSKPs' not in builder:
    test += api.step_data(
        'upload new .isolated file for housekeeper_skia',
        stdout=api.raw_io.output('def456 XYZ.isolated'))
  if 'Win' in builder:
    paths.append(api.path['slave_build'].join(
        'skia', 'infra', 'bots', 'assets', 'win_toolchain', 'VERSION'))
    paths.append(api.path['slave_build'].join(
        'skia', 'infra', 'bots', 'assets', 'win_vulkan_sdk', 'VERSION'))
  paths.append(api.path['slave_build'].join(
      'skia', 'infra', 'bots', 'assets', 'skimage', 'VERSION'))
  paths.append(api.path['slave_build'].join(
      'skia', 'infra', 'bots', 'assets', 'skp', 'VERSION'))

  test += api.path.exists(*paths)

  return test


def GenTests(api):
  for mastername, slaves in TEST_BUILDERS.iteritems():
    for slavename, builders_by_slave in slaves.iteritems():
      for builder in builders_by_slave:
        yield test_for_bot(api, builder, mastername, slavename)