Chromium Code Reviews

Unified Diff: scripts/slave/recipes/skia/swarm_trigger.py

Issue 1919193002: build: roll infra_paths changes (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/tools/build
Patch Set: merge Created 4 years, 8 months ago
 # Copyright 2016 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 
 # Recipe module for Skia Swarming trigger.
 
 
 import json
 
 
 DEPS = [
   'depot_tools/gclient',
   'depot_tools/git',
+  'depot_tools/infra_paths',
   'depot_tools/tryserver',
   'file',
   'gsutil',
   'recipe_engine/path',
   'recipe_engine/properties',
   'recipe_engine/python',
   'recipe_engine/raw_io',
   'recipe_engine/time',
   'skia',
   'skia_swarming',
(...skipping 86 matching lines...)
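
Note: the new 'depot_tools/infra_paths' entry in DEPS is what makes api.infra_paths available to this recipe; the remaining hunks then swap the infra-managed path roots ('build', 'slave_build', 'depot_tools') from api.path over to the rolled module, while source-tree paths such as api.path['checkout'] stay on recipe_engine/path. A minimal sketch of the pattern, illustrative only and not part of the patch:

# Before the roll: infra roots came from the generic recipe_engine/path module.
skia_dir = api.path['slave_build'].join('skia')

# After the roll: the same lookup goes through the rolled infra_paths module,
# which exposes the same dict-style roots seen throughout this diff.
skia_dir = api.infra_paths['slave_build'].join('skia')
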
   else:
     dimensions['gpu'] = 'none'
   return dimensions
 
 
 def isolate_recipes(api):
   """Isolate the recipes."""
   # This directory tends to be missing for some reason.
   api.file.makedirs(
       'third_party_infra',
-      api.path['build'].join('third_party', 'infra'),
+      api.infra_paths['build'].join('third_party', 'infra'),
       infra_step=True)
-  skia_recipes_dir = api.path['build'].join(
+  skia_recipes_dir = api.infra_paths['build'].join(
       'scripts', 'slave', 'recipes', 'skia')
   api.skia_swarming.create_isolated_gen_json(
       skia_recipes_dir.join('swarm_recipe.isolate'),
       skia_recipes_dir,
       'linux',
       'isolate_recipes',
       {})
   return api.skia_swarming.batcharchive(['isolate_recipes'])[0][1]
 
 
(...skipping 21 matching lines...)
     properties['patchset'] = str(api.properties['patchset'])
     properties['rietveld'] = api.properties['rietveld']
 
   extra_args = [
     '--workdir', '../../..',
     'skia/swarm_%s' % task_name,
   ]
   for k, v in properties.iteritems():
     extra_args.append('%s=%s' % (k, v))
 
-  isolate_base_dir = api.path['slave_build']
+  isolate_base_dir = api.infra_paths['slave_build']
   dimensions = swarm_dimensions(builder_spec)
   isolate_blacklist = ['.git', 'out', '*.pyc']
   isolate_vars = {
-    'BUILD': api.path['build'],
-    'WORKDIR': api.path['slave_build'],
+    'BUILD': api.infra_paths['build'],
+    'WORKDIR': api.infra_paths['slave_build'],
   }
 
   isolate_file = '%s_skia.isolate' % task_name
   if 'Coverage' == builder_cfg['configuration']:
     isolate_file = 'coverage_skia.isolate'
   return api.skia_swarming.isolate_and_trigger_task(
       infrabots_dir.join(isolate_file),
       isolate_base_dir,
       '%s_skia' % task_name,
       isolate_vars,
(...skipping 28 matching lines...)
   return got_revision
 
 
 def compile_steps_swarm(api, builder_spec, got_revision, infrabots_dir,
                         extra_isolate_hashes):
   builder_name = derive_compile_bot_name(api.properties['buildername'],
                                          builder_spec)
   compile_builder_spec = builder_spec
   if builder_name != api.properties['buildername']:
     compile_builder_spec = api.skia.get_builder_spec(
-        api.path['slave_build'].join('skia'), builder_name)
+        api.infra_paths['slave_build'].join('skia'), builder_name)
   # Windows bots require a toolchain.
   extra_hashes = extra_isolate_hashes[:]
   if 'Win' in builder_name:
     test_data = '''{
       "2013": "705384d88f80da637eb367e5acc6f315c0e1db2f",
       "2015": "38380d77eec9164e5818ae45e2915a6f22d60e85"
     }'''
     hash_file = infrabots_dir.join('win_toolchain_hash.json')
     j = api.skia._readfile(hash_file,
                            name='Read win_toolchain_hash.json',
(...skipping 55 matching lines...)
 
 def perf_steps_collect(api, task, upload_perf_results, got_revision,
                        is_trybot):
   """Wait for perf steps to finish and upload results."""
   # Wait for nanobench to finish, download the results.
   api.file.rmtree('results_dir', task.task_output_dir, infra_step=True)
   api.skia_swarming.collect_swarming_task(task)
 
   # Upload the results.
   if upload_perf_results:
-    perf_data_dir = api.path['slave_build'].join(
+    perf_data_dir = api.infra_paths['slave_build'].join(
         'perfdata', api.properties['buildername'], 'data')
     git_timestamp = api.git.get_timestamp(test_data='1408633190',
                                           infra_step=True)
     api.file.rmtree('perf_dir', perf_data_dir, infra_step=True)
     api.file.makedirs('perf_dir', perf_data_dir, infra_step=True)
     src_results_file = task.task_output_dir.join(
         '0', 'perfdata', api.properties['buildername'], 'data',
         'nanobench_%s.json' % got_revision)
     dst_results_file = perf_data_dir.join(
         'nanobench_%s_%s.json' % (got_revision, git_timestamp))
     api.file.copy('perf_results', src_results_file, dst_results_file,
                   infra_step=True)
 
-    gsutil_path = api.path['depot_tools'].join(
+    gsutil_path = api.infra_paths['depot_tools'].join(
         'third_party', 'gsutil', 'gsutil')
     upload_args = [api.properties['buildername'], api.properties['buildnumber'],
                    perf_data_dir, got_revision, gsutil_path]
     if is_trybot:
       upload_args.append(api.properties['issue'])
     api.python(
         'Upload perf results',
         script=api.skia.resource('upload_bench_results.py'),
         args=upload_args,
         cwd=api.path['checkout'],
(...skipping 21 matching lines...)
 
 def test_steps_collect(api, task, upload_dm_results, got_revision, is_trybot,
                        builder_cfg):
   """Collect the test results from Swarming."""
   # Wait for tests to finish, download the results.
   api.file.rmtree('results_dir', task.task_output_dir, infra_step=True)
   api.skia_swarming.collect_swarming_task(task)
 
   # Upload the results.
   if upload_dm_results:
-    dm_dir = api.path['slave_build'].join('dm')
+    dm_dir = api.infra_paths['slave_build'].join('dm')
     dm_src = task.task_output_dir.join('0', 'dm')
     api.file.rmtree('dm_dir', dm_dir, infra_step=True)
     api.file.copytree('dm_dir', dm_src, dm_dir, infra_step=True)
 
     # Upload them to Google Storage.
     api.python(
         'Upload DM Results',
         script=api.skia.resource('upload_dm_results.py'),
         args=[
           dm_dir,
           got_revision,
           api.properties['buildername'],
           api.properties['buildnumber'],
           api.properties['issue'] if is_trybot else '',
-          api.path['slave_build'].join('skia', 'common', 'py', 'utils'),
+          api.infra_paths['slave_build'].join('skia', 'common', 'py', 'utils'),
         ],
         cwd=api.path['checkout'],
         env=api.skia.gsutil_env('chromium-skia-gm.boto'),
         infra_step=True)
 
   if builder_cfg['configuration'] == 'Coverage':
     upload_coverage_results(api, task, got_revision, is_trybot)
 
 
 def upload_coverage_results(api, task, got_revision, is_trybot):
(...skipping 26 matching lines...)
   # into the nanobench_${git_hash}_${timestamp}.json file
   # upload_bench_results.py expects.
   src_nano_file = results_dir.join('nanobench_%s.json' % got_revision)
   dst_nano_file = results_dir.join(
       'nanobench_%s_%s.json' % (got_revision, git_timestamp))
   api.file.copy('nanobench JSON', src_nano_file, dst_nano_file,
                 infra_step=True)
   api.file.remove('old nanobench JSON', src_nano_file)
 
   # Upload nanobench JSON data.
-  gsutil_path = api.path['depot_tools'].join(
+  gsutil_path = api.infra_paths['depot_tools'].join(
       'third_party', 'gsutil', 'gsutil')
   upload_args = [api.properties['buildername'], api.properties['buildnumber'],
                  results_dir, got_revision, gsutil_path]
   if is_trybot:
     upload_args.append(api.properties['issue'])
   api.python(
       'upload nanobench coverage results',
       script=api.skia.resource('upload_bench_results.py'),
       args=upload_args,
       cwd=api.path['checkout'],
(...skipping 43 matching lines...)
   do_test_steps = builder_spec['do_test_steps']
   do_perf_steps = builder_spec['do_perf_steps']
 
   if not (do_test_steps or do_perf_steps):
     return
 
   extra_hashes = [recipes_hash]
   if compile_hash:
     extra_hashes.append(compile_hash)
 
-  api.skia.download_skps(api.path['slave_build'].join('tmp'),
-                         api.path['slave_build'].join('skps'),
+  api.skia.download_skps(api.infra_paths['slave_build'].join('tmp'),
+                         api.infra_paths['slave_build'].join('skps'),
                          False)
-  api.skia.download_images(api.path['slave_build'].join('tmp'),
-                           api.path['slave_build'].join('images'),
+  api.skia.download_images(api.infra_paths['slave_build'].join('tmp'),
+                           api.infra_paths['slave_build'].join('images'),
                            False)
 
   test_task = None
   perf_task = None
   if do_test_steps:
     test_task = test_steps_trigger(api, builder_spec, got_revision,
                                    infrabots_dir, extra_hashes)
   if do_perf_steps:
     perf_task = perf_steps_trigger(api, builder_spec, got_revision,
                                    infrabots_dir, extra_hashes)
   is_trybot = builder_cfg['is_trybot']
   if test_task:
     test_steps_collect(api, test_task, builder_spec['upload_dm_results'],
                        got_revision, is_trybot, builder_cfg)
   if perf_task:
     perf_steps_collect(api, perf_task, builder_spec['upload_perf_results'],
                        got_revision, is_trybot)
 
 
 def test_for_bot(api, builder, mastername, slavename, testname=None):
   """Generate a test for the given bot."""
   testname = testname or builder
   test = (
     api.test(testname) +
     api.properties(buildername=builder,
                    mastername=mastername,
                    slavename=slavename,
                    buildnumber=5,
                    revision='abc123') +
-    api.path.exists(
-      api.path['slave_build'].join('skia'),
-      api.path['slave_build'].join('tmp', 'uninteresting_hashes.txt')
+    api.infra_paths.exists(
+      api.infra_paths['slave_build'].join('skia'),
+      api.infra_paths['slave_build'].join('tmp', 'uninteresting_hashes.txt')
     )
   )
   if 'Trybot' in builder:
     test += api.properties(issue=500,
                            patchset=1,
                            rietveld='https://codereview.chromium.org')
   if 'Coverage' not in builder:
     test += api.step_data(
         'upload new .isolated file for compile_skia',
         stdout=api.raw_io.output('def456 XYZ.isolated'))
(...skipping 14 matching lines...)
   for mastername, slaves in TEST_BUILDERS.iteritems():
     for slavename, builders_by_slave in slaves.iteritems():
       for builder in builders_by_slave:
         yield test_for_bot(api, builder, mastername, slavename)
 
   builder = 'Test-Ubuntu-GCC-GCE-CPU-AVX2-x86_64-Debug-Swarming'
   master = 'client.skia'
   slave = 'skiabot-linux-test-000'
   test = test_for_bot(api, builder, master, slave, 'No_downloaded_SKP_VERSION')
   test += api.step_data('Get downloaded SKP_VERSION', retcode=1)
-  test += api.path.exists(
-    api.path['slave_build'].join('skia'),
-    api.path['slave_build'].join('tmp', 'uninteresting_hashes.txt')
+  test += api.infra_paths.exists(
+    api.infra_paths['slave_build'].join('skia'),
+    api.infra_paths['slave_build'].join('tmp', 'uninteresting_hashes.txt')
   )
   yield test
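
For reference, the pattern a downstream recipe would follow after this roll is the same as this file: list 'depot_tools/infra_paths' in DEPS and resolve infra-managed directories through api.infra_paths, keeping recipe_engine/path for everything else. A minimal, hypothetical sketch — the RunSteps body and the 'ls' step are invented for illustration; only the DEPS entry and the lookup style come from this patch:

DEPS = [
  'depot_tools/infra_paths',
  'recipe_engine/path',
  'recipe_engine/step',
]


def RunSteps(api):
  # Infra-managed root, looked up the same way swarm_trigger.py does above.
  slave_build_dir = api.infra_paths['slave_build']
  # Paths derived from it join() like any other recipe Path object.
  skia_dir = slave_build_dir.join('skia')
  # This step exists only to make the sketch a complete recipe.
  api.step('list skia checkout', ['ls', skia_dir])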