Chromium Code Reviews

Unified Diff: scripts/slave/recipes/skia/swarm_trigger.py

Issue 1917243002: Revert "build: roll infra_paths changes" (Closed) Base URL: https://chromium.googlesource.com/chromium/tools/build.git@master
Patch Set: Created 4 years, 7 months ago
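
In substance this revert is mechanical: it drops the depot_tools/infra_paths dependency from DEPS and points the recipe's path lookups back at the standard recipe_engine/path module. A minimal, illustrative sketch of the before/after pattern (the RunSteps wrapper is hypothetical and only frames the example; the path names are taken from this diff):

def RunSteps(api):  # hypothetical entry point, not part of this CL
  # Before the revert, paths were resolved through the infra_paths module:
  #   skia_recipes_dir = api.infra_paths['build'].join(
  #       'scripts', 'slave', 'recipes', 'skia')
  # After the revert, the same lookups go through recipe_engine/path again,
  # and 'depot_tools/infra_paths' is removed from DEPS:
  skia_recipes_dir = api.path['build'].join(
      'scripts', 'slave', 'recipes', 'skia')
  workdir = api.path['slave_build']
  return skia_recipes_dir, workdir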
 # Copyright 2016 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.


 # Recipe module for Skia Swarming trigger.


 import json


 DEPS = [
   'depot_tools/gclient',
   'depot_tools/git',
-  'depot_tools/infra_paths',
   'depot_tools/tryserver',
   'file',
   'gsutil',
   'recipe_engine/path',
   'recipe_engine/properties',
   'recipe_engine/python',
   'recipe_engine/raw_io',
   'recipe_engine/time',
   'skia',
   'skia_swarming',
(...skipping 86 matching lines...)
   else:
     dimensions['gpu'] = 'none'
   return dimensions


 def isolate_recipes(api):
   """Isolate the recipes."""
   # This directory tends to be missing for some reason.
   api.file.makedirs(
       'third_party_infra',
-      api.infra_paths['build'].join('third_party', 'infra'),
+      api.path['build'].join('third_party', 'infra'),
       infra_step=True)
-  skia_recipes_dir = api.infra_paths['build'].join(
+  skia_recipes_dir = api.path['build'].join(
       'scripts', 'slave', 'recipes', 'skia')
   api.skia_swarming.create_isolated_gen_json(
       skia_recipes_dir.join('swarm_recipe.isolate'),
       skia_recipes_dir,
       'linux',
       'isolate_recipes',
       {})
   return api.skia_swarming.batcharchive(['isolate_recipes'])[0][1]


(...skipping 21 matching lines...)
     properties['patchset'] = str(api.properties['patchset'])
     properties['rietveld'] = api.properties['rietveld']

   extra_args = [
     '--workdir', '../../..',
     'skia/swarm_%s' % task_name,
   ]
   for k, v in properties.iteritems():
     extra_args.append('%s=%s' % (k, v))

-  isolate_base_dir = api.infra_paths['slave_build']
+  isolate_base_dir = api.path['slave_build']
   dimensions = swarm_dimensions(builder_spec)
   isolate_blacklist = ['.git', 'out', '*.pyc']
   isolate_vars = {
-    'BUILD': api.infra_paths['build'],
-    'WORKDIR': api.infra_paths['slave_build'],
+    'BUILD': api.path['build'],
+    'WORKDIR': api.path['slave_build'],
   }

   isolate_file = '%s_skia.isolate' % task_name
   if 'Coverage' == builder_cfg['configuration']:
     isolate_file = 'coverage_skia.isolate'
   return api.skia_swarming.isolate_and_trigger_task(
       infrabots_dir.join(isolate_file),
       isolate_base_dir,
       '%s_skia' % task_name,
       isolate_vars,
(...skipping 28 matching lines...)
   return got_revision


 def compile_steps_swarm(api, builder_spec, got_revision, infrabots_dir,
                         extra_isolate_hashes):
   builder_name = derive_compile_bot_name(api.properties['buildername'],
                                          builder_spec)
   compile_builder_spec = builder_spec
   if builder_name != api.properties['buildername']:
     compile_builder_spec = api.skia.get_builder_spec(
-        api.infra_paths['slave_build'].join('skia'), builder_name)
+        api.path['slave_build'].join('skia'), builder_name)
   # Windows bots require a toolchain.
   extra_hashes = extra_isolate_hashes[:]
   if 'Win' in builder_name:
     test_data = '''{
       "2013": "705384d88f80da637eb367e5acc6f315c0e1db2f",
       "2015": "38380d77eec9164e5818ae45e2915a6f22d60e85"
     }'''
     hash_file = infrabots_dir.join('win_toolchain_hash.json')
     j = api.skia._readfile(hash_file,
                            name='Read win_toolchain_hash.json',
(...skipping 55 matching lines...)

 def perf_steps_collect(api, task, upload_perf_results, got_revision,
                        is_trybot):
   """Wait for perf steps to finish and upload results."""
   # Wait for nanobench to finish, download the results.
   api.file.rmtree('results_dir', task.task_output_dir, infra_step=True)
   api.skia_swarming.collect_swarming_task(task)

   # Upload the results.
   if upload_perf_results:
-    perf_data_dir = api.infra_paths['slave_build'].join(
+    perf_data_dir = api.path['slave_build'].join(
         'perfdata', api.properties['buildername'], 'data')
     git_timestamp = api.git.get_timestamp(test_data='1408633190',
                                           infra_step=True)
     api.file.rmtree('perf_dir', perf_data_dir, infra_step=True)
     api.file.makedirs('perf_dir', perf_data_dir, infra_step=True)
     src_results_file = task.task_output_dir.join(
         '0', 'perfdata', api.properties['buildername'], 'data',
         'nanobench_%s.json' % got_revision)
     dst_results_file = perf_data_dir.join(
         'nanobench_%s_%s.json' % (got_revision, git_timestamp))
     api.file.copy('perf_results', src_results_file, dst_results_file,
                   infra_step=True)

-    gsutil_path = api.infra_paths['depot_tools'].join(
+    gsutil_path = api.path['depot_tools'].join(
         'third_party', 'gsutil', 'gsutil')
     upload_args = [api.properties['buildername'], api.properties['buildnumber'],
                    perf_data_dir, got_revision, gsutil_path]
     if is_trybot:
       upload_args.append(api.properties['issue'])
     api.python(
         'Upload perf results',
         script=api.skia.resource('upload_bench_results.py'),
         args=upload_args,
         cwd=api.path['checkout'],
(...skipping 21 matching lines...)

 def test_steps_collect(api, task, upload_dm_results, got_revision, is_trybot,
                        builder_cfg):
   """Collect the test results from Swarming."""
   # Wait for tests to finish, download the results.
   api.file.rmtree('results_dir', task.task_output_dir, infra_step=True)
   api.skia_swarming.collect_swarming_task(task)

   # Upload the results.
   if upload_dm_results:
-    dm_dir = api.infra_paths['slave_build'].join('dm')
+    dm_dir = api.path['slave_build'].join('dm')
     dm_src = task.task_output_dir.join('0', 'dm')
     api.file.rmtree('dm_dir', dm_dir, infra_step=True)
     api.file.copytree('dm_dir', dm_src, dm_dir, infra_step=True)

     # Upload them to Google Storage.
     api.python(
         'Upload DM Results',
         script=api.skia.resource('upload_dm_results.py'),
         args=[
           dm_dir,
           got_revision,
           api.properties['buildername'],
           api.properties['buildnumber'],
           api.properties['issue'] if is_trybot else '',
-          api.infra_paths['slave_build'].join('skia', 'common', 'py', 'utils'),
+          api.path['slave_build'].join('skia', 'common', 'py', 'utils'),
         ],
         cwd=api.path['checkout'],
         env=api.skia.gsutil_env('chromium-skia-gm.boto'),
         infra_step=True)

   if builder_cfg['configuration'] == 'Coverage':
     upload_coverage_results(api, task, got_revision, is_trybot)


 def upload_coverage_results(api, task, got_revision, is_trybot):
(...skipping 26 matching lines...)
   # into the nanobench_${git_hash}_${timestamp}.json file
   # upload_bench_results.py expects.
   src_nano_file = results_dir.join('nanobench_%s.json' % got_revision)
   dst_nano_file = results_dir.join(
       'nanobench_%s_%s.json' % (got_revision, git_timestamp))
   api.file.copy('nanobench JSON', src_nano_file, dst_nano_file,
                 infra_step=True)
   api.file.remove('old nanobench JSON', src_nano_file)

   # Upload nanobench JSON data.
-  gsutil_path = api.infra_paths['depot_tools'].join(
+  gsutil_path = api.path['depot_tools'].join(
       'third_party', 'gsutil', 'gsutil')
   upload_args = [api.properties['buildername'], api.properties['buildnumber'],
                  results_dir, got_revision, gsutil_path]
   if is_trybot:
     upload_args.append(api.properties['issue'])
   api.python(
       'upload nanobench coverage results',
       script=api.skia.resource('upload_bench_results.py'),
       args=upload_args,
       cwd=api.path['checkout'],
(...skipping 43 matching lines...)
   do_test_steps = builder_spec['do_test_steps']
   do_perf_steps = builder_spec['do_perf_steps']

   if not (do_test_steps or do_perf_steps):
     return

   extra_hashes = [recipes_hash]
   if compile_hash:
     extra_hashes.append(compile_hash)

-  api.skia.download_skps(api.infra_paths['slave_build'].join('tmp'),
-                         api.infra_paths['slave_build'].join('skps'),
+  api.skia.download_skps(api.path['slave_build'].join('tmp'),
+                         api.path['slave_build'].join('skps'),
                          False)
-  api.skia.download_images(api.infra_paths['slave_build'].join('tmp'),
-                           api.infra_paths['slave_build'].join('images'),
+  api.skia.download_images(api.path['slave_build'].join('tmp'),
+                           api.path['slave_build'].join('images'),
                            False)

   test_task = None
   perf_task = None
   if do_test_steps:
     test_task = test_steps_trigger(api, builder_spec, got_revision,
                                    infrabots_dir, extra_hashes)
   if do_perf_steps:
     perf_task = perf_steps_trigger(api, builder_spec, got_revision,
                                    infrabots_dir, extra_hashes)
   is_trybot = builder_cfg['is_trybot']
   if test_task:
     test_steps_collect(api, test_task, builder_spec['upload_dm_results'],
                        got_revision, is_trybot, builder_cfg)
   if perf_task:
     perf_steps_collect(api, perf_task, builder_spec['upload_perf_results'],
                        got_revision, is_trybot)


 def test_for_bot(api, builder, mastername, slavename, testname=None):
   """Generate a test for the given bot."""
   testname = testname or builder
   test = (
     api.test(testname) +
     api.properties(buildername=builder,
                    mastername=mastername,
                    slavename=slavename,
                    buildnumber=5,
                    revision='abc123') +
-    api.infra_paths.exists(
-      api.infra_paths['slave_build'].join('skia'),
-      api.infra_paths['slave_build'].join('tmp', 'uninteresting_hashes.txt')
+    api.path.exists(
+      api.path['slave_build'].join('skia'),
+      api.path['slave_build'].join('tmp', 'uninteresting_hashes.txt')
     )
   )
   if 'Trybot' in builder:
     test += api.properties(issue=500,
                            patchset=1,
                            rietveld='https://codereview.chromium.org')
   if 'Coverage' not in builder:
     test += api.step_data(
         'upload new .isolated file for compile_skia',
         stdout=api.raw_io.output('def456 XYZ.isolated'))
(...skipping 14 matching lines...)
   for mastername, slaves in TEST_BUILDERS.iteritems():
     for slavename, builders_by_slave in slaves.iteritems():
       for builder in builders_by_slave:
         yield test_for_bot(api, builder, mastername, slavename)

   builder = 'Test-Ubuntu-GCC-GCE-CPU-AVX2-x86_64-Debug-Swarming'
   master = 'client.skia'
   slave = 'skiabot-linux-test-000'
   test = test_for_bot(api, builder, master, slave, 'No_downloaded_SKP_VERSION')
   test += api.step_data('Get downloaded SKP_VERSION', retcode=1)
-  test += api.infra_paths.exists(
-      api.infra_paths['slave_build'].join('skia'),
-      api.infra_paths['slave_build'].join('tmp', 'uninteresting_hashes.txt')
+  test += api.path.exists(
+      api.path['slave_build'].join('skia'),
+      api.path['slave_build'].join('tmp', 'uninteresting_hashes.txt')
   )
   yield test