| OLD | NEW |
| 1 # Copyright 2016 The Chromium Authors. All rights reserved. | 1 # Copyright 2016 The Chromium Authors. All rights reserved. |
| 2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
| 3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
| 4 | 4 |
| 5 | 5 |
| 6 # Recipe module for Skia Swarming trigger. | 6 # Recipe module for Skia Swarming trigger. |
| 7 | 7 |
| 8 | 8 |
| 9 import os |
| 9 import json | 10 import json |
| 10 | 11 |
| 11 | 12 |
| 12 DEPS = [ | 13 DEPS = [ |
| 13 'build/file', | 14 'build/file', |
| 14 'build/gsutil', | 15 'build/gsutil', |
| 15 'builder_name_schema', | 16 'builder_name_schema', |
| 16 'core', | 17 'core', |
| 17 'depot_tools/depot_tools', | 18 'depot_tools/depot_tools', |
| 18 'depot_tools/git', | 19 'depot_tools/git', |
| (...skipping 40 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 59 }, | 60 }, |
| 60 } | 61 } |
| 61 | 62 |
| 62 | 63 |
| 63 def derive_compile_bot_name(api): | 64 def derive_compile_bot_name(api): |
| 64 builder_name = api.properties['buildername'] | 65 builder_name = api.properties['buildername'] |
| 65 builder_cfg = api.builder_name_schema.DictForBuilderName(builder_name) | 66 builder_cfg = api.builder_name_schema.DictForBuilderName(builder_name) |
| 66 if builder_cfg['role'] == 'Housekeeper': | 67 if builder_cfg['role'] == 'Housekeeper': |
| 67 return 'Build-Ubuntu-GCC-x86_64-Release-Shared' | 68 return 'Build-Ubuntu-GCC-x86_64-Release-Shared' |
| 68 if builder_cfg['role'] in ('Test', 'Perf'): | 69 if builder_cfg['role'] in ('Test', 'Perf'): |
| 69 os = builder_cfg['os'] | 70 task_os = builder_cfg['os'] |
| 70 extra_config = builder_cfg.get('extra_config') | 71 extra_config = builder_cfg.get('extra_config') |
| 71 if os == 'Android': | 72 if task_os == 'Android': |
| 72 if extra_config == 'Vulkan': | 73 if extra_config == 'Vulkan': |
| 73 extra_config = '%s_%s' % (os, 'Vulkan') | 74 extra_config = '%s_%s' % (task_os, 'Vulkan') |
| 74 else: | 75 else: |
| 75 extra_config = os | 76 extra_config = task_os |
| 76 os = 'Ubuntu' | 77 task_os = 'Ubuntu' |
| 77 elif os == 'iOS': | 78 elif task_os == 'iOS': |
| 78 extra_config = os | 79 extra_config = task_os |
| 79 os = 'Mac' | 80 task_os = 'Mac' |
| 80 elif 'Win' in os: | 81 elif 'Win' in task_os: |
| 81 os = 'Win' | 82 task_os = 'Win' |
| 82 return api.builder_name_schema.MakeBuilderName( | 83 return api.builder_name_schema.MakeBuilderName( |
| 83 role=api.builder_name_schema.BUILDER_ROLE_BUILD, | 84 role=api.builder_name_schema.BUILDER_ROLE_BUILD, |
| 84 os=os, | 85 os=task_os, |
| 85 compiler=builder_cfg['compiler'], | 86 compiler=builder_cfg['compiler'], |
| 86 target_arch=builder_cfg['arch'], | 87 target_arch=builder_cfg['arch'], |
| 87 configuration=builder_cfg['configuration'], | 88 configuration=builder_cfg['configuration'], |
| 88 extra_config=extra_config, | 89 extra_config=extra_config, |
| 89 is_trybot=api.builder_name_schema.IsTrybot(builder_name)) | 90 is_trybot=api.builder_name_schema.IsTrybot(builder_name)) |
| 90 return builder_name | 91 return builder_name |
| 91 | 92 |
| 92 | 93 |
| 93 def swarm_dimensions(builder_spec): | 94 def swarm_dimensions(builder_cfg): |
| 94 """Return a dict of keys and values to be used as Swarming bot dimensions.""" | 95 """Return a dict of keys and values to be used as Swarming bot dimensions.""" |
| 95 dimensions = { | 96 dimensions = { |
| 96 'pool': 'Skia', | 97 'pool': 'Skia', |
| 97 } | 98 } |
| 98 builder_cfg = builder_spec['builder_cfg'] | |
| 99 dimensions['os'] = builder_cfg.get('os', 'Ubuntu') | 99 dimensions['os'] = builder_cfg.get('os', 'Ubuntu') |
| 100 if 'Win' in builder_cfg.get('os', ''): | 100 if 'Win' in builder_cfg.get('os', ''): |
| 101 dimensions['os'] = 'Windows' | 101 dimensions['os'] = 'Windows' |
| 102 if builder_cfg['role'] in ('Test', 'Perf'): | 102 if builder_cfg['role'] in ('Test', 'Perf'): |
| 103 if 'Android' in builder_cfg['os']: | 103 if 'Android' in builder_cfg['os']: |
| 104 # For Android, the device type is a better dimension than CPU or GPU. | 104 # For Android, the device type is a better dimension than CPU or GPU. |
| 105 dimensions['device_type'] = builder_spec['product.board'] | 105 dimensions['device_type'] = { |
| 106 'AndroidOne': 'sprout', |
| 107 'GalaxyS3': 'm0', #'smdk4x12', Detected incorrectly by swarming? |
| 108 'GalaxyS4': None, # TODO(borenet,kjlubick) |
| 109 'NVIDIA_Shield': 'foster', |
| 110 'Nexus10': 'manta', |
| 111 'Nexus5': 'hammerhead', |
| 112 'Nexus6': 'shamu', |
| 113 'Nexus7': 'grouper', |
| 114 'Nexus7v2': 'flo', |
| 115 'Nexus9': 'flounder', |
| 116 'NexusPlayer': 'fugu', |
| 117 }[builder_cfg['model']] |
| 106 elif 'iOS' in builder_cfg['os']: | 118 elif 'iOS' in builder_cfg['os']: |
| 107 # For iOS, the device type is a better dimension than CPU or GPU. | 119 # For iOS, the device type is a better dimension than CPU or GPU. |
| 108 dimensions['device'] = builder_spec['device_cfg'] | 120 dimensions['device'] = { |
| 121 'iPad4': 'iPad4,1', |
| 122 }[builder_cfg['model']] |
| 109 # TODO(borenet): Replace this hack with something better. | 123 # TODO(borenet): Replace this hack with something better. |
| 110 dimensions['os'] = 'iOS-9.2' | 124 dimensions['os'] = 'iOS-9.2' |
| 111 elif builder_cfg['cpu_or_gpu'] == 'CPU': | 125 elif builder_cfg['cpu_or_gpu'] == 'CPU': |
| 112 dimensions['gpu'] = 'none' | 126 dimensions['gpu'] = 'none' |
| 113 dimensions['cpu'] = { | 127 dimensions['cpu'] = { |
| 114 'AVX': 'x86-64', | 128 'AVX': 'x86-64', |
| 115 'AVX2': 'x86-64-avx2', | 129 'AVX2': 'x86-64-avx2', |
| 116 'SSE4': 'x86-64', | 130 'SSE4': 'x86-64', |
| 117 }[builder_cfg['cpu_or_gpu_value']] | 131 }[builder_cfg['cpu_or_gpu_value']] |
| 118 if ('Win' in builder_cfg['os'] and | 132 if ('Win' in builder_cfg['os'] and |
| (...skipping 30 matching lines...) Expand all Loading... |
| 149 if os.path.isfile(f): | 163 if os.path.isfile(f): |
| 150 if os.access(f, os.X_OK): | 164 if os.access(f, os.X_OK): |
| 151 os.chmod(f, 0755) | 165 os.chmod(f, 0755) |
| 152 else: | 166 else: |
| 153 os.chmod(f, 0644) | 167 os.chmod(f, 0644) |
| 154 ''', | 168 ''', |
| 155 cwd=path) | 169 cwd=path) |
| 156 | 170 |
| 157 | 171 |
| 158 def trigger_task(api, task_name, builder, master, slave, buildnumber, | 172 def trigger_task(api, task_name, builder, master, slave, buildnumber, |
| 159 builder_spec, got_revision, infrabots_dir, idempotent=False, | 173 builder_cfg, got_revision, infrabots_dir, idempotent=False, |
| 160 store_output=True, extra_isolate_hashes=None, expiration=None, | 174 store_output=True, extra_isolate_hashes=None, expiration=None, |
| 161 hard_timeout=None, io_timeout=None, cipd_packages=None): | 175 hard_timeout=None, io_timeout=None, cipd_packages=None): |
| 162 """Trigger the given bot to run as a Swarming task.""" | 176 """Trigger the given bot to run as a Swarming task.""" |
| 163 # TODO(borenet): We're using Swarming directly to run the recipe through | 177 # TODO(borenet): We're using Swarming directly to run the recipe through |
| 164 # recipes.py. Once it's possible to track the state of a Buildbucket build, | 178 # recipes.py. Once it's possible to track the state of a Buildbucket build, |
| 165 # we should switch to use the trigger recipe module instead. | 179 # we should switch to use the trigger recipe module instead. |
| 166 | 180 |
| 167 properties = { | 181 properties = { |
| 168 'buildername': builder, | 182 'buildername': builder, |
| 169 'mastername': master, | 183 'mastername': master, |
| 170 'buildnumber': buildnumber, | 184 'buildnumber': buildnumber, |
| 171 'reason': 'Triggered by Skia swarm_trigger Recipe', | 185 'reason': 'Triggered by Skia swarm_trigger Recipe', |
| 172 'revision': got_revision, | 186 'revision': got_revision, |
| 173 'slavename': slave, | 187 'slavename': slave, |
| 174 'swarm_out_dir': '${ISOLATED_OUTDIR}', | 188 'swarm_out_dir': '${ISOLATED_OUTDIR}', |
| 175 } | 189 } |
| 176 builder_cfg = builder_spec['builder_cfg'] | |
| 177 if builder_cfg['is_trybot']: | 190 if builder_cfg['is_trybot']: |
| 178 properties['issue'] = str(api.properties['issue']) | 191 properties['issue'] = str(api.properties['issue']) |
| 179 properties['patchset'] = str(api.properties['patchset']) | 192 properties['patchset'] = str(api.properties['patchset']) |
| 180 properties['rietveld'] = api.properties['rietveld'] | 193 properties['rietveld'] = api.properties['rietveld'] |
| 181 | 194 |
| 182 extra_args = [ | 195 extra_args = [ |
| 183 '--workdir', '../../..', | 196 '--workdir', '../../..', |
| 184 'swarm_%s' % task_name, | 197 'swarm_%s' % task_name, |
| 185 ] | 198 ] |
| 186 for k, v in properties.iteritems(): | 199 for k, v in properties.iteritems(): |
| 187 extra_args.append('%s=%s' % (k, v)) | 200 extra_args.append('%s=%s' % (k, v)) |
| 188 | 201 |
| 189 isolate_base_dir = api.path['slave_build'] | 202 isolate_base_dir = api.path['slave_build'] |
| 190 dimensions = swarm_dimensions(builder_spec) | 203 dimensions = swarm_dimensions(builder_cfg) |
| 191 isolate_blacklist = ['.git', 'out', '*.pyc', '.recipe_deps'] | 204 isolate_blacklist = ['.git', 'out', '*.pyc', '.recipe_deps'] |
| 192 isolate_vars = { | 205 isolate_vars = { |
| 193 'WORKDIR': api.path['slave_build'], | 206 'WORKDIR': api.path['slave_build'], |
| 194 } | 207 } |
| 195 | 208 |
| 196 isolate_file = '%s_skia.isolate' % task_name | 209 isolate_file = '%s_skia.isolate' % task_name |
| 197 if 'Coverage' == builder_cfg.get('configuration'): | 210 if 'Coverage' == builder_cfg.get('configuration'): |
| 198 isolate_file = 'coverage_skia.isolate' | 211 isolate_file = 'coverage_skia.isolate' |
| 199 if 'RecreateSKPs' in builder: | 212 if 'RecreateSKPs' in builder: |
| 200 isolate_file = 'compile_skia.isolate' | 213 isolate_file = 'compile_skia.isolate' |
| (...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 232 | 245 |
| 233 # Write a fake .gclient file if none exists. This is required by .isolates. | 246 # Write a fake .gclient file if none exists. This is required by .isolates. |
| 234 dot_gclient = api.path['slave_build'].join('.gclient') | 247 dot_gclient = api.path['slave_build'].join('.gclient') |
| 235 if not api.path.exists(dot_gclient): | 248 if not api.path.exists(dot_gclient): |
| 236 api.run.writefile(dot_gclient, '') | 249 api.run.writefile(dot_gclient, '') |
| 237 | 250 |
| 238 fix_filemodes(api, api.path['checkout']) | 251 fix_filemodes(api, api.path['checkout']) |
| 239 return got_revision | 252 return got_revision |
| 240 | 253 |
| 241 | 254 |
| 242 def housekeeper_swarm(api, builder_spec, got_revision, infrabots_dir, | 255 def housekeeper_swarm(api, builder_cfg, got_revision, infrabots_dir, |
| 243 extra_isolate_hashes): | 256 extra_isolate_hashes): |
| 244 task = trigger_task( | 257 task = trigger_task( |
| 245 api, | 258 api, |
| 246 'housekeeper', | 259 'housekeeper', |
| 247 api.properties['buildername'], | 260 api.properties['buildername'], |
| 248 api.properties['mastername'], | 261 api.properties['mastername'], |
| 249 api.properties['slavename'], | 262 api.properties['slavename'], |
| 250 api.properties['buildnumber'], | 263 api.properties['buildnumber'], |
| 251 builder_spec, | 264 builder_cfg, |
| 252 got_revision, | 265 got_revision, |
| 253 infrabots_dir, | 266 infrabots_dir, |
| 254 idempotent=False, | 267 idempotent=False, |
| 255 store_output=False, | 268 store_output=False, |
| 256 extra_isolate_hashes=extra_isolate_hashes) | 269 extra_isolate_hashes=extra_isolate_hashes) |
| 257 return api.swarming.collect_swarming_task(task) | 270 return api.swarming.collect_swarming_task(task) |
| 258 | 271 |
| 259 | 272 |
| 260 def recreate_skps_swarm(api, builder_spec, got_revision, infrabots_dir, | 273 def recreate_skps_swarm(api, builder_cfg, got_revision, infrabots_dir, |
| 261 extra_isolate_hashes): | 274 extra_isolate_hashes): |
| 262 task = trigger_task( | 275 task = trigger_task( |
| 263 api, | 276 api, |
| 264 'RecreateSKPs', | 277 'RecreateSKPs', |
| 265 api.properties['buildername'], | 278 api.properties['buildername'], |
| 266 api.properties['mastername'], | 279 api.properties['mastername'], |
| 267 api.properties['slavename'], | 280 api.properties['slavename'], |
| 268 api.properties['buildnumber'], | 281 api.properties['buildnumber'], |
| 269 builder_spec, | 282 builder_cfg, |
| 270 got_revision, | 283 got_revision, |
| 271 infrabots_dir, | 284 infrabots_dir, |
| 272 idempotent=False, | 285 idempotent=False, |
| 273 store_output=False, | 286 store_output=False, |
| 274 extra_isolate_hashes=extra_isolate_hashes) | 287 extra_isolate_hashes=extra_isolate_hashes) |
| 275 return api.swarming.collect_swarming_task(task) | 288 return api.swarming.collect_swarming_task(task) |
| 276 | 289 |
| 277 | 290 |
| 278 def infra_swarm(api, got_revision, infrabots_dir, extra_isolate_hashes): | 291 def infra_swarm(api, got_revision, infrabots_dir, extra_isolate_hashes): |
| 279 # Fake the builder spec. | 292 # Fake the builder cfg. |
| 280 builder_spec = { | 293 builder_cfg = { |
| 281 'builder_cfg': { | 294 'role': 'Infra', |
| 282 'role': 'Infra', | 295 'is_trybot': api.builder_name_schema.IsTrybot( |
| 283 'is_trybot': api.builder_name_schema.IsTrybot( | 296 api.properties['buildername']) |
| 284 api.properties['buildername']) | |
| 285 } | |
| 286 } | 297 } |
| 287 task = trigger_task( | 298 task = trigger_task( |
| 288 api, | 299 api, |
| 289 'infra', | 300 'infra', |
| 290 api.properties['buildername'], | 301 api.properties['buildername'], |
| 291 api.properties['mastername'], | 302 api.properties['mastername'], |
| 292 api.properties['slavename'], | 303 api.properties['slavename'], |
| 293 api.properties['buildnumber'], | 304 api.properties['buildnumber'], |
| 294 builder_spec, | 305 builder_cfg, |
| 295 got_revision, | 306 got_revision, |
| 296 infrabots_dir, | 307 infrabots_dir, |
| 297 idempotent=False, | 308 idempotent=False, |
| 298 store_output=False, | 309 store_output=False, |
| 299 extra_isolate_hashes=extra_isolate_hashes) | 310 extra_isolate_hashes=extra_isolate_hashes) |
| 300 return api.swarming.collect_swarming_task(task) | 311 return api.swarming.collect_swarming_task(task) |
| 301 | 312 |
| 302 | 313 |
| 303 def compile_steps_swarm(api, builder_spec, got_revision, infrabots_dir, | 314 def compile_steps_swarm(api, builder_cfg, got_revision, infrabots_dir): |
| 304 extra_isolate_hashes, cipd_packages): | |
| 305 builder_name = derive_compile_bot_name(api) | 315 builder_name = derive_compile_bot_name(api) |
| 306 compile_builder_spec = builder_spec | 316 compile_builder_cfg = api.builder_name_schema.DictForBuilderName(builder_name) |
| 307 if builder_name != api.properties['buildername']: | |
| 308 compile_builder_spec = api.vars.get_builder_spec(builder_name) | |
| 309 | 317 |
| 310 extra_hashes = extra_isolate_hashes[:] | 318 cipd_packages = [] |
| 319 |
| 320 # Android bots require a toolchain. |
| 321 if 'Android' in api.properties['buildername']: |
| 322 cipd_packages.append(cipd_pkg(api, infrabots_dir, 'android_sdk')) |
| 311 | 323 |
| 312 # Windows bots require a toolchain. | 324 # Windows bots require a toolchain. |
| 313 if 'Win' in builder_name: | 325 if 'Win' in builder_name: |
| 314 version_file = infrabots_dir.join('assets', 'win_toolchain', 'VERSION') | 326 version_file = infrabots_dir.join('assets', 'win_toolchain', 'VERSION') |
| 315 version = api.run.readfile(version_file, | 327 version = api.run.readfile(version_file, |
| 316 name='read win_toolchain VERSION', | 328 name='read win_toolchain VERSION', |
| 317 test_data='0').rstrip() | 329 test_data='0').rstrip() |
| 318 version = 'version:%s' % version | 330 version = 'version:%s' % version |
| 319 pkg = ('t', 'skia/bots/win_toolchain', version) | 331 pkg = ('t', 'skia/bots/win_toolchain', version) |
| 320 cipd_packages.append(pkg) | 332 cipd_packages.append(pkg) |
| 321 | 333 |
| 322 if 'Vulkan' in builder_name: | 334 if 'Vulkan' in builder_name: |
| 323 cipd_packages.append(cipd_pkg(api, infrabots_dir, 'win_vulkan_sdk')) | 335 cipd_packages.append(cipd_pkg(api, infrabots_dir, 'win_vulkan_sdk')) |
| 324 | 336 |
| 325 # Fake these properties for compile tasks so that they can be de-duped. | 337 # Fake these properties for compile tasks so that they can be de-duped. |
| 326 master = 'client.skia.compile' | 338 master = 'client.skia.compile' |
| 327 slave = 'skiabot-dummy-compile-slave' | 339 slave = 'skiabot-dummy-compile-slave' |
| 328 buildnumber = 1 | 340 buildnumber = 1 |
| 329 | 341 |
| 330 task = trigger_task( | 342 task = trigger_task( |
| 331 api, | 343 api, |
| 332 'compile', | 344 'compile', |
| 333 builder_name, | 345 builder_name, |
| 334 master, | 346 master, |
| 335 slave, | 347 slave, |
| 336 buildnumber, | 348 buildnumber, |
| 337 compile_builder_spec, | 349 compile_builder_cfg, |
| 338 got_revision, | 350 got_revision, |
| 339 infrabots_dir, | 351 infrabots_dir, |
| 340 idempotent=True, | 352 idempotent=True, |
| 341 store_output=False, | 353 store_output=False, |
| 342 extra_isolate_hashes=extra_hashes, | |
| 343 cipd_packages=cipd_packages) | 354 cipd_packages=cipd_packages) |
| 344 | 355 |
| 345 # Wait for compile to finish, record the results hash. | 356 # Wait for compile to finish, record the results hash. |
| 346 return api.swarming.collect_swarming_task_isolate_hash(task) | 357 return api.swarming.collect_swarming_task_isolate_hash(task) |
| 347 | 358 |
| 348 | 359 |
| 349 def get_timeouts(builder_cfg): | 360 def get_timeouts(builder_cfg): |
| 350 """Some builders require longer than the default timeouts. | 361 """Some builders require longer than the default timeouts. |
| 351 | 362 |
| 352 Returns tuple of (expiration, hard_timeout, io_timeout). If those values are | 363 Returns tuple of (expiration, hard_timeout, io_timeout). If those values are |
| 353 none then default timeouts should be used. | 364 none then default timeouts should be used. |
| 354 """ | 365 """ |
| 355 expiration = None | 366 expiration = None |
| 356 hard_timeout = None | 367 hard_timeout = None |
| 357 io_timeout = None | 368 io_timeout = None |
| 358 if 'Valgrind' in builder_cfg.get('extra_config', ''): | 369 if 'Valgrind' in builder_cfg.get('extra_config', ''): |
| 359 expiration = 2*24*60*60 | 370 expiration = 2*24*60*60 |
| 360 hard_timeout = 9*60*60 | 371 hard_timeout = 9*60*60 |
| 361 io_timeout = 60*60 | 372 io_timeout = 60*60 |
| 362 return expiration, hard_timeout, io_timeout | 373 return expiration, hard_timeout, io_timeout |
| 363 | 374 |
| 364 | 375 |
| 365 def perf_steps_trigger(api, builder_spec, got_revision, infrabots_dir, | 376 def gsutil_env(api, boto_file): |
| 377 """Environment variables for gsutil.""" |
| 378 home_dir = os.path.expanduser('~') |
| 379 if api.path._test_data.enabled: |
| 380 home_dir = '[HOME]' |
| 381 |
| 382 boto_path = None |
| 383 if boto_file: |
| 384 boto_path = api.path.join(home_dir, boto_file) |
| 385 return {'AWS_CREDENTIAL_FILE': boto_path, |
| 386 'BOTO_CONFIG': boto_path} |
| 387 |
| 388 |
| 389 def perf_steps_trigger(api, builder_cfg, got_revision, infrabots_dir, |
| 366 extra_hashes, cipd_packages): | 390 extra_hashes, cipd_packages): |
| 367 """Trigger perf tests via Swarming.""" | 391 """Trigger perf tests via Swarming.""" |
| 368 | 392 |
| 369 expiration, hard_timeout, io_timeout = get_timeouts( | 393 expiration, hard_timeout, io_timeout = get_timeouts(builder_cfg) |
| 370 builder_spec['builder_cfg']) | |
| 371 return trigger_task( | 394 return trigger_task( |
| 372 api, | 395 api, |
| 373 'perf', | 396 'perf', |
| 374 api.properties['buildername'], | 397 api.properties['buildername'], |
| 375 api.properties['mastername'], | 398 api.properties['mastername'], |
| 376 api.properties['slavename'], | 399 api.properties['slavename'], |
| 377 api.properties['buildnumber'], | 400 api.properties['buildnumber'], |
| 378 builder_spec, | 401 builder_cfg, |
| 379 got_revision, | 402 got_revision, |
| 380 infrabots_dir, | 403 infrabots_dir, |
| 381 extra_isolate_hashes=extra_hashes, | 404 extra_isolate_hashes=extra_hashes, |
| 382 expiration=expiration, | 405 expiration=expiration, |
| 383 hard_timeout=hard_timeout, | 406 hard_timeout=hard_timeout, |
| 384 io_timeout=io_timeout, | 407 io_timeout=io_timeout, |
| 385 cipd_packages=cipd_packages) | 408 cipd_packages=cipd_packages) |
| 386 | 409 |
| 387 | 410 |
| 388 def perf_steps_collect(api, task, upload_perf_results, got_revision, | 411 def perf_steps_collect(api, task, got_revision, is_trybot): |
| 389 is_trybot): | |
| 390 """Wait for perf steps to finish and upload results.""" | 412 """Wait for perf steps to finish and upload results.""" |
| 391 # Wait for nanobench to finish, download the results. | 413 # Wait for nanobench to finish, download the results. |
| 392 api.run.rmtree(task.task_output_dir) | 414 api.run.rmtree(task.task_output_dir) |
| 393 api.swarming.collect_swarming_task(task) | 415 api.swarming.collect_swarming_task(task) |
| 394 | 416 |
| 395 # Upload the results. | 417 # Upload the results. |
| 396 if upload_perf_results: | 418 if api.vars.upload_perf_results: |
| 397 perf_data_dir = api.path['slave_build'].join( | 419 perf_data_dir = api.path['slave_build'].join( |
| 398 'perfdata', api.properties['buildername'], 'data') | 420 'perfdata', api.properties['buildername'], 'data') |
| 399 git_timestamp = api.git.get_timestamp(test_data='1408633190', | 421 git_timestamp = api.git.get_timestamp(test_data='1408633190', |
| 400 infra_step=True) | 422 infra_step=True) |
| 401 api.run.rmtree(perf_data_dir) | 423 api.run.rmtree(perf_data_dir) |
| 402 api.file.makedirs('perf_dir', perf_data_dir, infra_step=True) | 424 api.file.makedirs('perf_dir', perf_data_dir, infra_step=True) |
| 403 src_results_file = task.task_output_dir.join( | 425 src_results_file = task.task_output_dir.join( |
| 404 '0', 'perfdata', api.properties['buildername'], 'data', | 426 '0', 'perfdata', api.properties['buildername'], 'data', |
| 405 'nanobench_%s.json' % got_revision) | 427 'nanobench_%s.json' % got_revision) |
| 406 dst_results_file = perf_data_dir.join( | 428 dst_results_file = perf_data_dir.join( |
| 407 'nanobench_%s_%s.json' % (got_revision, git_timestamp)) | 429 'nanobench_%s_%s.json' % (got_revision, git_timestamp)) |
| 408 api.file.copy('perf_results', src_results_file, dst_results_file, | 430 api.file.copy('perf_results', src_results_file, dst_results_file, |
| 409 infra_step=True) | 431 infra_step=True) |
| 410 | 432 |
| 411 gsutil_path = api.path['slave_build'].join( | 433 gsutil_path = api.path['slave_build'].join( |
| 412 'skia', 'infra', 'bots', '.recipe_deps', 'depot_tools', 'third_party', | 434 'skia', 'infra', 'bots', '.recipe_deps', 'depot_tools', 'third_party', |
| 413 'gsutil', 'gsutil') | 435 'gsutil', 'gsutil') |
| 414 upload_args = [api.properties['buildername'], api.properties['buildnumber'], | 436 upload_args = [api.properties['buildername'], api.properties['buildnumber'], |
| 415 perf_data_dir, got_revision, gsutil_path] | 437 perf_data_dir, got_revision, gsutil_path] |
| 416 if is_trybot: | 438 if is_trybot: |
| 417 upload_args.append(api.properties['issue']) | 439 upload_args.append(api.properties['issue']) |
| 418 api.python( | 440 api.python( |
| 419 'Upload perf results', | 441 'Upload perf results', |
| 420 script=api.core.resource('upload_bench_results.py'), | 442 script=api.core.resource('upload_bench_results.py'), |
| 421 args=upload_args, | 443 args=upload_args, |
| 422 cwd=api.path['checkout'], | 444 cwd=api.path['checkout'], |
| 423 infra_step=True) | 445 infra_step=True) |
| 424 | 446 |
| 425 | 447 |
| 426 def test_steps_trigger(api, builder_spec, got_revision, infrabots_dir, | 448 def test_steps_trigger(api, builder_cfg, got_revision, infrabots_dir, |
| 427 extra_hashes, cipd_packages): | 449 extra_hashes, cipd_packages): |
| 428 """Trigger DM via Swarming.""" | 450 """Trigger DM via Swarming.""" |
| 429 expiration, hard_timeout, io_timeout = get_timeouts( | 451 expiration, hard_timeout, io_timeout = get_timeouts(builder_cfg) |
| 430 builder_spec['builder_cfg']) | |
| 431 return trigger_task( | 452 return trigger_task( |
| 432 api, | 453 api, |
| 433 'test', | 454 'test', |
| 434 api.properties['buildername'], | 455 api.properties['buildername'], |
| 435 api.properties['mastername'], | 456 api.properties['mastername'], |
| 436 api.properties['slavename'], | 457 api.properties['slavename'], |
| 437 api.properties['buildnumber'], | 458 api.properties['buildnumber'], |
| 438 builder_spec, | 459 builder_cfg, |
| 439 got_revision, | 460 got_revision, |
| 440 infrabots_dir, | 461 infrabots_dir, |
| 441 extra_isolate_hashes=extra_hashes, | 462 extra_isolate_hashes=extra_hashes, |
| 442 expiration=expiration, | 463 expiration=expiration, |
| 443 hard_timeout=hard_timeout, | 464 hard_timeout=hard_timeout, |
| 444 io_timeout=io_timeout, | 465 io_timeout=io_timeout, |
| 445 cipd_packages=cipd_packages) | 466 cipd_packages=cipd_packages) |
| 446 | 467 |
| 447 | 468 |
| 448 def test_steps_collect(api, task, upload_dm_results, got_revision, is_trybot, | 469 def test_steps_collect(api, task, got_revision, is_trybot, builder_cfg): |
| 449 builder_cfg): | |
| 450 """Collect the test results from Swarming.""" | 470 """Collect the test results from Swarming.""" |
| 451 # Wait for tests to finish, download the results. | 471 # Wait for tests to finish, download the results. |
| 452 api.run.rmtree(task.task_output_dir) | 472 api.run.rmtree(task.task_output_dir) |
| 453 api.swarming.collect_swarming_task(task) | 473 api.swarming.collect_swarming_task(task) |
| 454 | 474 |
| 455 # Upload the results. | 475 # Upload the results. |
| 456 if upload_dm_results: | 476 if api.vars.upload_dm_results: |
| 457 dm_dir = api.path['slave_build'].join('dm') | 477 dm_dir = api.path['slave_build'].join('dm') |
| 458 dm_src = task.task_output_dir.join('0', 'dm') | 478 dm_src = task.task_output_dir.join('0', 'dm') |
| 459 api.run.rmtree(dm_dir) | 479 api.run.rmtree(dm_dir) |
| 460 api.file.copytree('dm_dir', dm_src, dm_dir, infra_step=True) | 480 api.file.copytree('dm_dir', dm_src, dm_dir, infra_step=True) |
| 461 | 481 |
| 462 # Upload them to Google Storage. | 482 # Upload them to Google Storage. |
| 463 api.python( | 483 api.python( |
| 464 'Upload DM Results', | 484 'Upload DM Results', |
| 465 script=api.core.resource('upload_dm_results.py'), | 485 script=api.core.resource('upload_dm_results.py'), |
| 466 args=[ | 486 args=[ |
| 467 dm_dir, | 487 dm_dir, |
| 468 got_revision, | 488 got_revision, |
| 469 api.properties['buildername'], | 489 api.properties['buildername'], |
| 470 api.properties['buildnumber'], | 490 api.properties['buildnumber'], |
| 471 api.properties['issue'] if is_trybot else '', | 491 api.properties['issue'] if is_trybot else '', |
| 472 api.path['slave_build'].join('skia', 'common', 'py', 'utils'), | 492 api.path['slave_build'].join('skia', 'common', 'py', 'utils'), |
| 473 ], | 493 ], |
| 474 cwd=api.path['checkout'], | 494 cwd=api.path['checkout'], |
| 475 env=api.vars.gsutil_env('chromium-skia-gm.boto'), | 495 env=gsutil_env(api, 'chromium-skia-gm.boto'), |
| 476 infra_step=True) | 496 infra_step=True) |
| 477 | 497 |
| 478 if builder_cfg['configuration'] == 'Coverage': | 498 if builder_cfg['configuration'] == 'Coverage': |
| 479 upload_coverage_results(api, task, got_revision, is_trybot) | 499 upload_coverage_results(api, task, got_revision, is_trybot) |
| 480 | 500 |
| 481 | 501 |
| 482 def upload_coverage_results(api, task, got_revision, is_trybot): | 502 def upload_coverage_results(api, task, got_revision, is_trybot): |
| 483 results_dir = task.task_output_dir.join('0') | 503 results_dir = task.task_output_dir.join('0') |
| 484 git_timestamp = api.git.get_timestamp(test_data='1408633190', | 504 git_timestamp = api.git.get_timestamp(test_data='1408633190', |
| 485 infra_step=True) | 505 infra_step=True) |
| (...skipping 33 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 519 gsutil_path = api.depot_tools.gsutil_py_path | 539 gsutil_path = api.depot_tools.gsutil_py_path |
| 520 upload_args = [api.properties['buildername'], api.properties['buildnumber'], | 540 upload_args = [api.properties['buildername'], api.properties['buildnumber'], |
| 521 results_dir, got_revision, gsutil_path] | 541 results_dir, got_revision, gsutil_path] |
| 522 if is_trybot: | 542 if is_trybot: |
| 523 upload_args.append(api.properties['issue']) | 543 upload_args.append(api.properties['issue']) |
| 524 api.python( | 544 api.python( |
| 525 'upload nanobench coverage results', | 545 'upload nanobench coverage results', |
| 526 script=api.core.resource('upload_bench_results.py'), | 546 script=api.core.resource('upload_bench_results.py'), |
| 527 args=upload_args, | 547 args=upload_args, |
| 528 cwd=api.path['checkout'], | 548 cwd=api.path['checkout'], |
| 529 env=api.vars.gsutil_env('chromium-skia-gm.boto'), | 549 env=gsutil_env(api, 'chromium-skia-gm.boto'), |
| 530 infra_step=True) | 550 infra_step=True) |
| 531 | 551 |
| 532 # Transform the coverage_by_line_${git_hash}.json file received from | 552 # Transform the coverage_by_line_${git_hash}.json file received from |
| 533 # swarming bot into a coverage_by_line_${git_hash}_${timestamp}.json file. | 553 # swarming bot into a coverage_by_line_${git_hash}_${timestamp}.json file. |
| 534 src_lbl_file = results_dir.join('coverage_by_line_%s.json' % got_revision) | 554 src_lbl_file = results_dir.join('coverage_by_line_%s.json' % got_revision) |
| 535 dst_lbl_file_basename = 'coverage_by_line_%s_%s.json' % ( | 555 dst_lbl_file_basename = 'coverage_by_line_%s_%s.json' % ( |
| 536 got_revision, git_timestamp) | 556 got_revision, git_timestamp) |
| 537 dst_lbl_file = results_dir.join(dst_lbl_file_basename) | 557 dst_lbl_file = results_dir.join(dst_lbl_file_basename) |
| 538 api.file.copy('Line-by-line coverage JSON', src_lbl_file, dst_lbl_file, | 558 api.file.copy('Line-by-line coverage JSON', src_lbl_file, dst_lbl_file, |
| 539 infra_step=True) | 559 infra_step=True) |
| (...skipping 48 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 588 api.swarming.setup( | 608 api.swarming.setup( |
| 589 infrabots_dir.join('tools', 'luci-go'), | 609 infrabots_dir.join('tools', 'luci-go'), |
| 590 swarming_rev='') | 610 swarming_rev='') |
| 591 | 611 |
| 592 # Run gsutil.py to ensure that it's installed. | 612 # Run gsutil.py to ensure that it's installed. |
| 593 api.gsutil(['help']) | 613 api.gsutil(['help']) |
| 594 | 614 |
| 595 extra_hashes = [] | 615 extra_hashes = [] |
| 596 | 616 |
| 597 # Get ready to compile. | 617 # Get ready to compile. |
| 598 compile_cipd_deps = [] | |
| 599 extra_compile_hashes = [] | |
| 600 | |
| 601 infrabots_dir = api.path['checkout'].join('infra', 'bots') | 618 infrabots_dir = api.path['checkout'].join('infra', 'bots') |
| 602 if 'Infra' in api.properties['buildername']: | 619 if 'Infra' in api.properties['buildername']: |
| 603 return infra_swarm(api, got_revision, infrabots_dir, extra_hashes) | 620 return infra_swarm(api, got_revision, infrabots_dir, extra_hashes) |
| 604 | 621 |
| 605 builder_spec = api.vars.get_builder_spec(api.properties['buildername']) | 622 builder_cfg = api.builder_name_schema.DictForBuilderName( |
| 606 builder_cfg = builder_spec['builder_cfg'] | 623 api.properties['buildername']) |
| 607 | 624 |
| 608 if 'RecreateSKPs' in api.properties['buildername']: | 625 if 'RecreateSKPs' in api.properties['buildername']: |
| 609 recreate_skps_swarm(api, builder_spec, got_revision, infrabots_dir, | 626 recreate_skps_swarm(api, builder_cfg, got_revision, infrabots_dir, |
| 610 extra_hashes) | 627 extra_hashes) |
| 611 return | 628 return |
| 612 if 'Android' in api.properties['buildername']: | |
| 613 compile_cipd_deps.append(cipd_pkg(api, infrabots_dir, 'android_sdk')) | |
| 614 | 629 |
| 615 # Compile. | 630 # Compile. |
| 616 do_compile_steps = builder_spec.get('do_compile_steps', True) | 631 do_compile_steps = True |
| 632 if 'Coverage' in api.properties['buildername']: |
| 633 do_compile_steps = False |
| 617 if do_compile_steps: | 634 if do_compile_steps: |
| 618 extra_hashes.append(compile_steps_swarm( | 635 extra_hashes.append(compile_steps_swarm( |
| 619 api, builder_spec, got_revision, infrabots_dir, extra_compile_hashes, | 636 api, builder_cfg, got_revision, infrabots_dir)) |
| 620 cipd_packages=compile_cipd_deps)) | |
| 621 | 637 |
| 622 if builder_cfg['role'] == 'Housekeeper': | 638 if builder_cfg['role'] == 'Housekeeper': |
| 623 housekeeper_swarm(api, builder_spec, got_revision, infrabots_dir, | 639 housekeeper_swarm(api, builder_cfg, got_revision, infrabots_dir, |
| 624 extra_hashes) | 640 extra_hashes) |
| 625 return | 641 return |
| 626 | 642 |
| 627 # Get ready to test/perf. | 643 # Get ready to test/perf. |
| 628 | 644 |
| 629 # CIPD packages needed by test/perf. | 645 # CIPD packages needed by test/perf. |
| 630 cipd_packages = [] | 646 cipd_packages = [] |
| 631 | 647 |
| 632 do_test_steps = builder_spec['do_test_steps'] | 648 do_test_steps = ( |
| 633 do_perf_steps = builder_spec['do_perf_steps'] | 649 builder_cfg['role'] == api.builder_name_schema.BUILDER_ROLE_TEST) |
| 650 do_perf_steps = ( |
| 651 builder_cfg['role'] == api.builder_name_schema.BUILDER_ROLE_PERF) |
| 634 | 652 |
| 635 if not (do_test_steps or do_perf_steps): | 653 if not (do_test_steps or do_perf_steps): |
| 636 return | 654 return |
| 637 | 655 |
| 638 # SKPs, SkImages. | 656 # SKPs, SkImages. |
| 639 cipd_packages.append(cipd_pkg(api, infrabots_dir, 'skp')) | 657 cipd_packages.append(cipd_pkg(api, infrabots_dir, 'skp')) |
| 640 cipd_packages.append(cipd_pkg(api, infrabots_dir, 'skimage')) | 658 cipd_packages.append(cipd_pkg(api, infrabots_dir, 'skimage')) |
| 641 | 659 |
| 642 # Trigger test and perf tasks. | 660 # Trigger test and perf tasks. |
| 643 test_task = None | 661 test_task = None |
| 644 perf_task = None | 662 perf_task = None |
| 645 if do_test_steps: | 663 if do_test_steps: |
| 646 test_task = test_steps_trigger(api, builder_spec, got_revision, | 664 test_task = test_steps_trigger(api, builder_cfg, got_revision, |
| 647 infrabots_dir, extra_hashes, cipd_packages) | 665 infrabots_dir, extra_hashes, cipd_packages) |
| 648 if do_perf_steps: | 666 if do_perf_steps: |
| 649 perf_task = perf_steps_trigger(api, builder_spec, got_revision, | 667 perf_task = perf_steps_trigger(api, builder_cfg, got_revision, |
| 650 infrabots_dir, extra_hashes, cipd_packages) | 668 infrabots_dir, extra_hashes, cipd_packages) |
| 651 is_trybot = builder_cfg['is_trybot'] | 669 is_trybot = builder_cfg['is_trybot'] |
| 670 |
| 671 # Wait for results, then upload them if necessary. |
| 672 |
| 652 if test_task: | 673 if test_task: |
| 653 test_steps_collect(api, test_task, builder_spec['upload_dm_results'], | 674 test_steps_collect(api, test_task, |
| 654 got_revision, is_trybot, builder_cfg) | 675 got_revision, is_trybot, builder_cfg) |
| 676 |
| 655 if perf_task: | 677 if perf_task: |
| 656 perf_steps_collect(api, perf_task, builder_spec['upload_perf_results'], | 678 perf_steps_collect(api, perf_task, |
| 657 got_revision, is_trybot) | 679 got_revision, is_trybot) |
| 658 | 680 |
| 659 | 681 |
| 660 def test_for_bot(api, builder, mastername, slavename, testname=None): | 682 def test_for_bot(api, builder, mastername, slavename, testname=None): |
| 661 """Generate a test for the given bot.""" | 683 """Generate a test for the given bot.""" |
| 662 testname = testname or builder | 684 testname = testname or builder |
| 663 test = ( | 685 test = ( |
| 664 api.test(testname) + | 686 api.test(testname) + |
| 665 api.properties(buildername=builder, | 687 api.properties(buildername=builder, |
| 666 mastername=mastername, | 688 mastername=mastername, |
| (...skipping 38 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 705 test += api.path.exists(*paths) | 727 test += api.path.exists(*paths) |
| 706 | 728 |
| 707 return test | 729 return test |
| 708 | 730 |
| 709 | 731 |
def GenTests(api):
  """Yield a recipe simulation test for every configured builder.

  TEST_BUILDERS maps mastername -> slavename -> list of builder names;
  one test (built by test_for_bot) is yielded per (master, slave,
  builder) triple.

  Args:
    api: recipe test API object, forwarded to test_for_bot.

  Yields:
    Recipe simulation test objects, one per builder.
  """
  # items() instead of iteritems(): identical iteration behavior on
  # Python 2 and also valid on Python 3 (iteritems() was removed there).
  for mastername, slaves in TEST_BUILDERS.items():
    for slavename, builders_by_slave in slaves.items():
      for builder in builders_by_slave:
        yield test_for_bot(api, builder, mastername, slavename)
| OLD | NEW |