OLD | NEW |
(Empty) | |
| 1 # Copyright 2014 The Chromium Authors. All rights reserved. |
| 2 # Use of this source code is governed by a BSD-style license that can be |
| 3 # found in the LICENSE file. |
| 4 |
| 5 |
| 6 # pylint: disable=W0201 |
| 7 |
| 8 |
| 9 import json |
| 10 import os |
| 11 import re |
| 12 import sys |
| 13 |
| 14 from recipe_engine import recipe_api |
| 15 from recipe_engine import config_types |
| 16 |
| 17 from . import android_flavor |
| 18 from . import cmake_flavor |
| 19 from . import coverage_flavor |
| 20 from . import default_flavor |
| 21 from . import fake_specs |
| 22 from . import ios_flavor |
| 23 from . import pdfium_flavor |
| 24 from . import valgrind_flavor |
| 25 from . import xsan_flavor |
| 26 |
| 27 |
# Name of the .boto credentials file used by gsutil for Google Storage access.
BOTO_CHROMIUM_SKIA_GM = 'chromium-skia-gm.boto'

# Google Storage subdirectory templates; the '%s' is presumably filled with a
# version number by callers (not used in this module's visible code).
GS_SUBDIR_TMPL_SK_IMAGE = 'skimage/v%s'
GS_SUBDIR_TMPL_SKP = 'playback_%s/skps'

# Canned asset versions used as step test data during recipe simulation.
TEST_EXPECTED_SKP_VERSION = '42'
TEST_EXPECTED_SK_IMAGE_VERSION = '42'

# Filenames which record the currently-downloaded asset versions in tmp_dir.
VERSION_FILE_SK_IMAGE = 'SK_IMAGE_VERSION'
VERSION_FILE_SKP = 'SKP_VERSION'

# Sentinel meaning "no version present".
# NOTE(review): versions read from files are strings, so comparisons against
# this int are always unequal by type — see check_actual_version / copy_dir.
VERSION_NONE = -1

# Build outputs matching these glob patterns are copied from the build
# output directory into the swarming output directory after compilation.
BUILD_PRODUCTS_ISOLATE_WHITELIST = [
  'dm',
  'dm.exe',
  'nanobench',
  'nanobench.exe',
  '*.so',
  '*.dll',
  '*.dylib',
  'skia_launcher',
  'lib/*.so',
  'iOSShell.app',
  'iOSShell.ipa',
  'visualbench',
  'visualbench.exe',
]
| 56 |
| 57 |
def is_android(builder_cfg):
  """Return True when the given builder config targets Android."""
  extra = builder_cfg.get('extra_config', '')
  return 'Android' in extra or builder_cfg.get('os') == 'Android'
| 62 |
| 63 |
def is_cmake(builder_cfg):
  """Return True when the given builder config uses CMake."""
  extra = builder_cfg.get('extra_config', '')
  return 'CMake' in extra
| 66 |
| 67 |
def is_ios(builder_cfg):
  """Return True when the given builder config targets iOS."""
  extra = builder_cfg.get('extra_config', '')
  return 'iOS' in extra or builder_cfg.get('os') == 'iOS'
| 71 |
| 72 |
def is_pdfium(builder_cfg):
  """Return True when the given builder config builds for PDFium."""
  extra = builder_cfg.get('extra_config', '')
  return 'PDFium' in extra
| 75 |
| 76 |
def is_valgrind(builder_cfg):
  """Return True when the given builder config runs under Valgrind."""
  extra = builder_cfg.get('extra_config', '')
  return 'Valgrind' in extra
| 79 |
| 80 |
def is_xsan(builder_cfg):
  """Return True when the given builder config uses a compiler sanitizer."""
  extra = builder_cfg.get('extra_config', '')
  return any(san in extra for san in ('ASAN', 'MSAN', 'TSAN'))
| 85 |
| 86 |
| 87 class SkiaApi(recipe_api.RecipeApi): |
| 88 |
| 89 def get_flavor(self, builder_cfg): |
| 90 """Return a flavor utils object specific to the given builder.""" |
| 91 if is_android(builder_cfg): |
| 92 return android_flavor.AndroidFlavorUtils(self) |
| 93 elif is_cmake(builder_cfg): |
| 94 return cmake_flavor.CMakeFlavorUtils(self) |
| 95 elif is_ios(builder_cfg): |
| 96 return ios_flavor.iOSFlavorUtils(self) |
| 97 elif is_pdfium(builder_cfg): |
| 98 return pdfium_flavor.PDFiumFlavorUtils(self) |
| 99 elif is_valgrind(builder_cfg): |
| 100 return valgrind_flavor.ValgrindFlavorUtils(self) |
| 101 elif is_xsan(builder_cfg): |
| 102 return xsan_flavor.XSanFlavorUtils(self) |
| 103 elif builder_cfg.get('configuration') == 'Coverage': |
| 104 return coverage_flavor.CoverageFlavorUtils(self) |
| 105 else: |
| 106 return default_flavor.DefaultFlavorUtils(self) |
| 107 |
| 108 @property |
| 109 def home_dir(self): |
| 110 """Find the home directory.""" |
| 111 home_dir = os.path.expanduser('~') |
| 112 if self._test_data.enabled: |
| 113 home_dir = '[HOME]' |
| 114 return home_dir |
| 115 |
| 116 def gsutil_env(self, boto_file): |
| 117 """Environment variables for gsutil.""" |
| 118 boto_path = None |
| 119 if boto_file: |
| 120 boto_path = self.m.path.join(self.home_dir, boto_file) |
| 121 return {'AWS_CREDENTIAL_FILE': boto_path, |
| 122 'BOTO_CONFIG': boto_path} |
| 123 |
| 124 def get_builder_spec(self, skia_dir, builder_name): |
| 125 """Obtain the buildbot spec for the given builder.""" |
| 126 fake_spec = None |
| 127 if self._test_data.enabled: |
| 128 fake_spec = fake_specs.FAKE_SPECS[builder_name] |
| 129 builder_spec = self.json_from_file( |
| 130 skia_dir.join('tools', 'buildbot_spec.py'), |
| 131 skia_dir, |
| 132 builder_name, |
| 133 fake_spec) |
| 134 return builder_spec |
| 135 |
| 136 def make_path(self, *path): |
| 137 """Return a Path object for the given path.""" |
| 138 key = 'custom_%s' % '_'.join(path) |
| 139 self.m.path.c.base_paths[key] = tuple(path) |
| 140 return self.m.path[key] |
| 141 |
  def setup(self):
    """Prepare the bot to run.

    Reads build properties, configures paths and environments, performs the
    Skia checkout, loads the builder spec and derives all per-run attributes
    (output dirs, flags, flavor). Must run before any other step method.
    """
    # Setup
    self.failed = []  # StepFailures accumulated by self.run().

    self.builder_name = self.m.properties['buildername']
    self.master_name = self.m.properties['mastername']
    self.slave_name = self.m.properties['slavename']

    self.slave_dir = self.m.path['slave_build']
    self.checkout_root = self.slave_dir
    self.default_env = {}
    self.gclient_env = {}
    self.is_compile_bot = self.builder_name.startswith('Build-')

    self.default_env['CHROME_HEADLESS'] = '1'
    # The 'build' and 'depot_tools' directories are provided through isolate
    # and aren't in the expected location, so we need to override them.
    self.m.path.c.base_paths['depot_tools'] = (
        self.m.path.c.base_paths['slave_build'] +
        ('build', 'scripts', 'slave', '.recipe_deps', 'depot_tools'))
    if 'Win' in self.builder_name:
      # Windows bots use a fixed depot_tools install instead.
      self.m.path.c.base_paths['depot_tools'] = (
          'c:\\', 'Users', 'chrome-bot', 'depot_tools')
    self.m.path.c.base_paths['build'] = (
        self.m.path.c.base_paths['slave_build'] + ('build',))
    self.default_env['PYTHONPATH'] = self.m.path['build'].join('scripts')

    # Compile bots keep a persistent checkout.
    self.persistent_checkout = (self.is_compile_bot or
                                'RecreateSKPs' in self.builder_name)
    if self.persistent_checkout:
      if 'Win' in self.builder_name:
        self.checkout_root = self.make_path('C:\\', 'b', 'work')
        self.gclient_cache = self.make_path('C:\\', 'b', 'cache')
      else:
        self.checkout_root = self.make_path('/', 'b', 'work')
        self.gclient_cache = self.make_path('/', 'b', 'cache')

    self.skia_dir = self.checkout_root.join('skia')
    self.infrabots_dir = self.skia_dir.join('infra', 'bots')

    # Some bots also require a checkout of chromium.
    self._need_chromium_checkout = 'CommandBuffer' in self.builder_name
    if 'CommandBuffer' in self.builder_name:
      self.gclient_env['GYP_CHROMIUM_NO_ACTION'] = '0'
    if ((self.is_compile_bot and
         'SAN' in self.builder_name) or
        'RecreateSKPs' in self.builder_name):
      self._need_chromium_checkout = True

    # Some bots also require a checkout of PDFium.
    self._need_pdfium_checkout = 'PDFium' in self.builder_name

    # Check out the Skia code.
    self.checkout_steps()

    # Obtain the spec for this builder from the Skia repo. Use it to set more
    # properties.
    self.builder_spec = self.get_builder_spec(self.skia_dir, self.builder_name)

    self.builder_cfg = self.builder_spec['builder_cfg']
    self.role = self.builder_cfg['role']

    # Set some important variables.
    self.resource_dir = self.skia_dir.join('resources')
    self.images_dir = self.slave_dir.join('skimage')
    if not self.m.path.exists(self.infrabots_dir.join(
        'assets', 'skimage', 'VERSION')):
      # TODO(borenet): Remove this once enough time has passed.
      self.images_dir = self.slave_dir.join('images')
    self.skia_out = self.skia_dir.join('out', self.builder_name)
    self.swarming_out_dir = self.make_path(self.m.properties['swarm_out_dir'])
    self.local_skp_dir = self.slave_dir.join('skp')
    if not self.m.path.exists(self.infrabots_dir.join(
        'assets', 'skp', 'VERSION')):
      # TODO(borenet): Remove this once enough time has passed.
      self.local_skp_dir = self.slave_dir.join('skps')
    if not self.is_compile_bot:
      # Non-compile bots receive prebuilt binaries in a fixed 'out' dir.
      self.skia_out = self.slave_dir.join('out')
    self.tmp_dir = self.m.path['slave_build'].join('tmp')
    if not self.m.path.exists(self.tmp_dir):
      self._run_once(self.m.file.makedirs,
                     'tmp_dir',
                     self.tmp_dir,
                     infra_step=True)

    self.gsutil_env_chromium_skia_gm = self.gsutil_env(BOTO_CHROMIUM_SKIA_GM)

    # Populated lazily: device_dirs by install(), _ccache by ccache().
    self.device_dirs = None
    self._ccache = None
    self._checked_for_ccache = False
    self.configuration = self.builder_spec['configuration']
    self.default_env.update({'SKIA_OUT': self.skia_out,
                             'BUILDTYPE': self.configuration})
    self.default_env.update(self.builder_spec['env'])
    self.build_targets = [str(t) for t in self.builder_spec['build_targets']]
    self.do_compile_steps = self.builder_spec.get('do_compile_steps', True)
    self.do_test_steps = self.builder_spec['do_test_steps']
    self.do_perf_steps = self.builder_spec['do_perf_steps']
    self.is_trybot = self.builder_cfg['is_trybot']
    self.upload_dm_results = self.builder_spec['upload_dm_results']
    self.upload_perf_results = self.builder_spec['upload_perf_results']
    self.dm_dir = self.m.path.join(
        self.swarming_out_dir, 'dm')
    self.perf_data_dir = self.m.path.join(self.swarming_out_dir,
        'perfdata', self.builder_name, 'data')
    self.dm_flags = self.builder_spec['dm_flags']
    self.nanobench_flags = self.builder_spec['nanobench_flags']

    self.flavor = self.get_flavor(self.builder_cfg)
| 253 |
| 254 def check_failure(self): |
| 255 """Raise an exception if any step failed.""" |
| 256 if self.failed: |
| 257 raise self.m.step.StepFailure('Failed build steps: %s' % |
| 258 ', '.join([f.name for f in self.failed])) |
| 259 |
| 260 def _run_once(self, fn, *args, **kwargs): |
| 261 if not hasattr(self, '_already_ran'): |
| 262 self._already_ran = {} |
| 263 if not fn.__name__ in self._already_ran: |
| 264 self._already_ran[fn.__name__] = fn(*args, **kwargs) |
| 265 return self._already_ran[fn.__name__] |
| 266 |
| 267 def update_repo(self, parent_dir, repo): |
| 268 """Update an existing repo. This is safe to call without gen_steps.""" |
| 269 repo_path = parent_dir.join(repo.name) |
| 270 if self.m.path.exists(repo_path): # pragma: nocover |
| 271 if self.m.platform.is_win: |
| 272 git = 'git.bat' |
| 273 else: |
| 274 git = 'git' |
| 275 self.m.step('git remote set-url', |
| 276 cmd=[git, 'remote', 'set-url', 'origin', repo.url], |
| 277 cwd=repo_path, |
| 278 infra_step=True) |
| 279 self.m.step('git fetch', |
| 280 cmd=[git, 'fetch'], |
| 281 cwd=repo_path, |
| 282 infra_step=True) |
| 283 self.m.step('git reset', |
| 284 cmd=[git, 'reset', '--hard', repo.revision], |
| 285 cwd=repo_path, |
| 286 infra_step=True) |
| 287 self.m.step('git clean', |
| 288 cmd=[git, 'clean', '-d', '-f'], |
| 289 cwd=repo_path, |
| 290 infra_step=True) |
| 291 |
  def checkout_steps(self):
    """Run the steps to obtain a checkout of Skia.

    For non-persistent checkouts (isolate-provided) this only records the
    checkout path and revision. Otherwise it configures gclient solutions
    for Skia (plus Chromium and/or PDFium when needed) and syncs them.
    """
    cfg_kwargs = {}
    if not self.persistent_checkout:
      # We should've obtained the Skia checkout through isolates, so we don't
      # need to perform the checkout ourselves.
      self.m.path['checkout'] = self.skia_dir
      self.got_revision = self.m.properties['revision']
      return

    # Use a persistent gclient cache for Swarming.
    cfg_kwargs['CACHE_DIR'] = self.gclient_cache

    # Create the checkout path if necessary.
    if not self.m.path.exists(self.checkout_root):
      self.m.file.makedirs('checkout_path', self.checkout_root, infra_step=True)

    # Initial cleanup.
    gclient_cfg = self.m.gclient.make_config(**cfg_kwargs)
    skia = gclient_cfg.solutions.add()
    skia.name = 'skia'
    skia.managed = False
    skia.url = 'https://skia.googlesource.com/skia.git'
    skia.revision = self.m.properties.get('revision') or 'origin/master'
    self.update_repo(self.checkout_root, skia)

    # TODO(rmistry): Remove the below block after there is a solution for
    # crbug.com/616443
    entries_file = self.checkout_root.join('.gclient_entries')
    if self.m.path.exists(entries_file):
      self.m.file.remove('remove %s' % entries_file,
                         entries_file,
                         infra_step=True)  # pragma: no cover

    if self._need_chromium_checkout:
      chromium = gclient_cfg.solutions.add()
      chromium.name = 'src'
      chromium.managed = False
      chromium.url = 'https://chromium.googlesource.com/chromium/src.git'
      chromium.revision = 'origin/lkgr'
      self.update_repo(self.checkout_root, chromium)

    if self._need_pdfium_checkout:
      pdfium = gclient_cfg.solutions.add()
      pdfium.name = 'pdfium'
      pdfium.managed = False
      pdfium.url = 'https://pdfium.googlesource.com/pdfium.git'
      pdfium.revision = 'origin/master'
      self.update_repo(self.checkout_root, pdfium)

    # Run 'gclient sync'.
    gclient_cfg.got_revision_mapping['skia'] = 'got_revision'
    gclient_cfg.target_os.add('llvm')
    checkout_kwargs = {}
    checkout_kwargs['env'] = self.default_env
    update_step = self.m.gclient.checkout(gclient_config=gclient_cfg,
                                          cwd=self.checkout_root,
                                          **checkout_kwargs)

    # Record the synced revision and apply a trybot patch if there is one.
    self.got_revision = update_step.presentation.properties['got_revision']
    self.m.tryserver.maybe_apply_issue()

    if self._need_chromium_checkout:
      self.m.gclient.runhooks(cwd=self.checkout_root, env=self.gclient_env)
| 356 |
  def copy_build_products(self, src, dst):
    """Copy whitelisted build products from src to dst.

    Runs an inline Python step on the bot with the whitelist patterns baked
    into the program text; matching files are moved (not copied), creating
    destination subdirectories as needed.

    Args:
      src: source directory containing build outputs.
      dst: destination directory; created if missing.
    """
    self.m.python.inline(
        name='copy build products',
        program='''import errno
import glob
import os
import shutil
import sys

src = sys.argv[1]
dst = sys.argv[2]
build_products_whitelist = %s

try:
  os.makedirs(dst)
except OSError as e:
  if e.errno != errno.EEXIST:
    raise

for pattern in build_products_whitelist:
  path = os.path.join(src, pattern)
  for f in glob.glob(path):
    dst_path = os.path.join(dst, os.path.relpath(f, src))
    if not os.path.isdir(os.path.dirname(dst_path)):
      os.makedirs(os.path.dirname(dst_path))
    print 'Copying build product %%s to %%s' %% (f, dst_path)
    shutil.move(f, dst_path)
''' % str(BUILD_PRODUCTS_ISOLATE_WHITELIST),
        args=[src, dst],
        infra_step=True)
| 388 |
  def compile_steps(self, clobber=False):
    """Run the steps to build Skia.

    Compiles every configured build target via the flavor, then copies the
    whitelisted build products into the swarming output directory.

    Args:
      clobber: accepted for interface compatibility; not used in this body.
    """
    try:
      for target in self.build_targets:
        self.flavor.compile(target)
      self.copy_build_products(
          self.flavor.out_dir,
          self.swarming_out_dir.join('out', self.configuration))
      self.flavor.copy_extra_build_products(self.swarming_out_dir)
    finally:
      # On Windows, always kill lingering toolchain processes so they don't
      # hold locks on build outputs after the step finishes.
      if 'Win' in self.builder_cfg.get('os', ''):
        self.m.python.inline(
            name='cleanup',
            program='''import psutil
for p in psutil.process_iter():
  try:
    if p.name in ('mspdbsrv.exe', 'vctip.exe', 'cl.exe', 'link.exe'):
      p.kill()
  except psutil._error.AccessDenied:
    pass
''',
            infra_step=True)
| 411 |
| 412 def _readfile(self, filename, *args, **kwargs): |
| 413 """Convenience function for reading files.""" |
| 414 name = kwargs.pop('name') or 'read %s' % self.m.path.basename(filename) |
| 415 return self.m.file.read(name, filename, infra_step=True, *args, **kwargs) |
| 416 |
| 417 def _writefile(self, filename, contents): |
| 418 """Convenience function for writing files.""" |
| 419 return self.m.file.write('write %s' % self.m.path.basename(filename), |
| 420 filename, contents, infra_step=True) |
| 421 |
| 422 def rmtree(self, path): |
| 423 """Wrapper around api.file.rmtree with environment fix.""" |
| 424 env = {} |
| 425 env['PYTHONPATH'] = str(self.m.path['checkout'].join( |
| 426 'infra', 'bots', '.recipe_deps', 'build', 'scripts')) |
| 427 self.m.file.rmtree(self.m.path.basename(path), |
| 428 path, |
| 429 env=env, |
| 430 infra_step=True) |
| 431 |
| 432 def run(self, steptype, name, abort_on_failure=True, |
| 433 fail_build_on_failure=True, env=None, **kwargs): |
| 434 """Run a step. If it fails, keep going but mark the build status failed.""" |
| 435 env = dict(env or {}) |
| 436 env.update(self.default_env) |
| 437 try: |
| 438 return steptype(name=name, env=env, **kwargs) |
| 439 except self.m.step.StepFailure as e: |
| 440 if abort_on_failure: |
| 441 raise # pragma: no cover |
| 442 if fail_build_on_failure: |
| 443 self.failed.append(e) |
| 444 |
| 445 def check_actual_version(self, version_file, tmp_dir, test_actual_version): |
| 446 """Assert that we have an actually-downloaded version of the dir.""" |
| 447 actual_version_file = self.m.path.join(tmp_dir, version_file) |
| 448 actual_version = self._readfile( |
| 449 actual_version_file, |
| 450 name='Get downloaded %s' % version_file, |
| 451 test_data=test_actual_version).rstrip() |
| 452 assert actual_version != VERSION_NONE |
| 453 return actual_version |
| 454 |
  def copy_dir(self, host_version, version_file, tmp_dir,
               host_path, device_path, test_expected_version,
               test_actual_version):
    """Copy a versioned directory to the device if its version is stale.

    Reads the version file on the device; when it differs from host_version
    (or cannot be read), wipes the device directory, copies the host
    contents over, and writes the new version file.

    Args:
      host_version: version string currently present on the host.
      version_file: basename of the version file.
      tmp_dir: host directory holding the version file.
      host_path: host directory to copy from.
      device_path: device directory to copy to.
      test_expected_version: NOTE(review): accepted but unused in this body.
      test_actual_version: NOTE(review): accepted but unused in this body.
    """
    actual_version_file = self.m.path.join(tmp_dir, version_file)
    # Copy to device.
    device_version_file = self.flavor.device_path_join(
        self.device_dirs.tmp_dir, version_file)
    if str(actual_version_file) != str(device_version_file):
      try:
        device_version = self.flavor.read_file_on_device(device_version_file)
      except self.m.step.StepFailure:
        # Treat an unreadable device version file as "no version".
        device_version = VERSION_NONE
      if device_version != host_version:
        self.flavor.remove_file_on_device(device_version_file)
        self.flavor.create_clean_device_dir(device_path)
        self.flavor.copy_directory_contents_to_device(host_path, device_path)

        # Copy the new version file.
        self.flavor.copy_file_to_device(actual_version_file,
                                        device_version_file)
| 475 |
  def _copy_images(self):
    """Download and copy test images if needed.

    Returns:
      The skimage version string that was synced to the device.
    """
    version_file = self.infrabots_dir.join('assets', 'skimage', 'VERSION')
    if self.m.path.exists(version_file):
      # New-style asset: the VERSION file is checked into the repo; mirror it
      # into tmp_dir for later comparison.
      test_data = self.m.properties.get(
          'test_downloaded_sk_image_version', TEST_EXPECTED_SK_IMAGE_VERSION)
      version = self._readfile(version_file,
                               name='Get downloaded skimage VERSION',
                               test_data=test_data).rstrip()
      self._writefile(self.m.path.join(self.tmp_dir, VERSION_FILE_SK_IMAGE),
                      version)
    else:
      # TODO(borenet): Remove this once enough time has passed.
      version = self.check_actual_version(
          VERSION_FILE_SK_IMAGE,
          self.tmp_dir,
          test_actual_version=self.m.properties.get(
              'test_downloaded_sk_image_version',
              TEST_EXPECTED_SK_IMAGE_VERSION),
      )
    # Sync the image directory onto the device when its version is stale.
    self.copy_dir(
        version,
        VERSION_FILE_SK_IMAGE,
        self.tmp_dir,
        self.images_dir,
        self.device_dirs.images_dir,
        test_expected_version=self.m.properties.get(
            'test_downloaded_sk_image_version',
            TEST_EXPECTED_SK_IMAGE_VERSION),
        test_actual_version=self.m.properties.get(
            'test_downloaded_sk_image_version',
            TEST_EXPECTED_SK_IMAGE_VERSION))
    return version
| 509 |
  def _copy_skps(self):
    """Download and copy the SKPs if needed.

    Returns:
      The SKP version string that was synced to the device.
    """
    version_file = self.infrabots_dir.join('assets', 'skp', 'VERSION')
    if self.m.path.exists(version_file):
      # New-style asset: the VERSION file is checked into the repo; mirror it
      # into tmp_dir for later comparison.
      test_data = self.m.properties.get(
          'test_downloaded_skp_version', TEST_EXPECTED_SKP_VERSION)
      version = self._readfile(version_file,
                               name='Get downloaded SKP VERSION',
                               test_data=test_data).rstrip()
      self._writefile(self.m.path.join(self.tmp_dir, VERSION_FILE_SKP), version)
    else:
      # TODO(borenet): Remove this once enough time has passed.
      version = self.check_actual_version(
          VERSION_FILE_SKP,
          self.tmp_dir,
          test_actual_version=self.m.properties.get(
              'test_downloaded_skp_version',
              TEST_EXPECTED_SKP_VERSION),
      )
    # Sync the SKP directory onto the device when its version is stale.
    self.copy_dir(
        version,
        VERSION_FILE_SKP,
        self.tmp_dir,
        self.local_skp_dir,
        self.device_dirs.skp_dir,
        test_expected_version=self.m.properties.get(
            'test_downloaded_skp_version', TEST_EXPECTED_SKP_VERSION),
        test_actual_version=self.m.properties.get(
            'test_downloaded_skp_version', TEST_EXPECTED_SKP_VERSION))
    return version
| 540 |
| 541 def install(self): |
| 542 """Copy the required executables and files to the device.""" |
| 543 self.device_dirs = self.flavor.get_device_dirs() |
| 544 |
| 545 # Run any device-specific installation. |
| 546 self.flavor.install() |
| 547 |
| 548 # TODO(borenet): Only copy files which have changed. |
| 549 # Resources |
| 550 self.flavor.copy_directory_contents_to_device(self.resource_dir, |
| 551 self.device_dirs.resource_dir) |
| 552 |
  def ccache(self):
    """Return the path to ccache, probing at most once per run.

    Never probes on Windows. Returns None when ccache is unavailable or the
    probe step failed (the probe is best-effort and does not fail the build).
    """
    if not self._checked_for_ccache:
      self._checked_for_ccache = True
      if not self.m.platform.is_win:
        result = self.run(
            self.m.python.inline,
            name='has ccache?',
            program='''import json
import subprocess
import sys

ccache = None
try:
  ccache = subprocess.check_output(['which', 'ccache']).rstrip()
except:
  pass
print json.dumps({'ccache': ccache})
''',
            stdout=self.m.json.output(),
            infra_step=True,
            abort_on_failure=False,
            fail_build_on_failure=False)
        # self.run returns None on a swallowed failure, hence the guards.
        if result and result.stdout and result.stdout.get('ccache'):
          self._ccache = result.stdout['ccache']

    return self._ccache
| 579 |
| 580 def json_from_file(self, filename, cwd, builder_name, test_data): |
| 581 """Execute the given script to obtain JSON data.""" |
| 582 return self.m.python( |
| 583 'exec %s' % self.m.path.basename(filename), |
| 584 filename, |
| 585 args=[self.m.json.output(), builder_name], |
| 586 step_test_data=lambda: self.m.json.test_api.output(test_data), |
| 587 cwd=cwd, |
| 588 infra_step=True).json.output |
| 589 |
  def test_steps(self):
    """Run the DM test.

    Installs binaries/assets on the device, optionally fetches the list of
    uninteresting Gold hashes, assembles the DM command line and runs it,
    then copies results back to the host when uploading is enabled.
    """
    self._run_once(self.install)
    self._run_once(self._copy_skps)
    self._run_once(self._copy_images)

    use_hash_file = False
    if self.upload_dm_results:
      # This must run before we write anything into self.device_dirs.dm_dir
      # or we may end up deleting our output on machines where they're the same.
      self.flavor.create_clean_host_dir(self.dm_dir)
      if str(self.dm_dir) != str(self.device_dirs.dm_dir):
        self.flavor.create_clean_device_dir(self.device_dirs.dm_dir)

      # Obtain the list of already-generated hashes.
      hash_filename = 'uninteresting_hashes.txt'

      # Ensure that the tmp_dir exists.
      self._run_once(self.m.file.makedirs,
                     'tmp_dir',
                     self.tmp_dir,
                     infra_step=True)

      host_hashes_file = self.tmp_dir.join(hash_filename)
      hashes_file = self.flavor.device_path_join(
          self.device_dirs.tmp_dir, hash_filename)
      # Best-effort download with exponential backoff; a failure here must
      # not fail the build, DM simply runs without the hash file.
      self.run(
          self.m.python.inline,
          'get uninteresting hashes',
          program="""
import contextlib
import math
import socket
import sys
import time
import urllib2

HASHES_URL = 'https://gold.skia.org/_/hashes'
RETRIES = 5
TIMEOUT = 60
WAIT_BASE = 15

socket.setdefaulttimeout(TIMEOUT)
for retry in range(RETRIES):
  try:
    with contextlib.closing(
        urllib2.urlopen(HASHES_URL, timeout=TIMEOUT)) as w:
      hashes = w.read()
      with open(sys.argv[1], 'w') as f:
        f.write(hashes)
        break
  except Exception as e:
    print 'Failed to get uninteresting hashes from %s:' % HASHES_URL
    print e
    if retry == RETRIES:
      raise
    waittime = WAIT_BASE * math.pow(2, retry)
    print 'Retry in %d seconds.' % waittime
    time.sleep(waittime)
""",
          args=[host_hashes_file],
          cwd=self.skia_dir,
          abort_on_failure=False,
          fail_build_on_failure=False,
          infra_step=True)

      if self.m.path.exists(host_hashes_file):
        self.flavor.copy_file_to_device(host_hashes_file, hashes_file)
        use_hash_file = True

    # Run DM.
    properties = [
      'gitHash', self.got_revision,
      'master', self.master_name,
      'builder', self.builder_name,
      'build_number', self.m.properties['buildnumber'],
    ]
    if self.is_trybot:
      properties.extend([
        'issue', self.m.properties['issue'],
        'patchset', self.m.properties['patchset'],
      ])

    args = [
      'dm',
      '--undefok',   # This helps branches that may not know new flags.
      '--resourcePath', self.device_dirs.resource_dir,
      '--skps', self.device_dirs.skp_dir,
      '--images', self.flavor.device_path_join(
          self.device_dirs.images_dir, 'dm'),
      '--colorImages', self.flavor.device_path_join(self.device_dirs.images_dir,
                                                    'colorspace'),
      '--nameByHash',
      '--properties'
    ] + properties

    args.append('--key')
    args.extend(self._KeyParams())
    if use_hash_file:
      args.extend(['--uninterestingHashesFile', hashes_file])
    if self.upload_dm_results:
      args.extend(['--writePath', self.device_dirs.dm_dir])

    # Skip whichever backend this builder is not responsible for.
    skip_flag = None
    if self.builder_cfg.get('cpu_or_gpu') == 'CPU':
      skip_flag = '--nogpu'
    elif self.builder_cfg.get('cpu_or_gpu') == 'GPU':
      skip_flag = '--nocpu'
    if skip_flag:
      args.append(skip_flag)
    args.extend(self.dm_flags)

    self.run(self.flavor.step, 'dm', cmd=args, abort_on_failure=False,
             env=self.default_env)

    if self.upload_dm_results:
      # Copy images and JSON to host machine if needed.
      self.flavor.copy_directory_contents_to_host(self.device_dirs.dm_dir,
                                                  self.dm_dir)

    # See skia:2789.
    if ('Valgrind' in self.builder_name and
        self.builder_cfg.get('cpu_or_gpu') == 'GPU'):
      abandonGpuContext = list(args)
      abandonGpuContext.append('--abandonGpuContext')
      self.run(self.flavor.step, 'dm --abandonGpuContext',
               cmd=abandonGpuContext, abort_on_failure=False)
      preAbandonGpuContext = list(args)
      preAbandonGpuContext.append('--preAbandonGpuContext')
      self.run(self.flavor.step, 'dm --preAbandonGpuContext',
               cmd=preAbandonGpuContext, abort_on_failure=False,
               env=self.default_env)
| 722 |
  def perf_steps(self):
    """Run Skia benchmarks.

    Installs binaries/assets on the device, assembles and runs the
    nanobench (or visualbench) command line, and copies perf results back
    to the host when uploading is enabled.
    """
    self._run_once(self.install)
    self._run_once(self._copy_skps)
    self._run_once(self._copy_images)

    if self.upload_perf_results:
      self.flavor.create_clean_device_dir(self.device_dirs.perf_data_dir)

    # Run nanobench.
    properties = [
      '--properties',
      'gitHash', self.got_revision,
      'build_number', self.m.properties['buildnumber'],
    ]
    if self.is_trybot:
      properties.extend([
        'issue', self.m.properties['issue'],
        'patchset', self.m.properties['patchset'],
      ])

    target = 'nanobench'
    if 'VisualBench' in self.builder_name:
      target = 'visualbench'
    args = [
      target,
      '--undefok',   # This helps branches that may not know new flags.
      '-i', self.device_dirs.resource_dir,
      '--skps', self.device_dirs.skp_dir,
      '--images', self.flavor.device_path_join(
          self.device_dirs.images_dir, 'nanobench'),
    ]

    # Skip whichever backend this builder is not responsible for.
    skip_flag = None
    if self.builder_cfg.get('cpu_or_gpu') == 'CPU':
      skip_flag = '--nogpu'
    elif self.builder_cfg.get('cpu_or_gpu') == 'GPU':
      skip_flag = '--nocpu'
    if skip_flag:
      args.append(skip_flag)
    args.extend(self.nanobench_flags)

    if self.upload_perf_results:
      # Write JSON results plus the identifying properties and key.
      json_path = self.flavor.device_path_join(
          self.device_dirs.perf_data_dir,
          'nanobench_%s.json' % self.got_revision)
      args.extend(['--outResultsFile', json_path])
      args.extend(properties)

      keys_blacklist = ['configuration', 'role', 'is_trybot']
      args.append('--key')
      for k in sorted(self.builder_cfg.keys()):
        if not k in keys_blacklist:
          args.extend([k, self.builder_cfg[k]])

    self.run(self.flavor.step, target, cmd=args, abort_on_failure=False,
             env=self.default_env)

    # See skia:2789.
    if ('Valgrind' in self.builder_name and
        self.builder_cfg.get('cpu_or_gpu') == 'GPU'):
      abandonGpuContext = list(args)
      abandonGpuContext.extend(['--abandonGpuContext', '--nocpu'])
      self.run(self.flavor.step, '%s --abandonGpuContext' % target,
               cmd=abandonGpuContext, abort_on_failure=False,
               env=self.default_env)

    # Upload results.
    if self.upload_perf_results:
      self.m.file.makedirs('perf_dir', self.perf_data_dir)
      self.flavor.copy_directory_contents_to_host(
          self.device_dirs.perf_data_dir, self.perf_data_dir)
| 795 |
| 796 def cleanup_steps(self): |
| 797 """Run any cleanup steps.""" |
| 798 self.flavor.cleanup_steps() |
| 799 |
| 800 def _KeyParams(self): |
| 801 """Build a unique key from the builder name (as a list). |
| 802 |
| 803 E.g. arch x86 gpu GeForce320M mode MacMini4.1 os Mac10.6 |
| 804 """ |
| 805 # Don't bother to include role, which is always Test. |
| 806 # TryBots are uploaded elsewhere so they can use the same key. |
| 807 blacklist = ['role', 'is_trybot'] |
| 808 |
| 809 flat = [] |
| 810 for k in sorted(self.builder_cfg.keys()): |
| 811 if k not in blacklist: |
| 812 flat.append(k) |
| 813 flat.append(self.builder_cfg[k]) |
| 814 return flat |
OLD | NEW |