OLD | NEW |
| (Empty) |
1 # Copyright 2014 The Chromium Authors. All rights reserved. | |
2 # Use of this source code is governed by a BSD-style license that can be | |
3 # found in the LICENSE file. | |
4 | |
5 | |
6 # pylint: disable=W0201 | |
7 | |
8 | |
9 import json | |
10 import os | |
11 import re | |
12 import sys | |
13 | |
14 from recipe_engine import recipe_api | |
15 from recipe_engine import config_types | |
16 | |
17 from . import android_flavor | |
18 from . import cmake_flavor | |
19 from . import coverage_flavor | |
20 from . import default_flavor | |
21 from . import fake_specs | |
22 from . import gn_flavor | |
23 from . import ios_flavor | |
24 from . import pdfium_flavor | |
25 from . import valgrind_flavor | |
26 from . import xsan_flavor | |
27 | |
28 | |
# Name of the .boto credentials file used for gsutil/Google Storage access.
BOTO_CHROMIUM_SKIA_GM = 'chromium-skia-gm.boto'

# Google Storage subdirectory templates; '%s' is filled with a version number.
GS_SUBDIR_TMPL_SK_IMAGE = 'skimage/v%s'
GS_SUBDIR_TMPL_SKP = 'playback_%s/skps'

# Version strings used as default test data in recipe simulation.
TEST_EXPECTED_SKP_VERSION = '42'
TEST_EXPECTED_SK_IMAGE_VERSION = '42'

# Filenames which record the downloaded asset versions on disk.
VERSION_FILE_SK_IMAGE = 'SK_IMAGE_VERSION'
VERSION_FILE_SKP = 'SKP_VERSION'

# Sentinel meaning "no version present / could not be read".
VERSION_NONE = -1

# Glob patterns (relative to the build output dir) of build products which
# are copied into the Swarming output directory for isolation.
BUILD_PRODUCTS_ISOLATE_WHITELIST = [
  'dm',
  'dm.exe',
  'nanobench',
  'nanobench.exe',
  '*.so',
  '*.dll',
  '*.dylib',
  'skia_launcher',
  'lib/*.so',
  'iOSShell.app',
  'iOSShell.ipa',
  'visualbench',
  'visualbench.exe',
  'vulkan-1.dll',
]
58 | |
59 | |
def is_android(builder_cfg):
  """Determine whether the given builder is an Android builder."""
  extra = builder_cfg.get('extra_config', '')
  return 'Android' in extra or builder_cfg.get('os') == 'Android'
64 | |
65 | |
def is_cmake(builder_cfg):
  """Determine whether the given builder builds with CMake."""
  extra_config = builder_cfg.get('extra_config', '')
  return extra_config.find('CMake') != -1
68 | |
69 | |
def is_gn(builder_cfg):
  """Determine whether the given builder builds with GN.

  Note: this is an exact match on extra_config, not a substring test.
  """
  return builder_cfg.get('extra_config', '') == 'GN'
72 | |
73 | |
def is_ios(builder_cfg):
  """Determine whether the given builder targets iOS."""
  if builder_cfg.get('os') == 'iOS':
    return True
  return 'iOS' in builder_cfg.get('extra_config', '')
77 | |
78 | |
def is_pdfium(builder_cfg):
  """Determine whether the given builder builds for PDFium."""
  extra = builder_cfg.get('extra_config', '')
  return 'PDFium' in extra
81 | |
82 | |
def is_valgrind(builder_cfg):
  """Determine whether the given builder runs under Valgrind."""
  return builder_cfg.get('extra_config', '').count('Valgrind') > 0
85 | |
86 | |
def is_xsan(builder_cfg):
  """Determine whether the given builder uses a sanitizer (ASAN/MSAN/TSAN)."""
  extra = builder_cfg.get('extra_config', '')
  return any(san in extra for san in ('ASAN', 'MSAN', 'TSAN'))
91 | |
92 | |
93 class SkiaApi(recipe_api.RecipeApi): | |
94 | |
95 def get_flavor(self, builder_cfg): | |
96 """Return a flavor utils object specific to the given builder.""" | |
97 if is_android(builder_cfg): | |
98 return android_flavor.AndroidFlavorUtils(self) | |
99 elif is_cmake(builder_cfg): | |
100 return cmake_flavor.CMakeFlavorUtils(self) | |
101 elif is_gn(builder_cfg): | |
102 return gn_flavor.GNFlavorUtils(self) | |
103 elif is_ios(builder_cfg): | |
104 return ios_flavor.iOSFlavorUtils(self) | |
105 elif is_pdfium(builder_cfg): | |
106 return pdfium_flavor.PDFiumFlavorUtils(self) | |
107 elif is_valgrind(builder_cfg): | |
108 return valgrind_flavor.ValgrindFlavorUtils(self) | |
109 elif is_xsan(builder_cfg): | |
110 return xsan_flavor.XSanFlavorUtils(self) | |
111 elif builder_cfg.get('configuration') == 'Coverage': | |
112 return coverage_flavor.CoverageFlavorUtils(self) | |
113 else: | |
114 return default_flavor.DefaultFlavorUtils(self) | |
115 | |
  @property
  def home_dir(self):
    """Find the home directory.

    Returns '[HOME]' in recipe simulation so that test expectations are
    stable across machines.
    """
    home_dir = os.path.expanduser('~')
    if self._test_data.enabled:
      home_dir = '[HOME]'
    return home_dir
123 | |
124 def gsutil_env(self, boto_file): | |
125 """Environment variables for gsutil.""" | |
126 boto_path = None | |
127 if boto_file: | |
128 boto_path = self.m.path.join(self.home_dir, boto_file) | |
129 return {'AWS_CREDENTIAL_FILE': boto_path, | |
130 'BOTO_CONFIG': boto_path} | |
131 | |
  def get_builder_spec(self, skia_dir, builder_name):
    """Obtain the buildbot spec for the given builder.

    Executes tools/buildbot_spec.py from the Skia checkout and returns its
    JSON output.  In simulation the output comes from fake_specs instead.
    """
    fake_spec = None
    if self._test_data.enabled:
      fake_spec = fake_specs.FAKE_SPECS[builder_name]
    builder_spec = self.json_from_file(
      skia_dir.join('tools', 'buildbot_spec.py'),
      skia_dir,
      builder_name,
      fake_spec)
    return builder_spec
143 | |
  def make_path(self, *path):
    """Return a Path object for the given path.

    Registers the components as a custom base path in the recipe path
    module's config so that arbitrary absolute paths can be used as Paths.
    """
    key = 'custom_%s' % '_'.join(path)
    self.m.path.c.base_paths[key] = tuple(path)
    return self.m.path[key]
149 | |
  def setup(self):
    """Prepare the bot to run.

    Reads build properties, configures paths and environment, performs the
    Skia checkout, loads the builder spec from the checkout, and derives the
    instance attributes used by the compile/test/perf steps.  Must be called
    before any other step method.
    """
    # Setup
    self.failed = []  # StepFailures collected by self.run().

    self.builder_name = self.m.properties['buildername']
    self.master_name = self.m.properties['mastername']
    self.slave_name = self.m.properties['slavename']

    self.slave_dir = self.m.path['slave_build']
    self.checkout_root = self.slave_dir
    self.default_env = {}
    self.gclient_env = {}
    self.is_compile_bot = self.builder_name.startswith('Build-')

    self.default_env['CHROME_HEADLESS'] = '1'
    # The 'depot_tools' directory comes from recipe DEPS and isn't provided by
    # default. We have to set it manually.
    self.m.path.c.base_paths['depot_tools'] = (
        self.m.path.c.base_paths['slave_build'] +
        ('skia', 'infra', 'bots', '.recipe_deps', 'depot_tools'))
    if 'Win' in self.builder_name:
      self.m.path.c.base_paths['depot_tools'] = (
          'c:\\', 'Users', 'chrome-bot', 'depot_tools')

    # Compile bots keep a persistent checkout.
    self.persistent_checkout = (self.is_compile_bot or
                                'RecreateSKPs' in self.builder_name)
    if self.persistent_checkout:
      if 'Win' in self.builder_name:
        self.checkout_root = self.make_path('C:\\', 'b', 'work')
        self.gclient_cache = self.make_path('C:\\', 'b', 'cache')
      else:
        self.checkout_root = self.make_path('/', 'b', 'work')
        self.gclient_cache = self.make_path('/', 'b', 'cache')

    self.skia_dir = self.checkout_root.join('skia')
    self.infrabots_dir = self.skia_dir.join('infra', 'bots')

    # Some bots also require a checkout of chromium.
    self._need_chromium_checkout = 'CommandBuffer' in self.builder_name
    if 'CommandBuffer' in self.builder_name:
      self.gclient_env['GYP_CHROMIUM_NO_ACTION'] = '0'
    if ((self.is_compile_bot and
         'SAN' in self.builder_name) or
        'RecreateSKPs' in self.builder_name):
      self._need_chromium_checkout = True
      if 'RecreateSKPs' in self.builder_name:
        self.gclient_env['CPPFLAGS'] = (
            '-DSK_ALLOW_CROSSPROCESS_PICTUREIMAGEFILTERS=1')

    # Some bots also require a checkout of PDFium.
    self._need_pdfium_checkout = 'PDFium' in self.builder_name

    # Check out the Skia code.
    self.checkout_steps()

    # Obtain the spec for this builder from the Skia repo. Use it to set more
    # properties.
    self.builder_spec = self.get_builder_spec(self.skia_dir, self.builder_name)

    self.builder_cfg = self.builder_spec['builder_cfg']
    self.role = self.builder_cfg['role']

    # Set some important variables.
    self.resource_dir = self.skia_dir.join('resources')
    self.images_dir = self.slave_dir.join('skimage')
    if not self.m.path.exists(self.infrabots_dir.join(
        'assets', 'skimage', 'VERSION')):
      # TODO(borenet): Remove this once enough time has passed.
      self.images_dir = self.slave_dir.join('images')
    self.skia_out = self.skia_dir.join('out', self.builder_name)
    self.swarming_out_dir = self.make_path(self.m.properties['swarm_out_dir'])
    self.local_skp_dir = self.slave_dir.join('skp')
    if not self.m.path.exists(self.infrabots_dir.join(
        'assets', 'skp', 'VERSION')):
      # TODO(borenet): Remove this once enough time has passed.
      self.local_skp_dir = self.slave_dir.join('skps')
    if not self.is_compile_bot:
      self.skia_out = self.slave_dir.join('out')
    self.tmp_dir = self.m.path['slave_build'].join('tmp')
    if not self.m.path.exists(self.tmp_dir):
      self._run_once(self.m.file.makedirs,
                     'tmp_dir',
                     self.tmp_dir,
                     infra_step=True)

    self.gsutil_env_chromium_skia_gm = self.gsutil_env(BOTO_CHROMIUM_SKIA_GM)

    self.device_dirs = None
    self._ccache = None
    self._checked_for_ccache = False
    self.configuration = self.builder_spec['configuration']
    self.default_env.update({'SKIA_OUT': self.skia_out,
                             'BUILDTYPE': self.configuration})
    self.default_env.update(self.builder_spec['env'])
    self.build_targets = [str(t) for t in self.builder_spec['build_targets']]
    self.do_compile_steps = self.builder_spec.get('do_compile_steps', True)
    self.do_test_steps = self.builder_spec['do_test_steps']
    self.do_perf_steps = self.builder_spec['do_perf_steps']
    self.is_trybot = self.builder_cfg['is_trybot']
    self.upload_dm_results = self.builder_spec['upload_dm_results']
    self.upload_perf_results = self.builder_spec['upload_perf_results']
    self.dm_dir = self.m.path.join(
        self.swarming_out_dir, 'dm')
    self.perf_data_dir = self.m.path.join(self.swarming_out_dir,
        'perfdata', self.builder_name, 'data')
    self.dm_flags = self.builder_spec['dm_flags']
    self.nanobench_flags = self.builder_spec['nanobench_flags']

    self.flavor = self.get_flavor(self.builder_cfg)
261 | |
262 def check_failure(self): | |
263 """Raise an exception if any step failed.""" | |
264 if self.failed: | |
265 raise self.m.step.StepFailure('Failed build steps: %s' % | |
266 ', '.join([f.name for f in self.failed])) | |
267 | |
  def _run_once(self, fn, *args, **kwargs):
    """Run fn at most once, caching and returning its result.

    NOTE(review): results are memoized by fn.__name__ only — a later call
    with *different* args is silently skipped and returns the first call's
    result.  Current callers always pass the same args per function.
    """
    if not hasattr(self, '_already_ran'):
      self._already_ran = {}
    if not fn.__name__ in self._already_ran:
      self._already_ran[fn.__name__] = fn(*args, **kwargs)
    return self._already_ran[fn.__name__]
274 | |
  def update_repo(self, parent_dir, repo):
    """Update an existing repo. This is safe to call without gen_steps.

    If parent_dir/repo.name exists, re-points origin at repo.url, fetches,
    hard-resets to repo.revision and cleans untracked files.  Does nothing
    when the repo directory is absent (gclient will create it later).
    """
    repo_path = parent_dir.join(repo.name)
    if self.m.path.exists(repo_path):  # pragma: nocover
      # Windows bots need the batch wrapper to find git.
      if self.m.platform.is_win:
        git = 'git.bat'
      else:
        git = 'git'
      self.m.step('git remote set-url',
                  cmd=[git, 'remote', 'set-url', 'origin', repo.url],
                  cwd=repo_path,
                  infra_step=True)
      self.m.step('git fetch',
                  cmd=[git, 'fetch'],
                  cwd=repo_path,
                  infra_step=True)
      self.m.step('git reset',
                  cmd=[git, 'reset', '--hard', repo.revision],
                  cwd=repo_path,
                  infra_step=True)
      self.m.step('git clean',
                  cmd=[git, 'clean', '-d', '-f'],
                  cwd=repo_path,
                  infra_step=True)
299 | |
  def checkout_steps(self):
    """Run the steps to obtain a checkout of Skia.

    Non-persistent (isolate-fed) bots only record the revision and return.
    Persistent bots sync Skia — plus Chromium and/or PDFium when required —
    with gclient into self.checkout_root.  Sets self.got_revision.
    """
    cfg_kwargs = {}
    if not self.persistent_checkout:
      # We should've obtained the Skia checkout through isolates, so we don't
      # need to perform the checkout ourselves.
      self.m.path['checkout'] = self.skia_dir
      self.got_revision = self.m.properties['revision']
      return

    # Use a persistent gclient cache for Swarming.
    cfg_kwargs['CACHE_DIR'] = self.gclient_cache

    # Create the checkout path if necessary.
    if not self.m.path.exists(self.checkout_root):
      self.m.file.makedirs('checkout_path', self.checkout_root, infra_step=True)

    # Initial cleanup.
    gclient_cfg = self.m.gclient.make_config(**cfg_kwargs)
    skia = gclient_cfg.solutions.add()
    skia.name = 'skia'
    skia.managed = False
    skia.url = 'https://skia.googlesource.com/skia.git'
    skia.revision = self.m.properties.get('revision') or 'origin/master'
    self.update_repo(self.checkout_root, skia)

    # TODO(rmistry): Remove the below block after there is a solution for
    # crbug.com/616443
    entries_file = self.checkout_root.join('.gclient_entries')
    if self.m.path.exists(entries_file):
      self.m.file.remove('remove %s' % entries_file,
                         entries_file,
                         infra_step=True)  # pragma: no cover

    if self._need_chromium_checkout:
      chromium = gclient_cfg.solutions.add()
      chromium.name = 'src'
      chromium.managed = False
      chromium.url = 'https://chromium.googlesource.com/chromium/src.git'
      chromium.revision = 'origin/lkgr'
      self.update_repo(self.checkout_root, chromium)

    if self._need_pdfium_checkout:
      pdfium = gclient_cfg.solutions.add()
      pdfium.name = 'pdfium'
      pdfium.managed = False
      pdfium.url = 'https://pdfium.googlesource.com/pdfium.git'
      pdfium.revision = 'origin/master'
      self.update_repo(self.checkout_root, pdfium)

    # Run 'gclient sync'.
    gclient_cfg.got_revision_mapping['skia'] = 'got_revision'
    gclient_cfg.target_os.add('llvm')
    checkout_kwargs = {}
    checkout_kwargs['env'] = self.default_env

    # api.gclient.revert() assumes things about the layout of the code, so it
    # fails for us. Run an appropriate revert sequence for trybots instead.
    gclient_file = self.checkout_root.join('.gclient')
    if (self.m.tryserver.is_tryserver and
        self.m.path.exists(gclient_file)):  # pragma: no cover
      # These steps taken from:
      # https://chromium.googlesource.com/chromium/tools/build/+/
      # 81a696760ab7c25f6606c54fc781b90b8af9fdd2/scripts/slave/
      # gclient_safe_revert.py
      if self.m.path.exists(entries_file):
        self.m.gclient('recurse', [
            'recurse', '-i', 'sh', '-c',
            'if [ -e .git ]; then git remote update; fi'])
      self.m.gclient(
          'revert',
          ['revert', '-v', '-v', '-v', '--nohooks', '--upstream'],
          cwd=self.checkout_root)

    update_step = self.m.gclient.checkout(gclient_config=gclient_cfg,
                                          cwd=self.checkout_root,
                                          revert=False,
                                          **checkout_kwargs)

    self.got_revision = update_step.presentation.properties['got_revision']
    self.m.tryserver.maybe_apply_issue()

    if self._need_chromium_checkout:
      self.m.gclient.runhooks(cwd=self.checkout_root, env=self.gclient_env)
384 | |
  def copy_build_products(self, src, dst):
    """Copy whitelisted build products from src to dst.

    Runs an inline script which globs BUILD_PRODUCTS_ISOLATE_WHITELIST
    patterns under src and moves (not copies) each match into dst,
    preserving relative paths.
    """
    self.m.python.inline(
        name='copy build products',
        program='''import errno
import glob
import os
import shutil
import sys

src = sys.argv[1]
dst = sys.argv[2]
build_products_whitelist = %s

try:
  os.makedirs(dst)
except OSError as e:
  if e.errno != errno.EEXIST:
    raise

for pattern in build_products_whitelist:
  path = os.path.join(src, pattern)
  for f in glob.glob(path):
    dst_path = os.path.join(dst, os.path.relpath(f, src))
    if not os.path.isdir(os.path.dirname(dst_path)):
      os.makedirs(os.path.dirname(dst_path))
    print 'Copying build product %%s to %%s' %% (f, dst_path)
    shutil.move(f, dst_path)
''' % str(BUILD_PRODUCTS_ISOLATE_WHITELIST),
        args=[src, dst],
        infra_step=True)
416 | |
  def compile_steps(self, clobber=False):
    """Run the steps to build Skia.

    Compiles every target in self.build_targets via the flavor, then copies
    whitelisted build products into the Swarming output dir.  On Windows,
    always (even on failure) kills lingering toolchain processes which would
    otherwise keep the build directory locked.
    """
    try:
      for target in self.build_targets:
        self.flavor.compile(target)
      self.copy_build_products(
          self.flavor.out_dir,
          self.swarming_out_dir.join('out', self.configuration))
      self.flavor.copy_extra_build_products(self.swarming_out_dir)
    finally:
      if 'Win' in self.builder_cfg.get('os', ''):
        self.m.python.inline(
            name='cleanup',
            program='''import psutil
for p in psutil.process_iter():
  try:
    if p.name in ('mspdbsrv.exe', 'vctip.exe', 'cl.exe', 'link.exe'):
      p.kill()
  except psutil._error.AccessDenied:
    pass
''',
            infra_step=True)
439 | |
440 def _readfile(self, filename, *args, **kwargs): | |
441 """Convenience function for reading files.""" | |
442 name = kwargs.pop('name') or 'read %s' % self.m.path.basename(filename) | |
443 return self.m.file.read(name, filename, infra_step=True, *args, **kwargs) | |
444 | |
445 def _writefile(self, filename, contents): | |
446 """Convenience function for writing files.""" | |
447 return self.m.file.write('write %s' % self.m.path.basename(filename), | |
448 filename, contents, infra_step=True) | |
449 | |
  def rmtree(self, path):
    """Wrapper around api.file.rmtree with environment fix.

    Sets PYTHONPATH so the rmtree helper script can import the build
    scripts from the recipe DEPS checkout.
    """
    env = {}
    env['PYTHONPATH'] = str(self.m.path['checkout'].join(
        'infra', 'bots', '.recipe_deps', 'build', 'scripts'))
    self.m.file.rmtree(self.m.path.basename(path),
                       path,
                       env=env,
                       infra_step=True)
459 | |
  def run(self, steptype, name, abort_on_failure=True,
          fail_build_on_failure=True, env=None, **kwargs):
    """Run a step. If it fails, keep going but mark the build status failed.

    Args:
      steptype: callable step factory (e.g. self.m.python.inline).
      name: step name.
      abort_on_failure: re-raise StepFailure instead of continuing.
      fail_build_on_failure: record the failure in self.failed so that
          check_failure() fails the build at the end.
      env: extra environment; self.default_env entries take precedence.

    Returns the step result, or None when the step failed and
    abort_on_failure is False.
    """
    env = dict(env or {})
    env.update(self.default_env)
    try:
      return steptype(name=name, env=env, **kwargs)
    except self.m.step.StepFailure as e:
      if abort_on_failure:
        raise  # pragma: no cover
      if fail_build_on_failure:
        self.failed.append(e)
472 | |
473 def check_actual_version(self, version_file, tmp_dir, test_actual_version): | |
474 """Assert that we have an actually-downloaded version of the dir.""" | |
475 actual_version_file = self.m.path.join(tmp_dir, version_file) | |
476 actual_version = self._readfile( | |
477 actual_version_file, | |
478 name='Get downloaded %s' % version_file, | |
479 test_data=test_actual_version).rstrip() | |
480 assert actual_version != VERSION_NONE | |
481 return actual_version | |
482 | |
  def copy_dir(self, host_version, version_file, tmp_dir,
               host_path, device_path, test_expected_version,
               test_actual_version):
    """Copy a versioned asset directory from the host to the device.

    Compares the version recorded on the device against host_version and
    only re-copies the directory (and the version file) when they differ
    or the device version cannot be read.  No-op when host and device
    share the same version file path.
    """
    actual_version_file = self.m.path.join(tmp_dir, version_file)
    # Copy to device.
    device_version_file = self.flavor.device_path_join(
        self.device_dirs.tmp_dir, version_file)
    if str(actual_version_file) != str(device_version_file):
      try:
        device_version = self.flavor.read_file_on_device(device_version_file)
      except self.m.step.StepFailure:
        # Unreadable/missing version file on the device forces a re-copy.
        device_version = VERSION_NONE
      if device_version != host_version:
        self.flavor.remove_file_on_device(device_version_file)
        self.flavor.create_clean_device_dir(device_path)
        self.flavor.copy_directory_contents_to_device(host_path, device_path)

        # Copy the new version file.
        self.flavor.copy_file_to_device(actual_version_file,
                                        device_version_file)
503 | |
  def _copy_images(self):
    """Download and copy test images if needed.

    Determines the current skimage asset version (from the new assets
    VERSION file when present, otherwise the legacy downloaded version
    file), then copies the images to the device via copy_dir.  Returns the
    version string.
    """
    version_file = self.infrabots_dir.join('assets', 'skimage', 'VERSION')
    if self.m.path.exists(version_file):
      test_data = self.m.properties.get(
          'test_downloaded_sk_image_version', TEST_EXPECTED_SK_IMAGE_VERSION)
      version = self._readfile(version_file,
                               name='Get downloaded skimage VERSION',
                               test_data=test_data).rstrip()
      self._writefile(self.m.path.join(self.tmp_dir, VERSION_FILE_SK_IMAGE),
                      version)
    else:
      # TODO(borenet): Remove this once enough time has passed.
      version = self.check_actual_version(
          VERSION_FILE_SK_IMAGE,
          self.tmp_dir,
          test_actual_version=self.m.properties.get(
              'test_downloaded_sk_image_version',
              TEST_EXPECTED_SK_IMAGE_VERSION),
      )
    self.copy_dir(
        version,
        VERSION_FILE_SK_IMAGE,
        self.tmp_dir,
        self.images_dir,
        self.device_dirs.images_dir,
        test_expected_version=self.m.properties.get(
            'test_downloaded_sk_image_version',
            TEST_EXPECTED_SK_IMAGE_VERSION),
        test_actual_version=self.m.properties.get(
            'test_downloaded_sk_image_version',
            TEST_EXPECTED_SK_IMAGE_VERSION))
    return version
537 | |
  def _copy_skps(self):
    """Download and copy the SKPs if needed.

    Mirrors _copy_images for the SKP asset: resolves the current SKP
    version and copies the SKPs to the device via copy_dir.  Returns the
    version string.
    """
    version_file = self.infrabots_dir.join('assets', 'skp', 'VERSION')
    if self.m.path.exists(version_file):
      test_data = self.m.properties.get(
          'test_downloaded_skp_version', TEST_EXPECTED_SKP_VERSION)
      version = self._readfile(version_file,
                               name='Get downloaded SKP VERSION',
                               test_data=test_data).rstrip()
      self._writefile(self.m.path.join(self.tmp_dir, VERSION_FILE_SKP), version)
    else:
      # TODO(borenet): Remove this once enough time has passed.
      version = self.check_actual_version(
          VERSION_FILE_SKP,
          self.tmp_dir,
          test_actual_version=self.m.properties.get(
              'test_downloaded_skp_version',
              TEST_EXPECTED_SKP_VERSION),
      )
    self.copy_dir(
        version,
        VERSION_FILE_SKP,
        self.tmp_dir,
        self.local_skp_dir,
        self.device_dirs.skp_dir,
        test_expected_version=self.m.properties.get(
            'test_downloaded_skp_version', TEST_EXPECTED_SKP_VERSION),
        test_actual_version=self.m.properties.get(
            'test_downloaded_skp_version', TEST_EXPECTED_SKP_VERSION))
    return version
568 | |
  def install(self):
    """Copy the required executables and files to the device.

    Populates self.device_dirs, runs flavor-specific installation, and
    pushes the resources directory to the device.
    """
    self.device_dirs = self.flavor.get_device_dirs()

    # Run any device-specific installation.
    self.flavor.install()

    # TODO(borenet): Only copy files which have changed.
    # Resources
    self.flavor.copy_directory_contents_to_device(self.resource_dir,
                                                  self.device_dirs.resource_dir)
580 | |
  def ccache(self):
    """Return the path to ccache if available, otherwise None.

    The probe runs at most once per bot run (result cached on the
    instance) and is skipped entirely on Windows.  The probe is
    best-effort: any failure simply leaves ccache disabled.
    """
    if not self._checked_for_ccache:
      self._checked_for_ccache = True
      if not self.m.platform.is_win:
        result = self.run(
            self.m.python.inline,
            name='has ccache?',
            program='''import json
import subprocess
import sys

ccache = None
try:
  ccache = subprocess.check_output(['which', 'ccache']).rstrip()
except:
  pass
print json.dumps({'ccache': ccache})
''',
            stdout=self.m.json.output(),
            infra_step=True,
            abort_on_failure=False,
            fail_build_on_failure=False)
        if result and result.stdout and result.stdout.get('ccache'):
          self._ccache = result.stdout['ccache']

    return self._ccache
607 | |
  def json_from_file(self, filename, cwd, builder_name, test_data):
    """Execute the given script to obtain JSON data.

    Args:
      filename: path of the Python script to run.
      cwd: working directory for the script.
      builder_name: passed to the script as its second argument.
      test_data: JSON value used as the step's output in simulation.

    Returns the parsed JSON output of the script.
    """
    return self.m.python(
        'exec %s' % self.m.path.basename(filename),
        filename,
        args=[self.m.json.output(), builder_name],
        step_test_data=lambda: self.m.json.test_api.output(test_data),
        cwd=cwd,
        infra_step=True).json.output
617 | |
  def test_steps(self):
    """Run the DM test.

    Installs executables/assets on the device, fetches the list of
    uninteresting hashes from Gold (best-effort), assembles the dm command
    line from builder config and spec flags, runs dm, and copies results
    back to the host when uploading.  Valgrind GPU bots additionally run
    the abandonGpuContext variants (see skia:2789).
    """
    self._run_once(self.install)
    self._run_once(self._copy_skps)
    self._run_once(self._copy_images)

    use_hash_file = False
    if self.upload_dm_results:
      # This must run before we write anything into self.device_dirs.dm_dir
      # or we may end up deleting our output on machines where they're the same.
      self.flavor.create_clean_host_dir(self.dm_dir)
      if str(self.dm_dir) != str(self.device_dirs.dm_dir):
        self.flavor.create_clean_device_dir(self.device_dirs.dm_dir)

      # Obtain the list of already-generated hashes.
      hash_filename = 'uninteresting_hashes.txt'

      # Ensure that the tmp_dir exists.
      self._run_once(self.m.file.makedirs,
                     'tmp_dir',
                     self.tmp_dir,
                     infra_step=True)

      host_hashes_file = self.tmp_dir.join(hash_filename)
      hashes_file = self.flavor.device_path_join(
          self.device_dirs.tmp_dir, hash_filename)
      # Best-effort download with exponential backoff; a failure here only
      # means dm runs without the uninteresting-hashes optimization.
      self.run(
          self.m.python.inline,
          'get uninteresting hashes',
          program="""
import contextlib
import math
import socket
import sys
import time
import urllib2

HASHES_URL = 'https://gold.skia.org/_/hashes'
RETRIES = 5
TIMEOUT = 60
WAIT_BASE = 15

socket.setdefaulttimeout(TIMEOUT)
for retry in range(RETRIES):
  try:
    with contextlib.closing(
        urllib2.urlopen(HASHES_URL, timeout=TIMEOUT)) as w:
      hashes = w.read()
      with open(sys.argv[1], 'w') as f:
        f.write(hashes)
        break
  except Exception as e:
    print 'Failed to get uninteresting hashes from %s:' % HASHES_URL
    print e
    if retry == RETRIES:
      raise
    waittime = WAIT_BASE * math.pow(2, retry)
    print 'Retry in %d seconds.' % waittime
    time.sleep(waittime)
""",
          args=[host_hashes_file],
          cwd=self.skia_dir,
          abort_on_failure=False,
          fail_build_on_failure=False,
          infra_step=True)

      if self.m.path.exists(host_hashes_file):
        self.flavor.copy_file_to_device(host_hashes_file, hashes_file)
        use_hash_file = True

    # Run DM.
    properties = [
        'gitHash', self.got_revision,
        'master', self.master_name,
        'builder', self.builder_name,
        'build_number', self.m.properties['buildnumber'],
    ]
    if self.is_trybot:
      properties.extend([
          'issue', self.m.properties['issue'],
          'patchset', self.m.properties['patchset'],
      ])

    args = [
        'dm',
        '--undefok',  # This helps branches that may not know new flags.
        '--resourcePath', self.device_dirs.resource_dir,
        '--skps', self.device_dirs.skp_dir,
        '--images', self.flavor.device_path_join(
            self.device_dirs.images_dir, 'dm'),
        '--colorImages', self.flavor.device_path_join(self.device_dirs.images_dir,
                                                      'colorspace'),
        '--nameByHash',
        '--properties'
    ] + properties

    args.append('--key')
    args.extend(self._KeyParams())
    if use_hash_file:
      args.extend(['--uninterestingHashesFile', hashes_file])
    if self.upload_dm_results:
      args.extend(['--writePath', self.device_dirs.dm_dir])

    # Skip whichever of CPU/GPU this builder doesn't exercise.
    skip_flag = None
    if self.builder_cfg.get('cpu_or_gpu') == 'CPU':
      skip_flag = '--nogpu'
    elif self.builder_cfg.get('cpu_or_gpu') == 'GPU':
      skip_flag = '--nocpu'
    if skip_flag:
      args.append(skip_flag)
    args.extend(self.dm_flags)

    self.run(self.flavor.step, 'dm', cmd=args, abort_on_failure=False,
             env=self.default_env)

    if self.upload_dm_results:
      # Copy images and JSON to host machine if needed.
      self.flavor.copy_directory_contents_to_host(self.device_dirs.dm_dir,
                                                  self.dm_dir)

    # See skia:2789.
    if ('Valgrind' in self.builder_name and
        self.builder_cfg.get('cpu_or_gpu') == 'GPU'):
      abandonGpuContext = list(args)
      abandonGpuContext.append('--abandonGpuContext')
      self.run(self.flavor.step, 'dm --abandonGpuContext',
               cmd=abandonGpuContext, abort_on_failure=False)
      preAbandonGpuContext = list(args)
      preAbandonGpuContext.append('--preAbandonGpuContext')
      self.run(self.flavor.step, 'dm --preAbandonGpuContext',
               cmd=preAbandonGpuContext, abort_on_failure=False,
               env=self.default_env)
750 | |
  def perf_steps(self):
    """Run Skia benchmarks.

    Installs executables/assets on the device, assembles the nanobench (or
    visualbench) command line, runs it, and copies result JSON back to the
    host when uploading.  Valgrind GPU bots additionally run the
    abandonGpuContext variant (see skia:2789).
    """
    self._run_once(self.install)
    self._run_once(self._copy_skps)
    self._run_once(self._copy_images)

    if self.upload_perf_results:
      self.flavor.create_clean_device_dir(self.device_dirs.perf_data_dir)

    # Run nanobench.
    properties = [
        '--properties',
        'gitHash', self.got_revision,
        'build_number', self.m.properties['buildnumber'],
    ]
    if self.is_trybot:
      properties.extend([
          'issue', self.m.properties['issue'],
          'patchset', self.m.properties['patchset'],
      ])

    target = 'nanobench'
    if 'VisualBench' in self.builder_name:
      target = 'visualbench'
    args = [
        target,
        '--undefok',  # This helps branches that may not know new flags.
        '-i', self.device_dirs.resource_dir,
        '--skps', self.device_dirs.skp_dir,
        '--images', self.flavor.device_path_join(
            self.device_dirs.images_dir, 'nanobench'),
    ]

    # Skip whichever of CPU/GPU this builder doesn't exercise.
    skip_flag = None
    if self.builder_cfg.get('cpu_or_gpu') == 'CPU':
      skip_flag = '--nogpu'
    elif self.builder_cfg.get('cpu_or_gpu') == 'GPU':
      skip_flag = '--nocpu'
    if skip_flag:
      args.append(skip_flag)
    args.extend(self.nanobench_flags)

    if self.upload_perf_results:
      json_path = self.flavor.device_path_join(
          self.device_dirs.perf_data_dir,
          'nanobench_%s.json' % self.got_revision)
      args.extend(['--outResultsFile', json_path])
      args.extend(properties)

      # These keys are redundant or unsuitable for the perf dashboard.
      keys_blacklist = ['configuration', 'role', 'is_trybot']
      args.append('--key')
      for k in sorted(self.builder_cfg.keys()):
        if not k in keys_blacklist:
          args.extend([k, self.builder_cfg[k]])

    self.run(self.flavor.step, target, cmd=args, abort_on_failure=False,
             env=self.default_env)

    # See skia:2789.
    if ('Valgrind' in self.builder_name and
        self.builder_cfg.get('cpu_or_gpu') == 'GPU'):
      abandonGpuContext = list(args)
      abandonGpuContext.extend(['--abandonGpuContext', '--nocpu'])
      self.run(self.flavor.step, '%s --abandonGpuContext' % target,
               cmd=abandonGpuContext, abort_on_failure=False,
               env=self.default_env)

    # Upload results.
    if self.upload_perf_results:
      self.m.file.makedirs('perf_dir', self.perf_data_dir)
      self.flavor.copy_directory_contents_to_host(
          self.device_dirs.perf_data_dir, self.perf_data_dir)
823 | |
  def cleanup_steps(self):
    """Run any cleanup steps.  Delegates to the flavor."""
    self.flavor.cleanup_steps()
827 | |
828 def _KeyParams(self): | |
829 """Build a unique key from the builder name (as a list). | |
830 | |
831 E.g. arch x86 gpu GeForce320M mode MacMini4.1 os Mac10.6 | |
832 """ | |
833 # Don't bother to include role, which is always Test. | |
834 # TryBots are uploaded elsewhere so they can use the same key. | |
835 blacklist = ['role', 'is_trybot'] | |
836 | |
837 flat = [] | |
838 for k in sorted(self.builder_cfg.keys()): | |
839 if k not in blacklist: | |
840 flat.append(k) | |
841 flat.append(self.builder_cfg[k]) | |
842 return flat | |
OLD | NEW |