OLD | NEW |
---|---|
(Empty) | |
1 # Copyright 2014 The Chromium Authors. All rights reserved. | |
2 # Use of this source code is governed by a BSD-style license that can be | |
3 # found in the LICENSE file. | |
4 | |
5 | |
6 # pylint: disable=W0201 | |
7 | |
8 | |
9 import json | |
10 import os | |
11 import re | |
12 import sys | |
13 | |
14 from recipe_engine import recipe_api | |
15 from recipe_engine import config_types | |
16 | |
17 from . import android_flavor | |
18 from . import cmake_flavor | |
19 from . import coverage_flavor | |
20 from . import default_flavor | |
21 from . import fake_specs | |
22 from . import ios_flavor | |
23 from . import pdfium_flavor | |
24 from . import valgrind_flavor | |
25 from . import xsan_flavor | |
26 | |
27 | |
# Name of the .boto credentials file used by gsutil for access to the
# chromium-skia-gm Google Storage bucket.
BOTO_CHROMIUM_SKIA_GM = 'chromium-skia-gm.boto'

# Google Storage subdirectory templates; '%s' is filled with a version number.
GS_SUBDIR_TMPL_SK_IMAGE = 'skimage/v%s'
GS_SUBDIR_TMPL_SKP = 'playback_%s/skps'

# Asset versions used as defaults during recipe simulation tests.
TEST_EXPECTED_SKP_VERSION = '42'
TEST_EXPECTED_SK_IMAGE_VERSION = '42'

# Filenames which record the downloaded version of each asset.
VERSION_FILE_SK_IMAGE = 'SK_IMAGE_VERSION'
VERSION_FILE_SKP = 'SKP_VERSION'

# Sentinel meaning "no version present".
# NOTE(review): this is an int while versions read from files are strings;
# see check_actual_version, whose comparison against it can never fail.
VERSION_NONE = -1

# Build products copied out of the build directory for isolation/swarming.
# Entries may be glob patterns (see copy_build_products).
BUILD_PRODUCTS_ISOLATE_WHITELIST = [
  'dm',
  'dm.exe',
  'nanobench',
  'nanobench.exe',
  '*.so',
  '*.dll',
  '*.dylib',
  'skia_launcher',
  'lib/*.so',
  'iOSShell.app',
  'iOSShell.ipa',
  'visualbench',
  'visualbench.exe',
]
56 | |
57 | |
def is_android(builder_cfg):
  """Determine whether the given builder is an Android builder."""
  extra = builder_cfg.get('extra_config', '')
  target_os = builder_cfg.get('os')
  return 'Android' in extra or target_os == 'Android'
62 | |
63 | |
def is_cmake(builder_cfg):
  """Return True if this is a CMake builder."""
  extra = builder_cfg.get('extra_config', '')
  return 'CMake' in extra
66 | |
67 | |
def is_ios(builder_cfg):
  """Return True if this is an iOS builder."""
  extra = builder_cfg.get('extra_config', '')
  target_os = builder_cfg.get('os')
  return 'iOS' in extra or target_os == 'iOS'
71 | |
72 | |
def is_pdfium(builder_cfg):
  """Return True if this is a PDFium builder."""
  return 'PDFium' in builder_cfg.get('extra_config', '')
75 | |
76 | |
def is_valgrind(builder_cfg):
  """Return True if this is a Valgrind builder."""
  return 'Valgrind' in builder_cfg.get('extra_config', '')
79 | |
80 | |
def is_xsan(builder_cfg):
  """Return True for sanitizer (ASAN/MSAN/TSAN) builders."""
  extra = builder_cfg.get('extra_config', '')
  return any(san in extra for san in ('ASAN', 'MSAN', 'TSAN'))
85 | |
86 | |
87 class SkiaApi(recipe_api.RecipeApi): | |
88 | |
89 def get_flavor(self, builder_cfg): | |
90 """Return a flavor utils object specific to the given builder.""" | |
91 if is_android(builder_cfg): | |
92 return android_flavor.AndroidFlavorUtils(self) | |
93 elif is_cmake(builder_cfg): | |
94 return cmake_flavor.CMakeFlavorUtils(self) | |
95 elif is_ios(builder_cfg): | |
96 return ios_flavor.iOSFlavorUtils(self) | |
97 elif is_pdfium(builder_cfg): | |
98 return pdfium_flavor.PDFiumFlavorUtils(self) | |
99 elif is_valgrind(builder_cfg): | |
100 return valgrind_flavor.ValgrindFlavorUtils(self) | |
101 elif is_xsan(builder_cfg): | |
102 return xsan_flavor.XSanFlavorUtils(self) | |
103 elif builder_cfg.get('configuration') == 'Coverage': | |
104 return coverage_flavor.CoverageFlavorUtils(self) | |
105 else: | |
106 return default_flavor.DefaultFlavorUtils(self) | |
107 | |
108 @property | |
109 def home_dir(self): | |
110 """Find the home directory.""" | |
111 home_dir = os.path.expanduser('~') | |
112 if self._test_data.enabled: | |
113 home_dir = '[HOME]' | |
114 return home_dir | |
115 | |
116 def gsutil_env(self, boto_file): | |
117 """Environment variables for gsutil.""" | |
118 boto_path = None | |
119 if boto_file: | |
120 boto_path = self.m.path.join(self.home_dir, boto_file) | |
121 return {'AWS_CREDENTIAL_FILE': boto_path, | |
122 'BOTO_CONFIG': boto_path} | |
123 | |
124 def get_builder_spec(self, skia_dir, builder_name): | |
125 """Obtain the buildbot spec for the given builder.""" | |
126 fake_spec = None | |
127 if self._test_data.enabled: | |
128 fake_spec = fake_specs.FAKE_SPECS[builder_name] | |
129 builder_spec = self.json_from_file( | |
130 skia_dir.join('tools', 'buildbot_spec.py'), | |
131 skia_dir, | |
132 builder_name, | |
133 fake_spec) | |
134 return builder_spec | |
135 | |
136 def make_path(self, *path): | |
137 """Return a Path object for the given path.""" | |
138 key = 'custom_%s' % '_'.join(path) | |
139 self.m.path.c.base_paths[key] = tuple(path) | |
140 return self.m.path[key] | |
141 | |
  def setup(self):
    """Prepare the bot to run.

    Reads buildbot properties, configures paths and environment variables,
    performs the Skia checkout, then loads the builder spec from the
    checkout to fill in the remaining per-builder configuration.  Must be
    called before any of the compile/test/perf step methods.
    """
    # Steps which failed without aborting the build; see self.run() and
    # check_failure().
    self.failed = []

    self.builder_name = self.m.properties['buildername']
    self.master_name = self.m.properties['mastername']
    self.slave_name = self.m.properties['slavename']

    self.slave_dir = self.m.path['slave_build']
    self.checkout_root = self.slave_dir
    self.default_env = {}
    self.gclient_env = {}
    self.is_compile_bot = self.builder_name.startswith('Build-')

    self.default_env['CHROME_HEADLESS'] = '1'
    # The 'depot_tools' directory comes from recipe DEPS and isn't provided by
    # default. We have to set it manually.
    self.m.path.c.base_paths['depot_tools'] = (
        self.m.path.c.base_paths['slave_build'] +
        ('skia', 'infra', 'bots', '.recipe_deps', 'depot_tools'))
    if 'Win' in self.builder_name:
      self.m.path.c.base_paths['depot_tools'] = (
          'c:\\', 'Users', 'chrome-bot', 'depot_tools')

    # Compile bots keep a persistent checkout.
    self.persistent_checkout = (self.is_compile_bot or
                                'RecreateSKPs' in self.builder_name)
    if self.persistent_checkout:
      if 'Win' in self.builder_name:
        self.checkout_root = self.make_path('C:\\', 'b', 'work')
        self.gclient_cache = self.make_path('C:\\', 'b', 'cache')
      else:
        self.checkout_root = self.make_path('/', 'b', 'work')
        self.gclient_cache = self.make_path('/', 'b', 'cache')

    self.skia_dir = self.checkout_root.join('skia')
    self.infrabots_dir = self.skia_dir.join('infra', 'bots')

    # Some bots also require a checkout of chromium.
    self._need_chromium_checkout = 'CommandBuffer' in self.builder_name
    if 'CommandBuffer' in self.builder_name:
      self.gclient_env['GYP_CHROMIUM_NO_ACTION'] = '0'
    if ((self.is_compile_bot and
         'SAN' in self.builder_name) or
        'RecreateSKPs' in self.builder_name):
      self._need_chromium_checkout = True

    # Some bots also require a checkout of PDFium.
    self._need_pdfium_checkout = 'PDFium' in self.builder_name

    # Check out the Skia code.
    self.checkout_steps()

    # Obtain the spec for this builder from the Skia repo. Use it to set more
    # properties.
    self.builder_spec = self.get_builder_spec(self.skia_dir, self.builder_name)

    self.builder_cfg = self.builder_spec['builder_cfg']
    self.role = self.builder_cfg['role']

    # Set some important variables.
    self.resource_dir = self.skia_dir.join('resources')
    self.images_dir = self.slave_dir.join('skimage')
    if not self.m.path.exists(self.infrabots_dir.join(
        'assets', 'skimage', 'VERSION')):
      # TODO(borenet): Remove this once enough time has passed.
      self.images_dir = self.slave_dir.join('images')
    self.skia_out = self.skia_dir.join('out', self.builder_name)
    self.swarming_out_dir = self.make_path(self.m.properties['swarm_out_dir'])
    self.local_skp_dir = self.slave_dir.join('skp')
    if not self.m.path.exists(self.infrabots_dir.join(
        'assets', 'skp', 'VERSION')):
      # TODO(borenet): Remove this once enough time has passed.
      self.local_skp_dir = self.slave_dir.join('skps')
    if not self.is_compile_bot:
      # Non-compile bots receive build products via isolate into 'out'.
      self.skia_out = self.slave_dir.join('out')
    self.tmp_dir = self.m.path['slave_build'].join('tmp')
    if not self.m.path.exists(self.tmp_dir):
      self._run_once(self.m.file.makedirs,
                     'tmp_dir',
                     self.tmp_dir,
                     infra_step=True)

    self.gsutil_env_chromium_skia_gm = self.gsutil_env(BOTO_CHROMIUM_SKIA_GM)

    # Populated later by install(); ccache state is lazily probed by ccache().
    self.device_dirs = None
    self._ccache = None
    self._checked_for_ccache = False
    self.configuration = self.builder_spec['configuration']
    self.default_env.update({'SKIA_OUT': self.skia_out,
                             'BUILDTYPE': self.configuration})
    self.default_env.update(self.builder_spec['env'])
    self.build_targets = [str(t) for t in self.builder_spec['build_targets']]
    self.do_compile_steps = self.builder_spec.get('do_compile_steps', True)
    self.do_test_steps = self.builder_spec['do_test_steps']
    self.do_perf_steps = self.builder_spec['do_perf_steps']
    self.is_trybot = self.builder_cfg['is_trybot']
    self.upload_dm_results = self.builder_spec['upload_dm_results']
    self.upload_perf_results = self.builder_spec['upload_perf_results']
    self.dm_dir = self.m.path.join(
        self.swarming_out_dir, 'dm')
    self.perf_data_dir = self.m.path.join(self.swarming_out_dir,
                                          'perfdata', self.builder_name, 'data')
    self.dm_flags = self.builder_spec['dm_flags']
    self.nanobench_flags = self.builder_spec['nanobench_flags']

    self.flavor = self.get_flavor(self.builder_cfg)
250 | |
251 def check_failure(self): | |
252 """Raise an exception if any step failed.""" | |
253 if self.failed: | |
254 raise self.m.step.StepFailure('Failed build steps: %s' % | |
255 ', '.join([f.name for f in self.failed])) | |
256 | |
257 def _run_once(self, fn, *args, **kwargs): | |
258 if not hasattr(self, '_already_ran'): | |
259 self._already_ran = {} | |
260 if not fn.__name__ in self._already_ran: | |
261 self._already_ran[fn.__name__] = fn(*args, **kwargs) | |
262 return self._already_ran[fn.__name__] | |
263 | |
  def update_repo(self, parent_dir, repo):
    """Update an existing repo. This is safe to call without gen_steps.

    Args:
      parent_dir: directory which contains (or will contain) the checkout.
      repo: gclient solution object providing name, url and revision.
    """
    repo_path = parent_dir.join(repo.name)
    # Only update a checkout that already exists; a missing one is created
    # later by the gclient sync in checkout_steps.
    if self.m.path.exists(repo_path):  # pragma: nocover
      # Windows needs the .bat wrapper to invoke git from a recipe step.
      if self.m.platform.is_win:
        git = 'git.bat'
      else:
        git = 'git'
      # Re-point 'origin' in case the configured mirror URL has changed.
      self.m.step('git remote set-url',
                  cmd=[git, 'remote', 'set-url', 'origin', repo.url],
                  cwd=repo_path,
                  infra_step=True)
      self.m.step('git fetch',
                  cmd=[git, 'fetch'],
                  cwd=repo_path,
                  infra_step=True)
      # Hard-reset to the pinned revision, discarding local modifications.
      self.m.step('git reset',
                  cmd=[git, 'reset', '--hard', repo.revision],
                  cwd=repo_path,
                  infra_step=True)
      # Remove untracked files and directories.
      self.m.step('git clean',
                  cmd=[git, 'clean', '-d', '-f'],
                  cwd=repo_path,
                  infra_step=True)
288 | |
  def checkout_steps(self):
    """Run the steps to obtain a checkout of Skia.

    Non-persistent (isolate-based) bots already have the source on disk, so
    only the 'checkout' path and got_revision are recorded.  Persistent bots
    perform a full gclient checkout, optionally adding Chromium and PDFium
    solutions, with a manual revert sequence for trybots.
    """
    cfg_kwargs = {}
    if not self.persistent_checkout:
      # We should've obtained the Skia checkout through isolates, so we don't
      # need to perform the checkout ourselves.
      self.m.path['checkout'] = self.skia_dir
      self.got_revision = self.m.properties['revision']
      return

    # Use a persistent gclient cache for Swarming.
    cfg_kwargs['CACHE_DIR'] = self.gclient_cache

    # Create the checkout path if necessary.
    if not self.m.path.exists(self.checkout_root):
      self.m.file.makedirs('checkout_path', self.checkout_root, infra_step=True)

    # Initial cleanup.
    gclient_cfg = self.m.gclient.make_config(**cfg_kwargs)
    skia = gclient_cfg.solutions.add()
    skia.name = 'skia'
    skia.managed = False
    skia.url = 'https://skia.googlesource.com/skia.git'
    skia.revision = self.m.properties.get('revision') or 'origin/master'
    self.update_repo(self.checkout_root, skia)

    # TODO(rmistry): Remove the below block after there is a solution for
    # crbug.com/616443
    entries_file = self.checkout_root.join('.gclient_entries')
    if self.m.path.exists(entries_file):
      self.m.file.remove('remove %s' % entries_file,
                         entries_file,
                         infra_step=True)  # pragma: no cover

    if self._need_chromium_checkout:
      chromium = gclient_cfg.solutions.add()
      chromium.name = 'src'
      chromium.managed = False
      chromium.url = 'https://chromium.googlesource.com/chromium/src.git'
      chromium.revision = 'origin/lkgr'
      self.update_repo(self.checkout_root, chromium)

    if self._need_pdfium_checkout:
      pdfium = gclient_cfg.solutions.add()
      pdfium.name = 'pdfium'
      pdfium.managed = False
      pdfium.url = 'https://pdfium.googlesource.com/pdfium.git'
      pdfium.revision = 'origin/master'
      self.update_repo(self.checkout_root, pdfium)

    # Run 'gclient sync'.
    gclient_cfg.got_revision_mapping['skia'] = 'got_revision'
    gclient_cfg.target_os.add('llvm')
    checkout_kwargs = {}
    checkout_kwargs['env'] = self.default_env

    # api.gclient.revert() assumes things about the layout of the code, so it
    # fails for us. Run an appropriate revert sequence for trybots instead.
    gclient_file = self.checkout_root.join('.gclient')
    if (self.m.tryserver.is_tryserver and
        self.m.path.exists(gclient_file)):  # pragma: no cover
      # These steps taken from:
      # https://chromium.googlesource.com/chromium/tools/build/+/
      # 81a696760ab7c25f6606c54fc781b90b8af9fdd2/scripts/slave/
      # gclient_safe_revert.py
      if self.m.path.exists(entries_file):
        self.m.gclient('recurse', [
            'recurse', '-i', 'sh', '-c',
            'if [ -e .git ]; then git remote update; fi'])
      self.m.gclient(
          'revert',
          ['revert', '-v', '-v', '-v', '--nohooks', '--upstream'],
          cwd=self.checkout_root)

    update_step = self.m.gclient.checkout(gclient_config=gclient_cfg,
                                          cwd=self.checkout_root,
                                          revert=False,
                                          **checkout_kwargs)

    self.got_revision = update_step.presentation.properties['got_revision']
    self.m.tryserver.maybe_apply_issue()

    if self._need_chromium_checkout:
      # Chromium requires runhooks (gyp etc.) after sync.
      self.m.gclient.runhooks(cwd=self.checkout_root, env=self.gclient_env)
373 | |
  def copy_build_products(self, src, dst):
    """Copy whitelisted build products from src to dst.

    Runs an inline script which globs each BUILD_PRODUCTS_ISOLATE_WHITELIST
    pattern under src and moves matches into dst, preserving relative paths.
    The %% escapes inside the script survive the outer %-formatting.

    Args:
      src: directory containing build outputs.
      dst: destination directory; created if it does not exist.
    """
    self.m.python.inline(
        name='copy build products',
        program='''import errno
import glob
import os
import shutil
import sys

src = sys.argv[1]
dst = sys.argv[2]
build_products_whitelist = %s

try:
  os.makedirs(dst)
except OSError as e:
  if e.errno != errno.EEXIST:
    raise

for pattern in build_products_whitelist:
  path = os.path.join(src, pattern)
  for f in glob.glob(path):
    dst_path = os.path.join(dst, os.path.relpath(f, src))
    if not os.path.isdir(os.path.dirname(dst_path)):
      os.makedirs(os.path.dirname(dst_path))
    print 'Copying build product %%s to %%s' %% (f, dst_path)
    shutil.move(f, dst_path)
''' % str(BUILD_PRODUCTS_ISOLATE_WHITELIST),
        args=[src, dst],
        infra_step=True)
405 | |
  def compile_steps(self, clobber=False):
    """Run the steps to build Skia.

    Args:
      clobber: NOTE(review): currently unused in this body; presumably kept
          for interface compatibility with callers — confirm.
    """
    try:
      for target in self.build_targets:
        self.flavor.compile(target)
      self.copy_build_products(
          self.flavor.out_dir,
          self.swarming_out_dir.join('out', self.configuration))
      self.flavor.copy_extra_build_products(self.swarming_out_dir)
    finally:
      if 'Win' in self.builder_cfg.get('os', ''):
        # On Windows, kill lingering compiler processes which may hold file
        # locks and break later steps; runs even if compilation failed.
        self.m.python.inline(
            name='cleanup',
            program='''import psutil
for p in psutil.process_iter():
  try:
    if p.name in ('mspdbsrv.exe', 'vctip.exe', 'cl.exe', 'link.exe'):
      p.kill()
  except psutil._error.AccessDenied:
    pass
''',
            infra_step=True)
428 | |
429 def _readfile(self, filename, *args, **kwargs): | |
430 """Convenience function for reading files.""" | |
431 name = kwargs.pop('name') or 'read %s' % self.m.path.basename(filename) | |
432 return self.m.file.read(name, filename, infra_step=True, *args, **kwargs) | |
433 | |
434 def _writefile(self, filename, contents): | |
435 """Convenience function for writing files.""" | |
436 return self.m.file.write('write %s' % self.m.path.basename(filename), | |
437 filename, contents, infra_step=True) | |
438 | |
439 def rmtree(self, path): | |
440 """Wrapper around api.file.rmtree with environment fix.""" | |
441 env = {} | |
442 env['PYTHONPATH'] = str(self.m.path['checkout'].join( | |
443 'infra', 'bots', '.recipe_deps', 'build', 'scripts')) | |
444 self.m.file.rmtree(self.m.path.basename(path), | |
445 path, | |
446 env=env, | |
447 infra_step=True) | |
448 | |
449 def run(self, steptype, name, abort_on_failure=True, | |
450 fail_build_on_failure=True, env=None, **kwargs): | |
451 """Run a step. If it fails, keep going but mark the build status failed.""" | |
452 env = dict(env or {}) | |
453 env.update(self.default_env) | |
454 try: | |
455 return steptype(name=name, env=env, **kwargs) | |
456 except self.m.step.StepFailure as e: | |
457 if abort_on_failure: | |
458 raise # pragma: no cover | |
459 if fail_build_on_failure: | |
460 self.failed.append(e) | |
461 | |
462 def check_actual_version(self, version_file, tmp_dir, test_actual_version): | |
463 """Assert that we have an actually-downloaded version of the dir.""" | |
464 actual_version_file = self.m.path.join(tmp_dir, version_file) | |
465 actual_version = self._readfile( | |
466 actual_version_file, | |
467 name='Get downloaded %s' % version_file, | |
468 test_data=test_actual_version).rstrip() | |
469 assert actual_version != VERSION_NONE | |
470 return actual_version | |
471 | |
  def copy_dir(self, host_version, version_file, tmp_dir,
               host_path, device_path, test_expected_version,
               test_actual_version):
    """Copy a versioned directory to the device if its version is stale.

    Args:
      host_version: version string available on the host.
      version_file: basename of the file recording the version.
      tmp_dir: host temp dir containing the downloaded version file.
      host_path: host directory to copy from.
      device_path: device directory to copy to.
      test_expected_version: simulation-only version value (unused here).
      test_actual_version: simulation-only version value (unused here).
    """
    actual_version_file = self.m.path.join(tmp_dir, version_file)
    # Copy to device.
    device_version_file = self.flavor.device_path_join(
        self.device_dirs.tmp_dir, version_file)
    # When host and device paths coincide (non-device flavors) there is
    # nothing to synchronize.
    if str(actual_version_file) != str(device_version_file):
      try:
        device_version = self.flavor.read_file_on_device(device_version_file)
      except self.m.step.StepFailure:
        # No version file on the device; treat as absent to force a copy.
        device_version = VERSION_NONE
      if device_version != host_version:
        self.flavor.remove_file_on_device(device_version_file)
        self.flavor.create_clean_device_dir(device_path)
        self.flavor.copy_directory_contents_to_device(host_path, device_path)

        # Copy the new version file.
        self.flavor.copy_file_to_device(actual_version_file,
                                        device_version_file)
492 | |
  def _copy_images(self):
    """Download and copy test images if needed.

    Returns:
      The version string of the images copied to the device.
    """
    version_file = self.infrabots_dir.join('assets', 'skimage', 'VERSION')
    if self.m.path.exists(version_file):
      # New-style asset: read the pinned version and mirror it into tmp_dir
      # so copy_dir can compare it against the device's recorded version.
      test_data = self.m.properties.get(
          'test_downloaded_sk_image_version', TEST_EXPECTED_SK_IMAGE_VERSION)
      version = self._readfile(version_file,
                               name='Get downloaded skimage VERSION',
                               test_data=test_data).rstrip()
      self._writefile(self.m.path.join(self.tmp_dir, VERSION_FILE_SK_IMAGE),
                      version)
    else:
      # TODO(borenet): Remove this once enough time has passed.
      version = self.check_actual_version(
          VERSION_FILE_SK_IMAGE,
          self.tmp_dir,
          test_actual_version=self.m.properties.get(
              'test_downloaded_sk_image_version',
              TEST_EXPECTED_SK_IMAGE_VERSION),
      )
    self.copy_dir(
        version,
        VERSION_FILE_SK_IMAGE,
        self.tmp_dir,
        self.images_dir,
        self.device_dirs.images_dir,
        test_expected_version=self.m.properties.get(
            'test_downloaded_sk_image_version',
            TEST_EXPECTED_SK_IMAGE_VERSION),
        test_actual_version=self.m.properties.get(
            'test_downloaded_sk_image_version',
            TEST_EXPECTED_SK_IMAGE_VERSION))
    return version
526 | |
  def _copy_skps(self):
    """Download and copy the SKPs if needed.

    Returns:
      The version string of the SKPs copied to the device.
    """
    version_file = self.infrabots_dir.join('assets', 'skp', 'VERSION')
    if self.m.path.exists(version_file):
      # New-style asset: read the pinned version and mirror it into tmp_dir
      # so copy_dir can compare it against the device's recorded version.
      test_data = self.m.properties.get(
          'test_downloaded_skp_version', TEST_EXPECTED_SKP_VERSION)
      version = self._readfile(version_file,
                               name='Get downloaded SKP VERSION',
                               test_data=test_data).rstrip()
      self._writefile(self.m.path.join(self.tmp_dir, VERSION_FILE_SKP), version)
    else:
      # TODO(borenet): Remove this once enough time has passed.
      version = self.check_actual_version(
          VERSION_FILE_SKP,
          self.tmp_dir,
          test_actual_version=self.m.properties.get(
              'test_downloaded_skp_version',
              TEST_EXPECTED_SKP_VERSION),
      )
    self.copy_dir(
        version,
        VERSION_FILE_SKP,
        self.tmp_dir,
        self.local_skp_dir,
        self.device_dirs.skp_dir,
        test_expected_version=self.m.properties.get(
            'test_downloaded_skp_version', TEST_EXPECTED_SKP_VERSION),
        test_actual_version=self.m.properties.get(
            'test_downloaded_skp_version', TEST_EXPECTED_SKP_VERSION))
    return version
557 | |
558 def install(self): | |
559 """Copy the required executables and files to the device.""" | |
560 self.device_dirs = self.flavor.get_device_dirs() | |
561 | |
562 # Run any device-specific installation. | |
563 self.flavor.install() | |
564 | |
565 # TODO(borenet): Only copy files which have changed. | |
566 # Resources | |
567 self.flavor.copy_directory_contents_to_device(self.resource_dir, | |
568 self.device_dirs.resource_dir) | |
569 | |
  def ccache(self):
    """Return the path to ccache if available on this bot, else None.

    The probe runs at most once; the result is cached on the instance.
    Windows bots never use ccache.
    """
    if not self._checked_for_ccache:
      self._checked_for_ccache = True
      if not self.m.platform.is_win:
        # Probe via 'which ccache'; the script prints JSON so the result can
        # be read back from the step's stdout.  Best-effort: a failed probe
        # neither aborts nor fails the build.
        result = self.run(
            self.m.python.inline,
            name='has ccache?',
            program='''import json
import subprocess
import sys

ccache = None
try:
  ccache = subprocess.check_output(['which', 'ccache']).rstrip()
except:
  pass
print json.dumps({'ccache': ccache})
''',
            stdout=self.m.json.output(),
            infra_step=True,
            abort_on_failure=False,
            fail_build_on_failure=False)
        if result and result.stdout and result.stdout.get('ccache'):
          self._ccache = result.stdout['ccache']

    return self._ccache
596 | |
597 def json_from_file(self, filename, cwd, builder_name, test_data): | |
598 """Execute the given script to obtain JSON data.""" | |
599 return self.m.python( | |
600 'exec %s' % self.m.path.basename(filename), | |
601 filename, | |
602 args=[self.m.json.output(), builder_name], | |
603 step_test_data=lambda: self.m.json.test_api.output(test_data), | |
604 cwd=cwd, | |
605 infra_step=True).json.output | |
606 | |
  def test_steps(self):
    """Run the DM test.

    Installs executables/assets on the device, optionally fetches the list
    of uninteresting Gold hashes, then runs DM (plus Valgrind GPU-context
    variants) and copies results back to the host for upload.
    """
    self._run_once(self.install)
    self._run_once(self._copy_skps)
    self._run_once(self._copy_images)

    use_hash_file = False
    if self.upload_dm_results:
      # This must run before we write anything into self.device_dirs.dm_dir
      # or we may end up deleting our output on machines where they're the same.
      self.flavor.create_clean_host_dir(self.dm_dir)
      if str(self.dm_dir) != str(self.device_dirs.dm_dir):
        self.flavor.create_clean_device_dir(self.device_dirs.dm_dir)

      # Obtain the list of already-generated hashes.
      hash_filename = 'uninteresting_hashes.txt'

      # Ensure that the tmp_dir exists.
      self._run_once(self.m.file.makedirs,
                     'tmp_dir',
                     self.tmp_dir,
                     infra_step=True)

      host_hashes_file = self.tmp_dir.join(hash_filename)
      hashes_file = self.flavor.device_path_join(
          self.device_dirs.tmp_dir, hash_filename)
      # Download the hashes with retries and exponential backoff.  This is
      # best-effort: a failure neither aborts nor fails the build, and DM
      # simply runs without the hash file.
      self.run(
          self.m.python.inline,
          'get uninteresting hashes',
          program="""
import contextlib
import math
import socket
import sys
import time
import urllib2

HASHES_URL = 'https://gold.skia.org/_/hashes'
RETRIES = 5
TIMEOUT = 60
WAIT_BASE = 15

socket.setdefaulttimeout(TIMEOUT)
for retry in range(RETRIES):
  try:
    with contextlib.closing(
        urllib2.urlopen(HASHES_URL, timeout=TIMEOUT)) as w:
      hashes = w.read()
      with open(sys.argv[1], 'w') as f:
        f.write(hashes)
        break
  except Exception as e:
    print 'Failed to get uninteresting hashes from %s:' % HASHES_URL
    print e
    if retry == RETRIES:
      raise
    waittime = WAIT_BASE * math.pow(2, retry)
    print 'Retry in %d seconds.' % waittime
    time.sleep(waittime)
""",
          args=[host_hashes_file],
          cwd=self.skia_dir,
          abort_on_failure=False,
          fail_build_on_failure=False,
          infra_step=True)

      if self.m.path.exists(host_hashes_file):
        self.flavor.copy_file_to_device(host_hashes_file, hashes_file)
        use_hash_file = True

    # Run DM.
    properties = [
        'gitHash', self.got_revision,
        'master', self.master_name,
        'builder', self.builder_name,
        'build_number', self.m.properties['buildnumber'],
    ]
    if self.is_trybot:
      properties.extend([
          'issue', self.m.properties['issue'],
          'patchset', self.m.properties['patchset'],
      ])

    args = [
        'dm',
        '--undefok',   # This helps branches that may not know new flags.
        '--resourcePath', self.device_dirs.resource_dir,
        '--skps', self.device_dirs.skp_dir,
        '--images', self.flavor.device_path_join(
            self.device_dirs.images_dir, 'dm'),
        '--colorImages', self.flavor.device_path_join(self.device_dirs.images_dir,
                                                      'colorspace'),
        '--nameByHash',
        '--properties'
    ] + properties

    args.append('--key')
    args.extend(self._KeyParams())
    if use_hash_file:
      args.extend(['--uninterestingHashesFile', hashes_file])
    if self.upload_dm_results:
      args.extend(['--writePath', self.device_dirs.dm_dir])

    # Skip whichever of CPU/GPU this builder does not cover.
    skip_flag = None
    if self.builder_cfg.get('cpu_or_gpu') == 'CPU':
      skip_flag = '--nogpu'
    elif self.builder_cfg.get('cpu_or_gpu') == 'GPU':
      skip_flag = '--nocpu'
    if skip_flag:
      args.append(skip_flag)
    args.extend(self.dm_flags)

    self.run(self.flavor.step, 'dm', cmd=args, abort_on_failure=False,
             env=self.default_env)

    if self.upload_dm_results:
      # Copy images and JSON to host machine if needed.
      self.flavor.copy_directory_contents_to_host(self.device_dirs.dm_dir,
                                                  self.dm_dir)

    # See skia:2789.
    if ('Valgrind' in self.builder_name and
        self.builder_cfg.get('cpu_or_gpu') == 'GPU'):
      abandonGpuContext = list(args)
      abandonGpuContext.append('--abandonGpuContext')
      self.run(self.flavor.step, 'dm --abandonGpuContext',
               cmd=abandonGpuContext, abort_on_failure=False)
      preAbandonGpuContext = list(args)
      preAbandonGpuContext.append('--preAbandonGpuContext')
      self.run(self.flavor.step, 'dm --preAbandonGpuContext',
               cmd=preAbandonGpuContext, abort_on_failure=False,
               env=self.default_env)
739 | |
  def perf_steps(self):
    """Run Skia benchmarks.

    Runs nanobench (or visualbench for VisualBench builders), plus a
    Valgrind GPU-context variant where applicable, and copies JSON results
    back to the host when uploading is enabled.
    """
    self._run_once(self.install)
    self._run_once(self._copy_skps)
    self._run_once(self._copy_images)

    if self.upload_perf_results:
      self.flavor.create_clean_device_dir(self.device_dirs.perf_data_dir)

    # Run nanobench.
    properties = [
        '--properties',
        'gitHash', self.got_revision,
        'build_number', self.m.properties['buildnumber'],
    ]
    if self.is_trybot:
      properties.extend([
          'issue', self.m.properties['issue'],
          'patchset', self.m.properties['patchset'],
      ])

    target = 'nanobench'
    if 'VisualBench' in self.builder_name:
      target = 'visualbench'
    args = [
        target,
        '--undefok',   # This helps branches that may not know new flags.
        '-i', self.device_dirs.resource_dir,
        '--skps', self.device_dirs.skp_dir,
        '--images', self.flavor.device_path_join(
            self.device_dirs.images_dir, 'nanobench'),
    ]

    # Skip whichever of CPU/GPU this builder does not cover.
    skip_flag = None
    if self.builder_cfg.get('cpu_or_gpu') == 'CPU':
      skip_flag = '--nogpu'
    elif self.builder_cfg.get('cpu_or_gpu') == 'GPU':
      skip_flag = '--nocpu'
    if skip_flag:
      args.append(skip_flag)
    args.extend(self.nanobench_flags)

    if self.upload_perf_results:
      # Write JSON results and attach the builder-config key/value pairs
      # (minus non-identifying keys) for the perf ingestion pipeline.
      json_path = self.flavor.device_path_join(
          self.device_dirs.perf_data_dir,
          'nanobench_%s.json' % self.got_revision)
      args.extend(['--outResultsFile', json_path])
      args.extend(properties)

      keys_blacklist = ['configuration', 'role', 'is_trybot']
      args.append('--key')
      for k in sorted(self.builder_cfg.keys()):
        if not k in keys_blacklist:
          args.extend([k, self.builder_cfg[k]])

    self.run(self.flavor.step, target, cmd=args, abort_on_failure=False,
             env=self.default_env)

    # See skia:2789.
    if ('Valgrind' in self.builder_name and
        self.builder_cfg.get('cpu_or_gpu') == 'GPU'):
      abandonGpuContext = list(args)
      abandonGpuContext.extend(['--abandonGpuContext', '--nocpu'])
      self.run(self.flavor.step, '%s --abandonGpuContext' % target,
               cmd=abandonGpuContext, abort_on_failure=False,
               env=self.default_env)

    # Upload results.
    if self.upload_perf_results:
      self.m.file.makedirs('perf_dir', self.perf_data_dir)
      self.flavor.copy_directory_contents_to_host(
          self.device_dirs.perf_data_dir, self.perf_data_dir)
812 | |
  def cleanup_steps(self):
    """Run any cleanup steps (flavor-specific)."""
    self.flavor.cleanup_steps()
816 | |
817 def _KeyParams(self): | |
818 """Build a unique key from the builder name (as a list). | |
819 | |
820 E.g. arch x86 gpu GeForce320M mode MacMini4.1 os Mac10.6 | |
821 """ | |
822 # Don't bother to include role, which is always Test. | |
823 # TryBots are uploaded elsewhere so they can use the same key. | |
824 blacklist = ['role', 'is_trybot'] | |
825 | |
826 flat = [] | |
827 for k in sorted(self.builder_cfg.keys()): | |
828 if k not in blacklist: | |
829 flat.append(k) | |
830 flat.append(self.builder_cfg[k]) | |
831 return flat | |
OLD | NEW |