Chromium Code Reviews

Side by Side Diff: infra/bots/recipe_modules/core/api.py

Issue 2198173002: Re-organize Skia recipes (Closed)
Base URL: https://skia.googlesource.com/skia.git@master
Patch Set: Fix missing dependency (created 4 years, 4 months ago)
(No previous version; this patch adds the file.)
1 # Copyright 2014 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
4
5
6 # pylint: disable=W0201
7
8
9 import json
10 import os
11 import re
12 import sys
13
14 from recipe_engine import recipe_api
15 from recipe_engine import config_types
16
17 from . import fake_specs
18
19
20 TEST_EXPECTED_SKP_VERSION = '42'
21 TEST_EXPECTED_SK_IMAGE_VERSION = '42'
22
23 VERSION_FILE_SK_IMAGE = 'SK_IMAGE_VERSION'
24 VERSION_FILE_SKP = 'SKP_VERSION'
25
26 VERSION_NONE = -1
27
28
29 class SkiaApi(recipe_api.RecipeApi):
30
31 def get_builder_spec(self, skia_dir, builder_name):
32 """Obtain the buildbot spec for the given builder."""
33 fake_spec = None
34 if self._test_data.enabled:
35 fake_spec = fake_specs.FAKE_SPECS[builder_name]
36 builder_spec = self.m.run.json_from_file(
37 skia_dir.join('tools', 'buildbot_spec.py'),
38 skia_dir,
39 builder_name,
40 fake_spec)
41 return builder_spec
42
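The run.json_from_file helper is defined elsewhere; presumably it executes tools/buildbot_spec.py for the given builder and parses the JSON it emits, substituting fake_spec under simulation. A minimal standalone sketch of that pattern, with the exact invocation assumed rather than taken from the real helper:

import json
import subprocess

def json_from_script(script, *args):
    # Run a generator script and parse its stdout as JSON (illustrative only).
    output = subprocess.check_output(['python', str(script)] + [str(a) for a in args])
    return json.loads(output)

# e.g. json_from_script('tools/buildbot_spec.py', 'Test-Ubuntu-GCC-GCE-CPU-AVX2-x86_64-Debug')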
43 def setup(self):
44 """Prepare the bot to run."""
45 # Setup dependencies.
46 self.m.vars.setup()
47
48 # Check out the Skia code.
49 self.checkout_steps()
50
51 # Obtain the spec for this builder from the Skia repo. Use it to set more
52 # properties.
53 builder_spec = self.get_builder_spec(self.m.vars.skia_dir,
54 self.m.vars.builder_name)
55
56 # Continue setting up vars with the builder_spec.
57 self.m.vars.update_with_builder_spec(builder_spec)
58
59
60 if not self.m.path.exists(self.m.vars.tmp_dir):
61 self.m.run.run_once(self.m.file.makedirs,
62 'tmp_dir',
63 self.m.vars.tmp_dir,
64 infra_step=True)
65
66 self.m.flavor.setup()
67
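For orientation, a recipe that uses this module would drive it in roughly this order (a sketch only; it assumes the module is exposed as api.core, and the real recipes added in this CL may differ in detail):

def RunSteps(api):
  api.core.setup()           # checkout, builder spec, tmp dir, flavor setup
  try:
    api.core.test_steps()    # or api.core.perf_steps(), depending on the bot
  finally:
    api.core.cleanup_steps()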
68 def update_repo(self, parent_dir, repo):
69 """Update an existing repo. This is safe to call without gen_steps."""
70 repo_path = parent_dir.join(repo.name)
71 if self.m.path.exists(repo_path): # pragma: nocover
72 if self.m.platform.is_win:
73 git = 'git.bat'
74 else:
75 git = 'git'
76 self.m.step('git remote set-url',
77 cmd=[git, 'remote', 'set-url', 'origin', repo.url],
78 cwd=repo_path,
79 infra_step=True)
80 self.m.step('git fetch',
81 cmd=[git, 'fetch'],
82 cwd=repo_path,
83 infra_step=True)
84 self.m.step('git reset',
85 cmd=[git, 'reset', '--hard', repo.revision],
86 cwd=repo_path,
87 infra_step=True)
88 self.m.step('git clean',
89 cmd=[git, 'clean', '-d', '-f'],
90 cwd=repo_path,
91 infra_step=True)
92
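Outside the recipe engine, the same update is just re-pointing origin at the pinned URL and hard-resetting to the pinned revision. A minimal sketch with plain subprocess calls (repo_path, url and revision come from the caller; pass git='git.bat' on Windows, as above):

import subprocess

def update_repo_local(repo_path, url, revision, git='git'):
    def run_git(*args):
        subprocess.check_call([git] + list(args), cwd=repo_path)
    run_git('remote', 'set-url', 'origin', url)
    run_git('fetch')
    run_git('reset', '--hard', revision)
    run_git('clean', '-d', '-f')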
93 def checkout_steps(self):
94 """Run the steps to obtain a checkout of Skia."""
95 cfg_kwargs = {}
96 if not self.m.vars.persistent_checkout:
97 # We should've obtained the Skia checkout through isolates, so we don't
98 # need to perform the checkout ourselves.
99 return
100
101 # Use a persistent gclient cache for Swarming.
102 cfg_kwargs['CACHE_DIR'] = self.m.vars.gclient_cache
103
104 # Create the checkout path if necessary.
105 if not self.m.path.exists(self.m.vars.checkout_root):
106 self.m.file.makedirs('checkout_path',
107 self.m.vars.checkout_root,
108 infra_step=True)
109
110 # Initial cleanup.
111 gclient_cfg = self.m.gclient.make_config(**cfg_kwargs)
112 skia = gclient_cfg.solutions.add()
113 skia.name = 'skia'
114 skia.managed = False
115 skia.url = 'https://skia.googlesource.com/skia.git'
116 skia.revision = self.m.properties.get('revision') or 'origin/master'
117 self.update_repo(self.m.vars.checkout_root, skia)
118
119 # TODO(rmistry): Remove the below block after there is a solution for
120 # crbug.com/616443
121 entries_file = self.m.vars.checkout_root.join('.gclient_entries')
122 if self.m.path.exists(entries_file):
123 self.m.file.remove('remove %s' % entries_file,
124 entries_file,
125 infra_step=True) # pragma: no cover
126
127 if self.m.vars.need_chromium_checkout:
128 chromium = gclient_cfg.solutions.add()
129 chromium.name = 'src'
130 chromium.managed = False
131 chromium.url = 'https://chromium.googlesource.com/chromium/src.git'
132 chromium.revision = 'origin/lkgr'
133 self.update_repo(self.m.vars.checkout_root, chromium)
134
135 if self.m.vars.need_pdfium_checkout:
136 pdfium = gclient_cfg.solutions.add()
137 pdfium.name = 'pdfium'
138 pdfium.managed = False
139 pdfium.url = 'https://pdfium.googlesource.com/pdfium.git'
140 pdfium.revision = 'origin/master'
141 self.update_repo(self.m.vars.checkout_root, pdfium)
142
143 # Run 'gclient sync'.
144 gclient_cfg.got_revision_mapping['skia'] = 'got_revision'
145 gclient_cfg.target_os.add('llvm')
146 checkout_kwargs = {}
147 checkout_kwargs['env'] = self.m.vars.default_env
148
149 # api.gclient.revert() assumes things about the layout of the code, so it
150 # fails for us. Run an appropriate revert sequence for trybots instead.
151 gclient_file = self.m.vars.checkout_root.join('.gclient')
152 if (self.m.tryserver.is_tryserver and
153 self.m.path.exists(gclient_file)): # pragma: no cover
154 # These steps taken from:
155 # https://chromium.googlesource.com/chromium/tools/build/+/
156 # 81a696760ab7c25f6606c54fc781b90b8af9fdd2/scripts/slave/
157 # gclient_safe_revert.py
158 if self.m.path.exists(entries_file):
159 self.m.gclient('recurse', [
160 'recurse', '-i', 'sh', '-c',
161 'if [ -e .git ]; then git remote update; fi'])
162 self.m.gclient(
163 'revert',
164 ['revert', '-v', '-v', '-v', '--nohooks', '--upstream'],
165 cwd=self.m.vars.checkout_root)
166
167 update_step = self.m.gclient.checkout(gclient_config=gclient_cfg,
168 cwd=self.m.vars.checkout_root,
169 revert=False,
170 **checkout_kwargs)
171
172 self.m.vars.got_revision = (
173 update_step.presentation.properties['got_revision'])
174 self.m.tryserver.maybe_apply_issue()
175
176 if self.m.vars.need_chromium_checkout:
177 self.m.gclient.runhooks(cwd=self.m.vars.checkout_root,
178 env=self.m.vars.gclient_env)
179
180 def copy_dir(self, host_version, version_file, tmp_dir,
181 host_path, device_path, test_expected_version,
182 test_actual_version):
183 actual_version_file = self.m.path.join(tmp_dir, version_file)
184 # Copy to device.
185 device_version_file = self.m.flavor.device_path_join(
186 self.m.flavor.device_dirs.tmp_dir, version_file)
187 if str(actual_version_file) != str(device_version_file):
188 try:
189 device_version = (
190 self.m.flavor.read_file_on_device(device_version_file))
191 except self.m.step.StepFailure:
192 device_version = VERSION_NONE
193 if device_version != host_version:
194 self.m.flavor.remove_file_on_device(device_version_file)
195 self.m.flavor.create_clean_device_dir(device_path)
196 self.m.flavor.copy_directory_contents_to_device(
197 host_path, device_path)
198
199 # Copy the new version file.
200 self.m.flavor.copy_file_to_device(actual_version_file,
201 device_version_file)
202
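copy_dir implements a version-stamp cache: the payload is re-pushed to the device only when the version recorded there differs from the host's, and the stamp is rewritten afterwards. The same idea on a local filesystem, as a minimal sketch (the paths and helper name are illustrative, not the flavor API):

import os
import shutil

def sync_if_stale(host_version, stamp_path, src_dir, dst_dir):
    try:
        with open(stamp_path) as f:
            recorded = f.read().strip()
    except IOError:
        recorded = None                   # no stamp yet: force a copy
    if recorded != host_version:
        if os.path.isdir(dst_dir):
            shutil.rmtree(dst_dir)        # create_clean_device_dir equivalent
        shutil.copytree(src_dir, dst_dir)
        with open(stamp_path, 'w') as f:  # record the new version
            f.write(host_version)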
203 def _copy_images(self):
204 """Download and copy test images if needed."""
205 version_file = self.m.vars.infrabots_dir.join(
206 'assets', 'skimage', 'VERSION')
207 test_data = self.m.properties.get(
208 'test_downloaded_sk_image_version', TEST_EXPECTED_SK_IMAGE_VERSION)
209 version = self.m.run.readfile(
210 version_file,
211 name='Get downloaded skimage VERSION',
212 test_data=test_data).rstrip()
213 self.m.run.writefile(
214 self.m.path.join(self.m.vars.tmp_dir, VERSION_FILE_SK_IMAGE),
215 version)
216 self.copy_dir(
217 version,
218 VERSION_FILE_SK_IMAGE,
219 self.m.vars.tmp_dir,
220 self.m.vars.images_dir,
221 self.m.flavor.device_dirs.images_dir,
222 test_expected_version=self.m.properties.get(
223 'test_downloaded_sk_image_version',
224 TEST_EXPECTED_SK_IMAGE_VERSION),
225 test_actual_version=self.m.properties.get(
226 'test_downloaded_sk_image_version',
227 TEST_EXPECTED_SK_IMAGE_VERSION))
228 return version
229
230 def _copy_skps(self):
231 """Download and copy the SKPs if needed."""
232 version_file = self.m.vars.infrabots_dir.join(
233 'assets', 'skp', 'VERSION')
234 test_data = self.m.properties.get(
235 'test_downloaded_skp_version', TEST_EXPECTED_SKP_VERSION)
236 version = self.m.run.readfile(
237 version_file,
238 name='Get downloaded SKP VERSION',
239 test_data=test_data).rstrip()
240 self.m.run.writefile(
241 self.m.path.join(self.m.vars.tmp_dir, VERSION_FILE_SKP),
242 version)
243 self.copy_dir(
244 version,
245 VERSION_FILE_SKP,
246 self.m.vars.tmp_dir,
247 self.m.vars.local_skp_dir,
248 self.m.flavor.device_dirs.skp_dir,
249 test_expected_version=self.m.properties.get(
250 'test_downloaded_skp_version', TEST_EXPECTED_SKP_VERSION),
251 test_actual_version=self.m.properties.get(
252 'test_downloaded_skp_version', TEST_EXPECTED_SKP_VERSION))
253 return version
254
255 def install(self):
256 """Copy the required executables and files to the device."""
257 # Run any device-specific installation.
258 self.m.flavor.install()
259
260 # TODO(borenet): Only copy files which have changed.
261 # Resources
262 self.m.flavor.copy_directory_contents_to_device(
263 self.m.vars.resource_dir,
264 self.m.flavor.device_dirs.resource_dir)
265
266 def test_steps(self):
267 """Run the DM test."""
268 self.m.run.run_once(self.install)
269 self.m.run.run_once(self._copy_skps)
270 self.m.run.run_once(self._copy_images)
271
272 use_hash_file = False
273 if self.m.vars.upload_dm_results:
274 # This must run before we write anything into
275 # self.m.flavor.device_dirs.dm_dir or we may end up deleting our
276 # output on machines where they're the same.
277 self.m.flavor.create_clean_host_dir(self.m.vars.dm_dir)
278 host_dm_dir = str(self.m.vars.dm_dir)
279 device_dm_dir = str(self.m.flavor.device_dirs.dm_dir)
280 if host_dm_dir != device_dm_dir:
281 self.m.flavor.create_clean_device_dir(device_dm_dir)
282
283 # Obtain the list of already-generated hashes.
284 hash_filename = 'uninteresting_hashes.txt'
285
286 # Ensure that the tmp_dir exists.
287 self.m.run.run_once(self.m.file.makedirs,
288 'tmp_dir',
289 self.m.vars.tmp_dir,
290 infra_step=True)
291
292 host_hashes_file = self.m.vars.tmp_dir.join(hash_filename)
293 hashes_file = self.m.flavor.device_path_join(
294 self.m.flavor.device_dirs.tmp_dir, hash_filename)
295 self.m.run(
296 self.m.python.inline,
297 'get uninteresting hashes',
298 program="""
299 import contextlib
300 import math
301 import socket
302 import sys
303 import time
304 import urllib2
305
306 HASHES_URL = 'https://gold.skia.org/_/hashes'
307 RETRIES = 5
308 TIMEOUT = 60
309 WAIT_BASE = 15
310
311 socket.setdefaulttimeout(TIMEOUT)
312 for retry in range(RETRIES):
313 try:
314 with contextlib.closing(
315 urllib2.urlopen(HASHES_URL, timeout=TIMEOUT)) as w:
316 hashes = w.read()
317 with open(sys.argv[1], 'w') as f:
318 f.write(hashes)
319 break
320 except Exception as e:
321 print 'Failed to get uninteresting hashes from %s:' % HASHES_URL
322 print e
323 if retry == RETRIES - 1:
324 raise
325 waittime = WAIT_BASE * math.pow(2, retry)
326 print 'Retry in %d seconds.' % waittime
327 time.sleep(waittime)
328 """,
329 args=[host_hashes_file],
330 cwd=self.m.vars.skia_dir,
331 abort_on_failure=False,
332 fail_build_on_failure=False,
333 infra_step=True)
334
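The inline script above retries the download with exponential backoff: after each failed attempt it sleeps WAIT_BASE * 2**retry seconds, and the final attempt re-raises instead of sleeping. With the constants above, the waits between attempts work out to:

# [WAIT_BASE * 2 ** r for r in range(RETRIES - 1)]  ->  [15, 30, 60, 120] seconds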
335 if self.m.path.exists(host_hashes_file):
336 self.m.flavor.copy_file_to_device(host_hashes_file, hashes_file)
337 use_hash_file = True
338
339 # Run DM.
340 properties = [
341 'gitHash', self.m.vars.got_revision,
342 'master', self.m.vars.master_name,
343 'builder', self.m.vars.builder_name,
344 'build_number', self.m.vars.build_number,
345 ]
346 if self.m.vars.is_trybot:
347 properties.extend([
348 'issue', self.m.vars.issue,
349 'patchset', self.m.vars.patchset,
350 ])
351
352 args = [
353 'dm',
354 '--undefok', # This helps branches that may not know new flags.
355 '--resourcePath', self.m.flavor.device_dirs.resource_dir,
356 '--skps', self.m.flavor.device_dirs.skp_dir,
357 '--images', self.m.flavor.device_path_join(
358 self.m.flavor.device_dirs.images_dir, 'dm'),
359 '--colorImages', self.m.flavor.device_path_join(
360 self.m.flavor.device_dirs.images_dir, 'colorspace'),
361 '--nameByHash',
362 '--properties'
363 ] + properties
364
365 args.append('--key')
366 args.extend(self._KeyParams())
367 if use_hash_file:
368 args.extend(['--uninterestingHashesFile', hashes_file])
369 if self.m.vars.upload_dm_results:
370 args.extend(['--writePath', self.m.flavor.device_dirs.dm_dir])
371
372 skip_flag = None
373 if self.m.vars.builder_cfg.get('cpu_or_gpu') == 'CPU':
374 skip_flag = '--nogpu'
375 elif self.m.vars.builder_cfg.get('cpu_or_gpu') == 'GPU':
376 skip_flag = '--nocpu'
377 if skip_flag:
378 args.append(skip_flag)
379 args.extend(self.m.vars.dm_flags)
380
381 self.m.run(self.m.flavor.step, 'dm', cmd=args,
382 abort_on_failure=False,
383 env=self.m.vars.default_env)
384
385 if self.m.vars.upload_dm_results:
386 # Copy images and JSON to host machine if needed.
387 self.m.flavor.copy_directory_contents_to_host(
388 self.m.flavor.device_dirs.dm_dir, self.m.vars.dm_dir)
389
390 # See skia:2789.
391 if ('Valgrind' in self.m.vars.builder_name and
392 self.m.vars.builder_cfg.get('cpu_or_gpu') == 'GPU'):
393 abandonGpuContext = list(args)
394 abandonGpuContext.append('--abandonGpuContext')
395 self.m.run(self.m.flavor.step, 'dm --abandonGpuContext',
396 cmd=abandonGpuContext, abort_on_failure=False)
397 preAbandonGpuContext = list(args)
398 preAbandonGpuContext.append('--preAbandonGpuContext')
399 self.m.run(self.m.flavor.step, 'dm --preAbandonGpuContext',
400 cmd=preAbandonGpuContext, abort_on_failure=False,
401 env=self.m.vars.default_env)
402
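Put together, the dm command for a hypothetical CPU test bot (assuming the hash download succeeded and dm results are being uploaded) looks roughly like the list below; device paths, revision, builder and key values are illustrative, and the trailing flags come from vars.dm_flags:

# args == ['dm', '--undefok',
#          '--resourcePath', '<device>/resources',
#          '--skps', '<device>/skps',
#          '--images', '<device>/images/dm',
#          '--colorImages', '<device>/images/colorspace',
#          '--nameByHash',
#          '--properties', 'gitHash', 'abc1234', 'master', 'client.skia',
#              'builder', 'Test-Ubuntu-...', 'build_number', '1234',
#          '--key', 'arch', 'x86_64', 'compiler', 'GCC', 'os', 'Ubuntu',
#          '--uninterestingHashesFile', '<device>/tmp/uninteresting_hashes.txt',
#          '--writePath', '<device>/dm',
#          '--nogpu'] + dm_flags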
403 def perf_steps(self):
404 """Run Skia benchmarks."""
405 self.m.run.run_once(self.install)
406 self.m.run.run_once(self._copy_skps)
407 self.m.run.run_once(self._copy_images)
408
409 if self.m.vars.upload_perf_results:
410 self.m.flavor.create_clean_device_dir(
411 self.m.flavor.device_dirs.perf_data_dir)
412
413 # Run nanobench.
414 properties = [
415 '--properties',
416 'gitHash', self.m.vars.got_revision,
417 'build_number', self.m.vars.build_number,
418 ]
419 if self.m.vars.is_trybot:
420 properties.extend([
421 'issue', self.m.vars.issue,
422 'patchset', self.m.vars.patchset,
423 ])
424
425 target = 'nanobench'
426 if 'VisualBench' in self.m.vars.builder_name:
427 target = 'visualbench'
428 args = [
429 target,
430 '--undefok', # This helps branches that may not know new flags.
431 '-i', self.m.flavor.device_dirs.resource_dir,
432 '--skps', self.m.flavor.device_dirs.skp_dir,
433 '--images', self.m.flavor.device_path_join(
434 self.m.flavor.device_dirs.images_dir, 'nanobench'),
435 ]
436
437 skip_flag = None
438 if self.m.vars.builder_cfg.get('cpu_or_gpu') == 'CPU':
439 skip_flag = '--nogpu'
440 elif self.m.vars.builder_cfg.get('cpu_or_gpu') == 'GPU':
441 skip_flag = '--nocpu'
442 if skip_flag:
443 args.append(skip_flag)
444 args.extend(self.m.vars.nanobench_flags)
445
446 if self.m.vars.upload_perf_results:
447 json_path = self.m.flavor.device_path_join(
448 self.m.flavor.device_dirs.perf_data_dir,
449 'nanobench_%s.json' % self.m.vars.got_revision)
450 args.extend(['--outResultsFile', json_path])
451 args.extend(properties)
452
453 keys_blacklist = ['configuration', 'role', 'is_trybot']
454 args.append('--key')
455 for k in sorted(self.m.vars.builder_cfg.keys()):
456 if k not in keys_blacklist:
457 args.extend([k, self.m.vars.builder_cfg[k]])
458
459 self.m.run(self.m.flavor.step, target, cmd=args,
460 abort_on_failure=False,
461 env=self.m.vars.default_env)
462
463 # See skia:2789.
464 if ('Valgrind' in self.m.vars.builder_name and
465 self.m.vars.builder_cfg.get('cpu_or_gpu') == 'GPU'):
466 abandonGpuContext = list(args)
467 abandonGpuContext.extend(['--abandonGpuContext', '--nocpu'])
468 self.m.run(self.m.flavor.step,
469 '%s --abandonGpuContext' % target,
470 cmd=abandonGpuContext, abort_on_failure=False,
471 env=self.m.vars.default_env)
472
473 # Upload results.
474 if self.m.vars.upload_perf_results:
475 self.m.file.makedirs('perf_dir', self.m.vars.perf_data_dir)
476 self.m.flavor.copy_directory_contents_to_host(
477 self.m.flavor.device_dirs.perf_data_dir,
478 self.m.vars.perf_data_dir)
479
480 def cleanup_steps(self):
481 """Run any cleanup steps."""
482 self.m.flavor.cleanup_steps()
483
484 def _KeyParams(self):
485 """Build a unique key from the builder name (as a list).
486
487 E.g. arch x86 gpu GeForce320M model MacMini4.1 os Mac10.6
488 """
489 # Don't bother to include role, which is always Test.
490 # TryBots are uploaded elsewhere so they can use the same key.
491 blacklist = ['role', 'is_trybot']
492
493 flat = []
494 for k in sorted(self.m.vars.builder_cfg.keys()):
495 if k not in blacklist:
496 flat.append(k)
497 flat.append(self.m.vars.builder_cfg[k])
498 return flat
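As the docstring's example suggests, _KeyParams flattens the non-blacklisted builder_cfg entries into alternating key/value tokens, sorted by key. A quick illustration with a hypothetical configuration:

# builder_cfg = {'role': 'Test', 'os': 'Mac10.6', 'gpu': 'GeForce320M',
#                'arch': 'x86', 'model': 'MacMini4.1', 'is_trybot': False}
# _KeyParams() -> ['arch', 'x86', 'gpu', 'GeForce320M',
#                  'model', 'MacMini4.1', 'os', 'Mac10.6']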