OLD | NEW |
---|---|
1 # Copyright 2016 The LUCI Authors. All rights reserved. | 1 # Copyright 2016 The LUCI Authors. All rights reserved. |
2 # Use of this source code is governed under the Apache License, Version 2.0 | 2 # Use of this source code is governed under the Apache License, Version 2.0 |
3 # that can be found in the LICENSE file. | 3 # that can be found in the LICENSE file. |
4 | 4 |
5 """Fetches CIPD client and installs packages.""" | 5 """Fetches CIPD client and installs packages.""" |
6 | 6 |
7 import contextlib | 7 import contextlib |
8 import hashlib | 8 import hashlib |
9 import json | 9 import json |
10 import logging | 10 import logging |
11 import optparse | 11 import optparse |
12 import os | 12 import os |
13 import platform | 13 import platform |
14 import re | |
15 import shutil | |
14 import sys | 16 import sys |
15 import tempfile | 17 import tempfile |
16 import time | 18 import time |
17 import urllib | 19 import urllib |
18 | 20 |
19 from utils import file_path | 21 from utils import file_path |
20 from utils import fs | 22 from utils import fs |
21 from utils import net | 23 from utils import net |
22 from utils import subprocess42 | 24 from utils import subprocess42 |
23 from utils import tools | 25 from utils import tools |
26 import isolated_format | |
27 import isolateserver | |
(...skipping 108 matching lines...) | |
132 binary_path (str): path to the CIPD client binary. | 134 binary_path (str): path to the CIPD client binary. |
133 package_name (str): the CIPD package name for the client itself. | 135 package_name (str): the CIPD package name for the client itself. |
134 instance_id (str): the CIPD instance_id for the client itself. | 136 instance_id (str): the CIPD instance_id for the client itself. |
135 service_url (str): if not None, URL of the CIPD backend that overrides | 137 service_url (str): if not None, URL of the CIPD backend that overrides |
136 the default one. | 138 the default one. |
137 """ | 139 """ |
138 self.binary_path = binary_path | 140 self.binary_path = binary_path |
139 self.package_name = package_name | 141 self.package_name = package_name |
140 self.instance_id = instance_id | 142 self.instance_id = instance_id |
141 self.service_url = service_url | 143 self.service_url = service_url |
144 self._cache_hash = hashlib.sha1 | |
145 self._cache_hash_algo = ( | |
146 isolated_format.SUPPORTED_ALGOS_REVERSE[self._cache_hash]) | |
147 | |
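For context, `isolated_format.SUPPORTED_ALGOS_REVERSE` maps a hash constructor back to its wire name, so `_cache_hash_algo` is the string that ends up in file suffixes. A minimal sketch of the assumed mapping (the table contents are an assumption, not quoted from `isolated_format`):

```python
import hashlib

# Assumed shape of the isolated_format lookup tables (illustrative only).
SUPPORTED_ALGOS = {'sha-1': hashlib.sha1}
SUPPORTED_ALGOS_REVERSE = {v: k for k, v in SUPPORTED_ALGOS.iteritems()}

assert SUPPORTED_ALGOS_REVERSE[hashlib.sha1] == 'sha-1'
```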
148 def _ensure_from_isolate(self, target_dir, cipd_isolated, isolate_cache): | |
149 """ | |
150 Retrieves the CIPD subdirectories from the isolate cache, if they exist, | |
M-A Ruel
2017/05/10 13:31:58
please start comment on the first line
generally p
kjlubick
2017/05/10 19:41:00
Done.
| |
151 hardlinking or copying the files into the provided directory. | |
152 | |
153 Does the opposite of _isolate_cipd for a given cipd_isolated file. | |
154 | |
155 Args: | |
156 target_dir: directory in which to put the files | |
M-A Ruel 2017/05/10 13:31:58: inconsistent dots, remove
kjlubick 2017/05/10 19:41:00: Done.
| |
157 cipd_isolated: the isolated.[hash] file created previously in | |
158 _isolate_cipd | |
159 isolate_cache: the isolateserver.DiskCache in which the files may be | |
160 stored | |
161 | |
162 Returns: | |
163 True if the isolated file and all the isolated contents were successfully | |
164 found in the isolate cache and put into place using hardlinks with a copy | |
165 fallback. False otherwise. | |
166 """ | |
167 if not isolate_cache: | |
168 logging.info('Not ensuring cipd from isolate cache because ' | |
169 'isolate_cache is not defined: %s', isolate_cache) | |
170 return False | |
171 try: | |
172 with open(cipd_isolated, 'rb') as f: | |
173 digest = f.read() | |
M-A Ruel 2017/05/10 13:31:58: it's already an str.
kjlubick 2017/05/10 19:41:01: Done.
| |
174 try: | |
175 content = isolate_cache.getfileobj(digest).read() | |
176 except Exception as e: | |
M-A Ruel 2017/05/10 13:31:58: that's too broad, please scope down
kjlubick 2017/05/10 19:41:00: Done.
| |
177 logging.warning('Could not find isolated file in cache with digest ' | |
178 '%s: %s', digest, e) | |
179 return False | |
180 | |
181 ifile = isolated_format.IsolatedFile(digest, self._cache_hash) | |
182 ifile.load(content) | |
183 | |
184 if not isolateserver.is_cached(ifile, isolate_cache): | |
185 logging.info('Cached CIPD asset(s) %s are incomplete', cipd_isolated) | |
186 return False | |
187 | |
188 file_path.ensure_tree(target_dir) | |
189 return isolateserver.extract(ifile, target_dir, isolate_cache) | |
190 except Exception as e: | |
191 logging.warning('Could not ensure cipd package from isolate: %s', e) | |
192 return False | |
193 | |
194 return True | |
195 | |
196 def _isolate_cipd(self, root, subdirs, isolate_cache, cipd_cache): | |
197 """ | |
198 Puts the content of the CIPD subdirectories into the isolate cache, | |
M-A Ruel
2017/05/10 13:31:58
single sentence
empty line
rest
kjlubick
2017/05/10 19:41:01
Done.
| |
199 creating a .isolated file representing each subdirectory. This .isolated | |
200 file goes into the isolate_cache as well, and a .isolated.[hash] file | |
201 goes into cipd_cache for extraction in _ensure_from_isolate(). The suffix | |
202 will be related to self._cache_hash_algo (.sha-1 for SHA-1, etc) | |
203 | |
204 This spares machines with slow disk I/O (e.g. Raspberry Pis) from | |
205 re-extracting the CIPD zips on every run, which can save substantial time. | |
206 | |
207 Args: | |
208 root: where packages are installed. | |
209 subdirs: dict of subdir -> name_version where name_version is | |
210 [subdir].[pkg1version_pkg2version...].isolated.[hash]. This way, if | |
211 any packages are updated, the cached files will also change. | |
212 isolate_cache: An isolateserver.DiskCache used to store files locally. | |
213 cipd_cache: A directory in which to put the *isolated.[hash] files. | |
214 """ | |
215 if not isolate_cache or not os.path.isdir(cipd_cache): | |
216 logging.info('Not putting cipd into isolate cache because one of the ' | |
217 'caches is missing: %s, %s', isolate_cache, cipd_cache) | |
218 return | |
219 for subdir, hashfile in subdirs.iteritems(): | |
220 if not subdir: | |
221 logging.info('Not caching to %s because it extracts to ./', hashfile) | |
222 continue | |
223 # The subdirs given to us from cipd are always in foo/bar/baz format, | |
224 # which Windows cannot handle; convert to the native path separator. | |
225 subdir = subdir.replace('/', os.path.sep) | |
226 subdir = os.path.join(root, subdir) | |
227 | |
228 if not os.path.isdir(subdir): | |
229 logging.warning('%r is not a directory, so it can\'t be isolated', | |
230 subdir) | |
231 continue | |
232 | |
233 infiles, metadata = isolateserver.directory_to_metadata( | |
234 subdir, self._cache_hash, [], True) | |
235 | |
236 # The .isolated file to be created on disk. hashfile names the companion | |
237 # file created along with it; it carries an extra '.<algo>' suffix, e.g. | |
238 # *.isolated.sha-1 when SHA-1 is used, which is stripped off here. | |
239 isolated_file = unicode(os.path.join(cipd_cache, hashfile[:-(len(self._cache_hash_algo) + 1)])) | |
240 data = { | |
241 'algo': self._cache_hash_algo, | |
242 'files': metadata, | |
243 'version': isolated_format.ISOLATED_FILE_VERSION, | |
244 } | |
245 isolated_format.save_isolated(isolated_file, data) | |
246 | |
247 for infile in infiles: | |
248 with open(os.path.join(subdir, infile.path), 'rb') as f: | |
249 isolate_cache.write(infile.digest, f) | |
250 | |
251 with open(isolated_file, 'rb') as f: | |
252 content = f.read() | |
253 digest = self._cache_hash(content).hexdigest() | |
254 isolate_cache.write(digest, content) | |
255 | |
256 with open(os.path.join(cipd_cache, hashfile), 'w') as f: | |
257 f.write(digest) | |
142 | 258 |
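To make the cache-key naming concrete, here is a sketch of the files `_isolate_cipd` produces for one subdirectory (subdir, versions, and paths are invented for illustration):

```python
# Hypothetical example of the layout described in the docstring above.
subdir = 'bin'
versions = ['deadbeef', 'cafef00d']   # one entry per package in the subdir
algo = 'sha-1'                        # self._cache_hash_algo
hashfile = '%s.%s.isolated.%s' % (subdir, '_'.join(versions), algo)
# hashfile == 'bin.deadbeef_cafef00d.isolated.sha-1'
# <cipd_cache>/bin.deadbeef_cafef00d.isolated.sha-1  holds the digest (pointer)
# <cipd_cache>/bin.deadbeef_cafef00d.isolated        holds the .isolated JSON
# The file contents themselves live in isolate_cache, keyed by their digests.
```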
143 def ensure( | 259 def ensure( |
144 self, site_root, packages, cache_dir=None, tmp_dir=None, timeout=None): | 260 self, site_root, packages, cache_dir=None, tmp_dir=None, timeout=None, |
261 isolate_cache=None): | |
145 """Ensures that packages installed in |site_root| equals |packages| set. | 262 """Ensures that packages installed in |site_root| equals |packages| set. |
146 | 263 |
147 Blocking call. | 264 Blocking call. |
148 | 265 |
266 Attempts to use the isolate cache to store the unzipped cipd files, keeping | |
267 a .isolated file in the cipd cache_dir. | |
268 | |
149 Args: | 269 Args: |
150 site_root (str): where to install packages. | 270 site_root (str): where to install packages. |
151 packages: dict of subdir -> list of (package_template, version) tuples. | 271 packages: dict of subdir -> list of (package_template, version) tuples. |
152 cache_dir (str): if set, cache dir for cipd binary own cache. | 272 cache_dir (str): if set, cache dir for cipd binary own cache. |
153 Typically contains packages and tags. | 273 Typically contains packages and tags. |
154 tmp_dir (str): if not None, dir for temp files. | 274 tmp_dir (str): if not None, dir for temp files. |
155 timeout (int): if not None, timeout in seconds for this function to run. | 275 timeout (int): if not None, timeout in seconds for this function to run. |
276 isolate_cache (isolateserver.DiskCache): if not None, CIPD assets will | |
277 be unzipped and stored in this disk cache and extracted from there. | |
156 | 278 |
157 Returns: | 279 Returns: |
158 Pinned packages in the form of {subdir: [(package_name, package_id)]}, | 280 Pinned packages in the form of {subdir: [(package_name, package_id)]}, |
159 which correspond 1:1 with the input packages argument. | 281 which correspond 1:1 with the input packages argument. |
160 | 282 |
161 Raises: | 283 Raises: |
162 Error if could not install packages or timed out. | 284 Error if could not install packages or timed out. |
163 """ | 285 """ |
164 timeoutfn = tools.sliding_timeout(timeout) | 286 timeoutfn = tools.sliding_timeout(timeout) |
165 logging.info('Installing packages %r into %s', packages, site_root) | 287 logging.info('Installing packages %r into %s', packages, site_root) |
166 | |
167 ensure_file_handle, ensure_file_path = tempfile.mkstemp( | 288 ensure_file_handle, ensure_file_path = tempfile.mkstemp( |
168 dir=tmp_dir, prefix=u'cipd-ensure-file-', suffix='.txt') | 289 dir=tmp_dir, prefix=u'cipd-ensure-file-', suffix='.txt') |
169 json_out_file_handle, json_file_path = tempfile.mkstemp( | 290 json_out_file_handle, json_file_path = tempfile.mkstemp( |
170 dir=tmp_dir, prefix=u'cipd-ensure-result-', suffix='.json') | 291 dir=tmp_dir, prefix=u'cipd-ensure-result-', suffix='.json') |
171 os.close(json_out_file_handle) | 292 os.close(json_out_file_handle) |
172 | 293 to_isolate = {} |
294 from_isolate = {} | |
173 try: | 295 try: |
174 try: | 296 try: |
175 for subdir, pkgs in sorted(packages.iteritems()): | 297 for subdir, pkgs in sorted(packages.iteritems()): |
176 if '\n' in subdir: | 298 if '\n' in subdir: |
177 raise Error( | 299 raise Error( |
178 'Could not install packages; subdir %r contains newline' % subdir) | 300 'Could not install packages; subdir %r contains newline' % subdir) |
301 | |
302 # Join all the versions together so that the cached isolated changes | |
303 # whenever any of the package versions change. | |
304 versions = [p[1] for p in pkgs] | |
305 cipd_isolated = u'%s.%s.isolated.%s' % (subdir, '_'.join(versions), | |
306 self._cache_hash_algo) | |
307 cipd_isolated = os.path.join(cache_dir, cipd_isolated) | |
308 if (os.path.isfile(cipd_isolated) and | |
309 self._ensure_from_isolate(os.path.join(site_root, subdir), | |
310 cipd_isolated, isolate_cache)): | |
311 from_isolate[unicode(subdir)] = pkgs | |
312 continue | |
313 to_isolate[subdir] = cipd_isolated | |
179 os.write(ensure_file_handle, '@Subdir %s\n' % (subdir,)) | 314 os.write(ensure_file_handle, '@Subdir %s\n' % (subdir,)) |
180 for pkg, version in pkgs: | 315 for pkg, version in pkgs: |
181 pkg = render_package_name_template(pkg) | 316 pkg = render_package_name_template(pkg) |
182 os.write(ensure_file_handle, '%s %s\n' % (pkg, version)) | 317 os.write(ensure_file_handle, '%s %s\n' % (pkg, version)) |
318 | |
183 finally: | 319 finally: |
184 os.close(ensure_file_handle) | 320 os.close(ensure_file_handle) |
185 | 321 |
322 # to_isolate holds the packages that must be ensured via CIPD and then | |
323 # isolated. If it is empty, there is nothing to fetch from CIPD because | |
324 # everything was pulled from the isolate cache, so return from_isolate, | |
325 # the pinned packages that came from the cache. | |
326 if not to_isolate: | |
327 return from_isolate | |
328 | |
186 cmd = [ | 329 cmd = [ |
187 self.binary_path, 'ensure', | 330 self.binary_path, 'ensure', |
188 '-root', site_root, | 331 '-root', site_root, |
189 '-ensure-file', ensure_file_path, | 332 '-ensure-file', ensure_file_path, |
190 '-verbose', # this is safe because cipd-ensure does not print a lot | 333 '-verbose', # this is safe because cipd-ensure does not print a lot |
191 '-json-output', json_file_path, | 334 '-json-output', json_file_path, |
192 ] | 335 ] |
193 if cache_dir: | 336 if cache_dir: |
194 cmd += ['-cache-dir', cache_dir] | 337 cmd += ['-cache-dir', cache_dir] |
195 if self.service_url: | 338 if self.service_url: |
(...skipping 16 matching lines...) | |
212 if pipe_name == 'stderr': | 355 if pipe_name == 'stderr': |
213 logging.debug('cipd client: %s', line) | 356 logging.debug('cipd client: %s', line) |
214 else: | 357 else: |
215 logging.info('cipd client: %s', line) | 358 logging.info('cipd client: %s', line) |
216 | 359 |
217 exit_code = process.wait(timeout=timeoutfn()) | 360 exit_code = process.wait(timeout=timeoutfn()) |
218 if exit_code != 0: | 361 if exit_code != 0: |
219 raise Error( | 362 raise Error( |
220 'Could not install packages; exit code %d\noutput:%s' % ( | 363 'Could not install packages; exit code %d\noutput:%s' % ( |
221 exit_code, '\n'.join(output))) | 364 exit_code, '\n'.join(output))) |
365 | |
366 self._isolate_cipd(site_root, to_isolate, isolate_cache, cache_dir) | |
367 | |
222 with open(json_file_path) as jfile: | 368 with open(json_file_path) as jfile: |
223 result_json = json.load(jfile) | 369 result_json = json.load(jfile) |
224 return { | 370 from_isolate.update({ |
225 subdir: [(x['package'], x['instance_id']) for x in pins] | 371 subdir: [(x['package'], x['instance_id']) for x in pins] |
226 for subdir, pins in result_json['result'].iteritems() | 372 for subdir, pins in result_json['result'].iteritems() |
227 } | 373 }) |
374 return from_isolate | |
228 finally: | 375 finally: |
229 fs.remove(ensure_file_path) | 376 fs.remove(ensure_file_path) |
230 fs.remove(json_file_path) | 377 fs.remove(json_file_path) |
231 | 378 |
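For reference, the ensure file written by the loop in ensure() above is plain text consumed by the cipd binary: one `@Subdir` header per subdirectory, followed by `<package> <version>` pairs (the package names and versions below are invented):

```
@Subdir bin
infra/tools/foo/linux-amd64 deadbeef
@Subdir lib
infra/libs/bar/linux-amd64 cafef00d
```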
232 | 379 |
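And a hedged usage sketch of the updated ensure() flow; the constructor keywords follow the __init__ docstring shown earlier, while the paths, package names, and the DiskCache value are placeholders:

```python
# Hypothetical caller; isolate_cache would be an isolateserver.DiskCache.
client = CipdClient(
    binary_path='/opt/cipd',
    package_name='infra/tools/cipd/${platform}',
    instance_id='abc123',
    service_url=None)
pins = client.ensure(
    site_root='/work/site_root',
    packages={'bin': [('infra/tools/foo/${platform}', 'deadbeef')]},
    cache_dir='/work/cipd_cache',
    isolate_cache=isolate_cache)
# On a fresh ensure, pins maps subdir -> [(package_name, instance_id)]; on an
# isolate-cache hit the input (package_template, version) tuples are returned
# for that subdir instead.
```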
233 def get_platform(): | 380 def get_platform(): |
234 """Returns ${platform} parameter value. | 381 """Returns ${platform} parameter value. |
235 | 382 |
236 Borrowed from | 383 Borrowed from |
237 https://chromium.googlesource.com/infra/infra/+/aaf9586/build/build.py#204 | 384 https://chromium.googlesource.com/infra/infra/+/aaf9586/build/build.py#204 |
(...skipping 236 matching lines...) | |
474 """ | 621 """ |
475 result = [] | 622 result = [] |
476 for pkg in packages: | 623 for pkg in packages: |
477 path, name, version = pkg.split(':', 2) | 624 path, name, version = pkg.split(':', 2) |
478 if not name: | 625 if not name: |
479 raise Error('Invalid package "%s": package name is not specified' % pkg) | 626 raise Error('Invalid package "%s": package name is not specified' % pkg) |
480 if not version: | 627 if not version: |
481 raise Error('Invalid package "%s": version is not specified' % pkg) | 628 raise Error('Invalid package "%s": version is not specified' % pkg) |
482 result.append((path, name, version)) | 629 result.append((path, name, version)) |
483 return result | 630 return result |
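To illustrate the `<path>:<package>:<version>` format parsed above, note that `split(':', 2)` keeps any further colons inside the version, which matters for tag-style versions (example values invented):

```python
# Hypothetical input; the colon inside the version is preserved.
pkg = 'bin:infra/tools/foo/${platform}:git_revision:deadbeef'
path, name, version = pkg.split(':', 2)
# path    == 'bin'
# name    == 'infra/tools/foo/${platform}'
# version == 'git_revision:deadbeef'
```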