OLD | NEW |
(Empty) | |
| 1 # Copyright 2015 The Chromium Authors. All rights reserved. |
| 2 # Use of this source code is governed by a BSD-style license that can be |
| 3 # found in the LICENSE file. |
| 4 |
| 5 import json |
| 6 import logging |
| 7 import os |
| 8 |
| 9 from catapult_base import cloud_storage |
| 10 from catapult_base.dependency_manager import archive_info |
| 11 from catapult_base.dependency_manager import cloud_storage_info |
| 12 from catapult_base.dependency_manager import dependency_info |
| 13 from catapult_base.dependency_manager import exceptions |
| 14 from catapult_base.dependency_manager import local_path_info |
| 15 from catapult_base.dependency_manager import uploader |
| 16 |
| 17 |
class BaseConfig(object):
  """A basic config class for use with the DependencyManager.

  Initiated with a json file in the following format:

            { "config_type": "BaseConfig",
              "dependencies": {
                "dep_name1": {
                  "cloud_storage_base_folder": "base_folder1",
                  "cloud_storage_bucket": "bucket1",
                  "file_info": {
                    "platform1": {
                        "cloud_storage_hash": "hash_for_platform1",
                        "download_path": "download_path111",
                        "version_in_cs": "1.11.1.11",
                        "local_paths": ["local_path1110", "local_path1111"]
                    },
                    "platform2": {
                        "cloud_storage_hash": "hash_for_platform2",
                        "download_path": "download_path2",
                        "local_paths": ["local_path20", "local_path21"]
                    },
                    ...
                  }
                },
                "dependency_name_2": {
                    ...
                },
                  ...
              }
            }

  Required fields: "dependencies" and "config_type".
  Note that config_type must be "BaseConfig"

  Assumptions:
      "cloud_storage_base_folder" is a top level folder in the given
        "cloud_storage_bucket" where all of the dependency files are stored
        at "dependency_name"_"cloud_storage_hash".

      "download_path" and all paths in "local_paths" are relative to the
        config file's location.

      All or none of the following cloud storage related fields must be
        included in each platform dictionary:
        "cloud_storage_hash", "download_path", "cs_remote_path"

      "version_in_cs" is an optional cloud storage field, but is dependent
        on the above cloud storage related fields.


  Also note that platform names are often of the form os_architecture.
  Ex: "win_AMD64"

  More information on the fields can be found in dependencies_info.py
  """
  def __init__(self, file_path, writable=False):
    """ Initialize a BaseConfig for the DependencyManager.

    Args:
        writable: False: This config will be used to lookup information.
                  True: This config will be used to update information.

        file_path: Path to a file containing a json dictionary in the expected
                   json format for this config class. Base format expected:

                   { "config_type": config_type,
                     "dependencies": dependencies_dict }

                   config_type: must match the return value of GetConfigType.
                   dependencies: A dictionary with the information needed to
                       create dependency_info instances for the given
                       dependencies.

                   See dependency_info.py for more information.

    Raises:
        ValueError: If |file_path| is falsy, or if the file's config_type
            does not match GetConfigType().
        EmptyConfigError: If the file is missing (and not writable) or
            contains an empty json dictionary.
    """
    self._config_path = file_path
    self._writable = writable
    self._is_dirty = False
    self._pending_uploads = []
    if not self._config_path:
      raise ValueError('Must supply config file path.')
    if not os.path.exists(self._config_path):
      if not writable:
        raise exceptions.EmptyConfigError(file_path)
      # A writable config may start from nothing; create an empty config
      # file on disk so later reads and updates have a consistent base.
      self._config_data = {}
      self._WriteConfigToFile(self._config_path, dependencies=self._config_data)
    else:
      with open(file_path, 'r') as f:
        config_data = json.load(f)
      if not config_data:
        raise exceptions.EmptyConfigError(file_path)
      config_type = config_data.pop('config_type', None)
      if config_type != self.GetConfigType():
        raise ValueError(
            'Supplied config_type (%s) is not the expected type (%s) in file '
            '%s' % (config_type, self.GetConfigType(), file_path))
      self._config_data = config_data.get('dependencies', {})

  def IterDependencyInfo(self):
    """ Yields a DependencyInfo for each dependency/platform pair.

    Raises:
        ReadWriteError: If called when the config is writable.
        ValueError: If any of the dependencies contain partial information for
            downloading from cloud_storage. (See dependency_info.py)
        ConfigError: If a platform has cloud storage info (a download path)
            but no cloud storage hash.
    """
    if self._writable:
      raise exceptions.ReadWriteError(
          'Trying to read dependency info from a writable config. File for '
          'config: %s' % self._config_path)
    # All relative paths in the config are resolved against the config file's
    # own directory, per the class docstring.
    base_path = os.path.dirname(self._config_path)
    for dependency in self._config_data:
      dependency_dict = self._config_data.get(dependency)
      platforms_dict = dependency_dict.get('file_info', {})
      for platform in platforms_dict:
        platform_info = platforms_dict.get(platform)

        local_info = None
        local_paths = platform_info.get('local_paths', [])
        if local_paths:
          paths = []
          for path in local_paths:
            path = self._FormatPath(path)
            paths.append(os.path.abspath(os.path.join(base_path, path)))
          local_info = local_path_info.LocalPathInfo(paths)

        cs_info = None
        cs_bucket = dependency_dict.get('cloud_storage_bucket')
        cs_base_folder = dependency_dict.get('cloud_storage_base_folder', '')
        download_path = platform_info.get('download_path')
        if download_path:
          download_path = self._FormatPath(download_path)
          download_path = os.path.abspath(
              os.path.join(base_path, download_path))

          cs_hash = platform_info.get('cloud_storage_hash')
          if not cs_hash:
            # A download path without a hash is a partial cloud storage
            # entry; refuse it rather than download unverifiable data.
            raise exceptions.ConfigError(
                'Dependency %s has cloud storage info on platform %s, but is '
                'missing a cloud storage hash.' % (dependency, platform))
          cs_remote_path = self._CloudStorageRemotePath(
              dependency, cs_hash, cs_base_folder)
          version_in_cs = platform_info.get('version_in_cs')

          zip_info = None
          path_within_archive = platform_info.get('path_within_archive')
          if path_within_archive:
            # Unzip next to the download, in a directory unique to this
            # dependency/platform/hash triple so stale unzips aren't reused.
            unzip_path = os.path.abspath(
                os.path.join(os.path.dirname(download_path),
                             '%s_%s_%s' % (dependency, platform, cs_hash)))
            zip_info = archive_info.ArchiveInfo(
                download_path, unzip_path, path_within_archive)

          cs_info = cloud_storage_info.CloudStorageInfo(
              cs_bucket, cs_hash, download_path, cs_remote_path,
              version_in_cs=version_in_cs, archive_info=zip_info)

        dep_info = dependency_info.DependencyInfo(
            dependency, platform, self._config_path, local_path_info=local_info,
            cloud_storage_info=cs_info)
        yield dep_info

  @classmethod
  def GetConfigType(cls):
    """Return the config_type string this class accepts in config files."""
    return 'BaseConfig'

  @property
  def config_path(self):
    """Path to the json config file backing this instance."""
    return self._config_path

  def AddCloudStorageDependencyUpdateJob(
      self, dependency, platform, dependency_path, version=None,
      execute_job=True):
    """Update the file downloaded from cloud storage for a dependency/platform.

    Upload a new file to cloud storage for the given dependency and platform
    pair and update the cloud storage hash and the version for the given pair.

    Example usage:
      The following should update the default platform for 'dep_name':
          UpdateCloudStorageDependency('dep_name', 'default', 'path/to/file')

      The following should update both the mac and win platforms for 'dep_name',
      or neither if either update fails:
          UpdateCloudStorageDependency(
              'dep_name', 'mac_x86_64', 'path/to/mac/file', execute_job=False)
          UpdateCloudStorageDependency(
              'dep_name', 'win_AMD64', 'path/to/win/file', execute_job=False)
          ExecuteUpdateJobs()

    Args:
      dependency: The dependency to update.
      platform: The platform to update the dependency info for.
      dependency_path: Path to the new dependency to be used.
      version: Version of the updated dependency, for checking future updates
          against.
      execute_job: True if the config should be written to disk and the file
          should be uploaded to cloud storage after the update. False if
          multiple updates should be performed atomically. Must call
          ExecuteUpdateJobs after all non-executed jobs are added to complete
          the update.

    Raises:
      ReadWriteError: If the config was not initialized as writable, or if
          |execute_job| is True but the config has update jobs still pending
          execution.
      ValueError: If no information exists in the config for |dependency| on
          |platform|.
    """
    self._ValidateIsConfigUpdatable(
        execute_job=execute_job, dependency=dependency, platform=platform)
    self._is_dirty = True
    cs_hash = cloud_storage.CalculateHash(dependency_path)
    if version:
      self._SetPlatformData(dependency, platform, 'version_in_cs', version)
    self._SetPlatformData(dependency, platform, 'cloud_storage_hash', cs_hash)

    cs_base_folder = self._GetPlatformData(
        dependency, platform, 'cloud_storage_base_folder')
    cs_bucket = self._GetPlatformData(
        dependency, platform, 'cloud_storage_bucket')
    cs_remote_path = self._CloudStorageRemotePath(
        dependency, cs_hash, cs_base_folder)
    self._pending_uploads.append(uploader.CloudStorageUploader(
        cs_bucket, cs_remote_path, dependency_path))
    if execute_job:
      self.ExecuteUpdateJobs()

  def ExecuteUpdateJobs(self, force=False):
    """Write all config changes to the config_path specified in __init__.

    Upload all files pending upload and then write the updated config to
    file. Attempt to remove all uploaded files on failure.

    Args:
      force: True if files should be uploaded to cloud storage even if a
          file already exists in the upload location.

    Returns:
      True: if the config was dirty and the upload succeeded.
      False: if the config was not dirty.

    Raises:
      CloudStorageUploadConflictError: If |force| is False and the potential
          upload location of a file already exists.
      CloudStorageError: If copying an existing file to the backup location
          or uploading a new file fails.
    """
    self._ValidateIsConfigUpdatable()
    if not self._is_dirty:
      logging.info('ExecuteUpdateJobs called on clean config')
      return False
    if not self._pending_uploads:
      # NOTE(review): a dirty config with no pending uploads is not written
      # to disk here; today AddCloudStorageDependencyUpdateJob always queues
      # an upload when it dirties the config, so this branch is effectively
      # unreachable — confirm before relying on it.
      logging.debug('No files needing upload.')
    else:
      try:
        for item_pending_upload in self._pending_uploads:
          item_pending_upload.Upload(force)
        self._WriteConfigToFile(self._config_path, self._config_data)
        self._pending_uploads = []
        self._is_dirty = False
      except:
        # Attempt to rollback the update in any instance of failure, even user
        # interrupt via Ctrl+C; but don't consume the exception. (The bare
        # except is deliberate so KeyboardInterrupt/SystemExit also roll back.)
        logging.error('Update failed, attempting to roll it back.')
        for upload_item in reversed(self._pending_uploads):
          upload_item.Rollback()
        raise
    return True

  def GetVersion(self, dependency, platform):
    """Return the Version information for the given dependency."""
    return self._GetPlatformData(
        dependency, platform, data_type='version_in_cs')

  def _SetPlatformData(self, dependency, platform, data_type, data):
    """Set one config value for |dependency| on |platform|.

    Bucket and base-folder values live at the dependency level (shared by
    all platforms); everything else is stored per platform.
    """
    self._ValidateIsConfigWritable()
    dependency_dict = self._config_data.get(dependency, {})
    platform_dict = dependency_dict.get('file_info', {}).get(platform)
    if not platform_dict:
      raise ValueError('No platform data for platform %s on dependency %s' %
                       (platform, dependency))
    if (data_type == 'cloud_storage_bucket' or
        data_type == 'cloud_storage_base_folder'):
      self._config_data[dependency][data_type] = data
    else:
      self._config_data[dependency]['file_info'][platform][data_type] = data

  def _GetPlatformData(self, dependency, platform, data_type=None):
    """Return one value (or the whole platform dict if |data_type| is None).

    Raises:
      ValueError: If |dependency| or |platform| is not in the config.
    """
    dependency_dict = self._config_data.get(dependency, {})
    if not dependency_dict:
      raise ValueError('Dependency %s is not in config.' % dependency)
    platform_dict = dependency_dict.get('file_info', {}).get(platform)
    if not platform_dict:
      raise ValueError('No platform data for platform %s on dependency %s' %
                       (platform, dependency))
    if data_type:
      # Mirror _SetPlatformData: bucket/base-folder are dependency-level.
      if (data_type == 'cloud_storage_bucket' or
          data_type == 'cloud_storage_base_folder'):
        return dependency_dict.get(data_type)
      return platform_dict.get(data_type)
    return platform_dict

  def _ValidateIsConfigUpdatable(
      self, execute_job=False, dependency=None, platform=None):
    """Raise unless this config can accept an update right now."""
    self._ValidateIsConfigWritable()
    if self._is_dirty and execute_job:
      raise exceptions.ReadWriteError(
          'A change has already been made to this config. Either call without '
          'using the execute_job option or first call ExecuteUpdateJobs().')
    if dependency and not self._config_data.get(dependency):
      raise ValueError('Cannot update information because dependency %s does '
                       'not exist.' % dependency)
    if platform and not self._GetPlatformData(dependency, platform):
      raise ValueError('No dependency info is available for the given '
                       'dependency: %s' % dependency)

  def _ValidateIsConfigWritable(self):
    """Raise ReadWriteError unless this config was opened writable."""
    if not self._writable:
      raise exceptions.ReadWriteError(
          'Trying to update the information from a read-only config. '
          'File for config: %s' % self._config_path)

  @staticmethod
  def _CloudStorageRemotePath(dependency, cs_hash, cs_base_folder):
    """Return '<base_folder>/<dependency>_<hash>' (no folder prefix if empty)."""
    cs_remote_file = '%s_%s' % (dependency, cs_hash)
    cs_remote_path = cs_remote_file if not cs_base_folder else (
        '%s/%s' % (cs_base_folder, cs_remote_file))
    return cs_remote_path

  @classmethod
  def _FormatPath(cls, file_path):
    """ Format |file_path| for the current file system.

    We may be downloading files for another platform, so paths must be
    downloadable on the current system.
    """
    if not file_path:
      return file_path
    if os.path.sep != '\\':
      return file_path.replace('\\', os.path.sep)
    elif os.path.sep != '/':
      return file_path.replace('/', os.path.sep)
    return file_path

  @classmethod
  def _WriteConfigToFile(cls, file_path, dependencies=None):
    """Serialize |dependencies| into the standard config json at |file_path|.

    Creates the parent directory if needed. Returns the dict written.
    """
    json_dict = cls._GetJsonDict(dependencies)
    file_dir = os.path.dirname(file_path)
    if not os.path.exists(file_dir):
      os.makedirs(file_dir)
    with open(file_path, 'w') as outfile:
      json.dump(
          json_dict, outfile, indent=2, sort_keys=True, separators=(',', ': '))
    return json_dict

  @classmethod
  def _GetJsonDict(cls, dependencies=None):
    """Wrap |dependencies| in the {config_type, dependencies} envelope."""
    dependencies = dependencies or {}
    json_dict = {'config_type': cls.GetConfigType(),
                 'dependencies': dependencies}
    return json_dict
OLD | NEW |