| OLD | NEW |
| 1 # Copyright 2015 The Swarming Authors. All rights reserved. | 1 # Copyright 2015 The Swarming Authors. All rights reserved. |
| 2 # Use of this source code is governed by the Apache v2.0 license that can be | 2 # Use of this source code is governed by the Apache v2.0 license that can be |
| 3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
| 4 | 4 |
| 5 import base64 | 5 import base64 |
| 6 import logging | 6 import logging |
| 7 import posixpath | 7 import posixpath |
| 8 import re | 8 import re |
| 9 import urlparse | 9 import urlparse |
| 10 | 10 |
| 11 from google.appengine.ext import ndb | 11 from google.appengine.ext import ndb |
| 12 | 12 |
| 13 from components import auth |
| 13 from components import config | 14 from components import config |
| 14 from components import gitiles | 15 from components import gitiles |
| 15 from components import net | 16 from components import net |
| 16 from components.config import validation | 17 from components.config import validation |
| 17 | 18 |
| 18 from proto import project_config_pb2 | 19 from proto import project_config_pb2 |
| 19 from proto import service_config_pb2 | 20 from proto import service_config_pb2 |
| 20 import common | 21 import common |
| 22 import services |
| 21 import storage | 23 import storage |
| 22 | 24 |
| 23 | 25 |
| 24 def validate_config_set(config_set, ctx=None): | 26 def validate_config_set(config_set, ctx=None): |
| 25 ctx = ctx or validation.Context.raise_on_error() | 27 ctx = ctx or validation.Context.raise_on_error() |
| 26 if not any(r.match(config_set) for r in config.ALL_CONFIG_SET_RGX): | 28 if not any(r.match(config_set) for r in config.ALL_CONFIG_SET_RGX): |
| 27 ctx.error('invalid config set: %s', config_set) | 29 ctx.error('invalid config set: %s', config_set) |
| 28 | 30 |
| 29 | 31 |
| 30 def validate_path(path, ctx=None): | 32 def validate_path(path, ctx=None): |
| (...skipping 12 matching lines...) | (...skipping 12 matching lines...) |
| 43 ctx.error('not specified') | 45 ctx.error('not specified') |
| 44 return | 46 return |
| 45 parsed = urlparse.urlparse(url) | 47 parsed = urlparse.urlparse(url) |
| 46 if not parsed.netloc: | 48 if not parsed.netloc: |
| 47 ctx.error('hostname not specified') | 49 ctx.error('hostname not specified') |
| 48 if parsed.scheme != 'https': | 50 if parsed.scheme != 'https': |
| 49 ctx.error('scheme must be "https"') | 51 ctx.error('scheme must be "https"') |
| 50 | 52 |
| 51 | 53 |
| 52 def validate_pattern(pattern, literal_validator, ctx): | 54 def validate_pattern(pattern, literal_validator, ctx): |
| 55 try: |
| 56 config.validation.compile_pattern(pattern) |
| 57 except ValueError as ex: |
| 58 ctx.error(ex.message) |
| 59 return |
| 60 |
| 53 if ':' not in pattern: | 61 if ':' not in pattern: |
| 54 literal_validator(pattern, ctx) | 62 literal_validator(pattern, ctx) |
| 55 return | 63 elif pattern.startswith('text:'): |
| 56 | 64 literal_validator(pattern.split(':', 1)[1], ctx) |
| 57 pattern_type, pattern_text = pattern.split(':', 2) | |
| 58 if pattern_type != 'regex': | |
| 59 ctx.error('unknown pattern type: %s', pattern_type) | |
| 60 return | |
| 61 try: | |
| 62 re.compile(pattern_text) | |
| 63 except re.error as ex: | |
| 64 ctx.error('invalid regular expression "%s": %s', pattern_text, ex) | |
| 65 | 65 |
| 66 | 66 |
| 67 @validation.self_rule( | 67 def check_id_sorted(iterable, list_name, ctx): |
| 68 common.VALIDATION_FILENAME, service_config_pb2.ValidationCfg) | 68 """Emits a warning if the iterable is not sorted by id.""" |
| 69 def validate_validation_cfg(cfg, ctx): | 69 prev = None |
| 70 for i, rule in enumerate(cfg.rules): | 70 for item in iterable: |
| 71 with ctx.prefix('Rule #%d: ', i + 1): | 71 if not item.id: |
| 72 with ctx.prefix('config_set: '): | 72 continue |
| 73 validate_pattern(rule.config_set, validate_config_set, ctx) | 73 if prev is not None and item.id < prev: |
| 74 with ctx.prefix('path: '): | 74 ctx.warning( |
| 75 validate_pattern(rule.path, validate_path, ctx) | 75 '%s are not sorted by id. First offending id: %s', list_name, item.id) |
| 76 with ctx.prefix('url: '): | 76 return |
| 77 validate_url(rule.url, ctx) | 77 prev = item.id |
| 78 |
| 79 |
| 80 def validate_id(id, rgx, known_ids, ctx): |
| 81 if not id: |
| 82 ctx.error('id is not specified') |
| 83 return |
| 84 if not rgx.match(id): |
| 85 ctx.error('id "%s" does not match %s regex', id, rgx.pattern) |
| 86 return |
| 87 if id in known_ids: |
| 88 ctx.error('id is not unique') |
| 89 else: |
| 90 known_ids.add(id) |
| 78 | 91 |
| 79 | 92 |
| 80 def validate_config_set_location(loc, ctx, allow_relative_url=False): | 93 def validate_config_set_location(loc, ctx, allow_relative_url=False): |
| 81 if not loc: | 94 if not loc: |
| 82 ctx.error('not specified') | 95 ctx.error('not specified') |
| 83 return | 96 return |
| 84 if loc.storage_type == service_config_pb2.ConfigSetLocation.UNSET: | 97 if allow_relative_url and is_url_relative(loc.url): |
| 98 if loc.storage_type != service_config_pb2.ConfigSetLocation.UNSET: |
| 99 ctx.error('storage_type must not be set if relative url is used') |
| 100 elif loc.storage_type == service_config_pb2.ConfigSetLocation.UNSET: |
| 85 ctx.error('storage_type is not set') | 101 ctx.error('storage_type is not set') |
| 86 else: | 102 else: |
| 87 assert loc.storage_type == service_config_pb2.ConfigSetLocation.GITILES | 103 assert loc.storage_type == service_config_pb2.ConfigSetLocation.GITILES |
| 88 if allow_relative_url and is_url_relative(loc.url): | |
| 89 # It is relative. Avoid calling gitiles.Location.parse. | |
| 90 return | |
| 91 try: | 104 try: |
| 92 gitiles.Location.parse(loc.url) | 105 gitiles.Location.parse(loc.url) |
| 93 except ValueError as ex: | 106 except ValueError as ex: |
| 94 ctx.error(ex.message) | 107 ctx.error(ex.message) |
| 95 | 108 |
| 96 | 109 |
| 97 @validation.self_rule( | 110 @validation.self_rule( |
| 98 common.PROJECT_REGISTRY_FILENAME, service_config_pb2.ProjectsCfg) | 111 common.PROJECT_REGISTRY_FILENAME, service_config_pb2.ProjectsCfg) |
| 99 def validate_project_registry(cfg, ctx): | 112 def validate_project_registry(cfg, ctx): |
| 100 project_ids = set() | 113 project_ids = set() |
| 101 unsorted_id = None | |
| 102 for i, project in enumerate(cfg.projects): | 114 for i, project in enumerate(cfg.projects): |
| 103 with ctx.prefix('Project %s: ', project.id or ('#%d' % (i + 1))): | 115 with ctx.prefix('Project %s: ', project.id or ('#%d' % (i + 1))): |
| 104 if not project.id: | 116 validate_id(project.id, config.common.PROJECT_ID_RGX, project_ids, ctx) |
| 105 ctx.error('id is not specified') | |
| 106 else: | |
| 107 if project.id in project_ids: | |
| 108 ctx.error('id is not unique') | |
| 109 else: | |
| 110 project_ids.add(project.id) | |
| 111 if not unsorted_id and i > 0: | |
| 112 if cfg.projects[i - 1].id and project.id < cfg.projects[i - 1].id: | |
| 113 unsorted_id = project.id | |
| 114 with ctx.prefix('config_location: '): | 117 with ctx.prefix('config_location: '): |
| 115 validate_config_set_location(project.config_location, ctx) | 118 validate_config_set_location(project.config_location, ctx) |
| 119 check_id_sorted(cfg.projects, 'Projects', ctx) |
| 116 | 120 |
| 117 | 121 |
| 118 if unsorted_id: | 122 |
| 119 ctx.warning( | 123 def validate_identity(identity, ctx): |
| 120 'Project list is not sorted by id. First offending id: %s', | 124 try: |
| 121 unsorted_id) | 125 auth.Identity.from_bytes(identity) |
| 126 except ValueError as ex: |
| 127 ctx.error(ex.message) |
| 128 |
| 129 |
| 130 def validate_email(email, ctx): |
| 131 try: |
| 132 auth.Identity('user', email) |
| 133 except ValueError as ex: |
| 134 ctx.error('invalid email: "%s"', email) |
| 135 |
| 136 |
| 137 def validate_group(group, ctx): |
| 138 if not auth.is_valid_group_name(group): |
| 139 ctx.error('invalid group: %s', group) |
| 140 |
| 141 |
| 142 def validate_identity_predicate(access, ctx): |
| 143 """Ensures |access| is "group:<group>", an identity or an email.""" |
| 144 if not access: |
| 145 ctx.error('not specified') |
| 146 return |
| 147 elif access.startswith('group:'): |
| 148 group = access.split(':', 1)[1] |
| 149 validate_group(group, ctx) |
| 150 elif ':' in access: |
| 151 validate_identity(access, ctx) |
| 152 else: |
| 153 validate_email(access, ctx) |
| 154 |
| 155 |
| 156 def validate_access_list(access_list, ctx): |
| 157 for i, ac in enumerate(access_list): |
| 158 with ctx.prefix('access #%d: ', i + 1): |
| 159 validate_identity_predicate(ac, ctx) |
| 160 |
| 161 |
| 162 @validation.self_rule( |
| 163 common.SERVICES_REGISTRY_FILENAME, service_config_pb2.ServicesCfg) |
| 164 def validate_services_cfg(cfg, ctx): |
| 165 service_ids = set() |
| 166 for i, service in enumerate(cfg.services): |
| 167 with ctx.prefix('Service %s: ', service.id or ('#%d' % (i + 1))): |
| 168 validate_id(service.id, config.common.SERVICE_ID_RGX, service_ids, ctx) |
| 169 if service.config_location and service.config_location.url: |
| 170 with ctx.prefix('config_location: '): |
| 171 validate_config_set_location( |
| 172 service.config_location, ctx, allow_relative_url=True) |
| 173 for owner in service.owners: |
| 174 validate_email(owner, ctx) |
| 175 if service.metadata_url: |
| 176 with ctx.prefix('metadata_url: '): |
| 177 validate_url(service.metadata_url, ctx) |
| 178 validate_access_list(service.access, ctx) |
| 179 |
| 180 check_id_sorted(cfg.services, 'Services', ctx) |
| 181 |
| 182 |
| 183 def validate_service_dynamic_metadata_blob(metadata, ctx): |
| 184 """Validates JSON-encoded ServiceDynamicMetadata""" |
| 185 if not isinstance(metadata, dict): |
| 186 ctx.error('Service dynamic metadata must be an object') |
| 187 return |
| 188 |
| 188 | 189 if metadata.get('version') != '1.0': |
| 190 ctx.error( |
| 191 'Expected format version 1.0, but found "%s"', metadata.get('version')) |
| 192 |
| 193 validation = metadata.get('validation') |
| 194 if validation is None: |
| 195 return |
| 196 |
| 197 with ctx.prefix('validation: '): |
| 198 if not isinstance(validation, dict): |
| 199 ctx.error('must be an object') |
| 200 return |
| 201 with ctx.prefix('url: '): |
| 202 validate_url(validation.get('url'), ctx) |
| 203 patterns = validation.get('patterns') |
| 204 if not isinstance(patterns, list): |
| 205 ctx.error('patterns must be a list') |
| 206 return |
| 207 for i, p in enumerate(patterns): |
| 208 with ctx.prefix('pattern #%d: ', i + 1): |
| 209 if not isinstance(p, dict): |
| 210 ctx.error('must be an object') |
| 211 continue |
| 212 with ctx.prefix('config_set: '): |
| 213 validate_pattern(p.get('config_set'), validate_config_set, ctx) |
| 214 with ctx.prefix('path: '): |
| 215 validate_pattern(p.get('path'), validate_path, ctx) |
| 122 | 216 |
| 123 | 217 |
| 124 @validation.self_rule(common.ACL_FILENAME, service_config_pb2.AclCfg) | 218 @validation.self_rule(common.ACL_FILENAME, service_config_pb2.AclCfg) |
| 125 def validate_acl_cfg(_cfg, _ctx): | 219 def validate_acl_cfg(cfg, ctx): |
| 126 # A valid protobuf message is enough. | 220 if cfg.project_config_access: |
| 127 pass | 221 validate_group(cfg.project_config_access, ctx) |
| 222 |
| 128 | 223 |
| 129 @validation.self_rule(common.IMPORT_FILENAME, service_config_pb2.ImportCfg) | 224 @validation.self_rule(common.IMPORT_FILENAME, service_config_pb2.ImportCfg) |
| 130 def validate_import_cfg(_cfg, _ctx): | 225 def validate_import_cfg(_cfg, _ctx): |
| 131 # A valid protobuf message is enough. | 226 # A valid protobuf message is enough. |
| 132 pass | 227 pass |
| 133 | 228 |
| 134 | 229 |
| 135 @validation.self_rule(common.SCHEMAS_FILENAME, service_config_pb2.SchemasCfg) | 230 @validation.self_rule(common.SCHEMAS_FILENAME, service_config_pb2.SchemasCfg) |
| 136 def validate_schemas(cfg, ctx): | 231 def validate_schemas(cfg, ctx): |
| 137 names = set() | 232 names = set() |
| (...skipping 18 matching lines...) | (...skipping 18 matching lines...) |
| 156 validate_path(path, ctx) | 251 validate_path(path, ctx) |
| 157 with ctx.prefix('url: '): | 252 with ctx.prefix('url: '): |
| 158 validate_url(schema.url, ctx) | 253 validate_url(schema.url, ctx) |
| 159 | 254 |
| 160 | 255 |
| 161 @validation.project_config_rule( | 256 @validation.project_config_rule( |
| 162 common.PROJECT_METADATA_FILENAME, project_config_pb2.ProjectCfg) | 257 common.PROJECT_METADATA_FILENAME, project_config_pb2.ProjectCfg) |
| 163 def validate_project_metadata(cfg, ctx): | 258 def validate_project_metadata(cfg, ctx): |
| 164 if not cfg.name: | 259 if not cfg.name: |
| 165 ctx.error('name is not specified') | 260 ctx.error('name is not specified') |
| 261 validate_access_list(cfg.access, ctx) |
| 166 | 262 |
| 167 | 263 |
| 168 @validation.project_config_rule( | 264 @validation.project_config_rule( |
| 169 common.REFS_FILENAME, project_config_pb2.RefsCfg) | 265 common.REFS_FILENAME, project_config_pb2.RefsCfg) |
| 170 def validate_refs_cfg(cfg, ctx): | 266 def validate_refs_cfg(cfg, ctx): |
| 171 refs = set() | 267 refs = set() |
| 172 for i, ref in enumerate(cfg.refs): | 268 for i, ref in enumerate(cfg.refs): |
| 173 with ctx.prefix('Ref #%d: ', i + 1): | 269 with ctx.prefix('Ref #%d: ', i + 1): |
| 174 if not ref.name: | 270 if not ref.name: |
| 175 ctx.error('name is not specified') | 271 ctx.error('name is not specified') |
| 176 elif not ref.name.startswith('refs/'): | 272 elif not ref.name.startswith('refs/'): |
| 177 ctx.error('name does not start with "refs/": %s', ref.name) | 273 ctx.error('name does not start with "refs/": %s', ref.name) |
| 178 elif ref.name in refs: | 274 elif ref.name in refs: |
| 179 ctx.error('duplicate ref: %s', ref.name) | 275 ctx.error('duplicate ref: %s', ref.name) |
| 180 else: | 276 else: |
| 181 refs.add(ref.name) | 277 refs.add(ref.name) |
| 182 if ref.config_path: | 278 if ref.config_path: |
| 183 validate_path(ref.config_path, ctx) | 279 validate_path(ref.config_path, ctx) |
| 184 | 280 |
| 185 | 281 |
| 186 @ndb.tasklet | 282 @ndb.tasklet |
| 187 def _endpoint_validate_async(url, config_set, path, content, ctx): | 283 def _validate_by_service_async(service, config_set, path, content, ctx): |
| 188 """Validates a config with an external service.""" | 284 """Validates a config with an external service.""" |
| 285 try: |
| 286 metadata = yield services.get_metadata_async(service.id) |
| 287 except services.DynamicMetadataError as ex: |
| 288 logging.error('Could not load dynamic metadata for %s: %s', service.id, ex) |
| 289 return |
| 290 |
| 291 assert metadata and metadata.validation |
| 292 url = metadata.validation.url |
| 293 if not url: |
| 294 return |
| 295 |
| 296 match = False |
| 297 for p in metadata.validation.patterns: |
| 298 # TODO(nodir): optimize if necessary. |
| 299 if (validation.compile_pattern(p.config_set)(config_set) and |
| 300 validation.compile_pattern(p.path)(path)): |
| 301 match = True |
| 302 break |
| 303 if not match: |
| 304 return |
| 305 |
| 189 res = None | 306 res = None |
| 190 | 307 |
| 191 def report_error(text): | 308 def report_error(text): |
| 192 text = ( | 309 text = ( |
| 193 'Error during external validation: %s\n' | 310 'Error during external validation: %s\n' |
| 194 'url: %s\n' | 311 'url: %s\n' |
| 195 'config_set: %s\n' | 312 'config_set: %s\n' |
| 196 'path: %s\n' | 313 'path: %s\n' |
| 197 'response: %r') % (text, url, config_set, path, res) | 314 'response: %r') % (text, url, config_set, path, res) |
| 198 logging.error(text) | 315 logging.error(text) |
| 199 ctx.critical(text) | 316 ctx.critical(text) |
| 200 | 317 |
| 201 try: | 318 try: |
| 202 req = { | 319 req = { |
| 203 'config_set': config_set, | 320 'config_set': config_set, |
| 204 'path': path, | 321 'path': path, |
| 205 'content': base64.b64encode(content), | 322 'content': base64.b64encode(content), |
| 206 } | 323 } |
| 207 res = yield net.json_request_async( | 324 res = yield net.json_request_async( |
| 208 url, method='POST', payload=req, | 325 url, method='POST', payload=req, scope=net.EMAIL_SCOPE) |
| 209 scope='https://www.googleapis.com/auth/userinfo.email') | |
| 210 except net.Error as ex: | 326 except net.Error as ex: |
| 211 report_error('Net error: %s' % ex) | 327 report_error('Net error: %s' % ex) |
| 212 return | 328 return |
| 213 | 329 |
| 214 try: | 330 try: |
| 215 for msg in res.get('messages', []): | 331 for msg in res.get('messages', []): |
| 216 if not isinstance(msg, dict): | 332 if not isinstance(msg, dict): |
| 217 report_error('invalid response: message is not a dict: %r' % msg) | 333 report_error('invalid response: message is not a dict: %r' % msg) |
| 218 continue | 334 continue |
| 219 severity = msg.get('severity') or 'INFO' | 335 severity = msg.get('severity') or 'INFO' |
| (...skipping 18 matching lines...) | (...skipping 18 matching lines...) |
| 238 | 354 |
| 239 Returns: | 355 Returns: |
| 240 components.config.validation_context.Result. | 356 components.config.validation_context.Result. |
| 241 """ | 357 """ |
| 242 ctx = ctx or validation.Context() | 358 ctx = ctx or validation.Context() |
| 243 | 359 |
| 244 # Check the config against built-in validators, | 360 # Check the config against built-in validators, |
| 245 # defined using validation.self_rule. | 361 # defined using validation.self_rule. |
| 246 validation.validate(config_set, path, content, ctx=ctx) | 362 validation.validate(config_set, path, content, ctx=ctx) |
| 247 | 363 |
| 248 validation_cfg = yield storage.get_self_config_async( | 364 all_services = yield services.get_services_async() |
| 249 common.VALIDATION_FILENAME, service_config_pb2.ValidationCfg) | |
| 250 # Be paranoid, check yourself. | |
| 251 validate_validation_cfg(validation_cfg, validation.Context.raise_on_error()) | |
| 252 | |
| 253 futures = [] | 365 futures = [] |
| 254 for rule in validation_cfg.rules: | 366 for service in all_services: |
| 255 if (_pattern_match(rule.config_set, config_set) and | 367 futures.append( |
| 256 _pattern_match(rule.path, path)): | 368 _validate_by_service_async(service, config_set, path, content, ctx)) |
| 257 futures.append( | |
| 258 _endpoint_validate_async(rule.url, config_set, path, content, ctx)) | |
| 259 yield futures | 369 yield futures |
| 260 raise ndb.Return(ctx.result()) | 370 raise ndb.Return(ctx.result()) |
| 261 | 371 |
| 262 | 372 |
| 263 def validate_config(*args, **kwargs): | 373 def validate_config(*args, **kwargs): |
| 264 """Blocking version of validate_async.""" | 374 """Blocking version of validate_async.""" |
| 265 return validate_config_async(*args, **kwargs).get_result() | 375 return validate_config_async(*args, **kwargs).get_result() |
| 266 | 376 |
| 267 | 377 |
| 268 def _pattern_match(pattern, value): | |
| 269 # Assume pattern is valid. | |
| 270 if ':' not in pattern: | |
| 271 return pattern == value | |
| 272 else: | |
| 273 kind, pattern = pattern.split(':', 2) | |
| 274 assert kind == 'regex' | |
| 275 return bool(re.match('^%s$' % pattern, value)) | |
| 276 | |
| 277 | |
| 278 def is_url_relative(url): | 378 def is_url_relative(url): |
| 279 parsed = urlparse.urlparse(url) | 379 parsed = urlparse.urlparse(url) |
| 280 return bool(not parsed.scheme and not parsed.netloc and parsed.path) | 380 return bool(not parsed.scheme and not parsed.netloc and parsed.path) |
| OLD | NEW |
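
The new `_validate_by_service_async` flow implies a simple HTTP contract for downstream services: the config service POSTs a JSON body with `config_set`, `path`, and base64-encoded `content` to the URL advertised in the service's dynamic metadata, and reads back a `messages` list whose entries carry `severity` and `text`. The sketch below illustrates that contract from the service side; it is a hypothetical, assumption-laden example (the handler name and the emptiness check are illustrative only and are not part of this change).

```python
# Hypothetical service-side handler illustrating the request/response shape
# that _validate_by_service_async assumes: a JSON request with config_set,
# path, and base64-encoded content; a JSON response with a "messages" list
# of {"severity", "text"} dicts.
import base64
import json


def handle_validation_request(body):
  """Returns a response dict for a posted config (sketch only)."""
  content = base64.b64decode(body['content'])
  messages = []
  if not content.strip():
    # Example check: flag empty configs as errors.
    messages.append({
        'severity': 'ERROR',
        'text': '%s in %s is empty' % (body['path'], body['config_set']),
    })
  return {'messages': messages}


if __name__ == '__main__':
  req = {
      'config_set': 'services/foo',
      'path': 'foo.cfg',
      'content': base64.b64encode('rule { }'),
  }
  print json.dumps(handle_validation_request(req))
```

In the importer shown above, the response is consumed via `res.get('messages', [])`, so a response with no messages is treated as a clean validation result.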