OLD | NEW |
1 # Copyright 2015 The Swarming Authors. All rights reserved. | 1 # Copyright 2015 The Swarming Authors. All rights reserved. |
2 # Use of this source code is governed by the Apache v2.0 license that can be | 2 # Use of this source code is governed by the Apache v2.0 license that can be |
3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
4 | 4 |
5 import base64 | 5 import base64 |
6 import logging | 6 import logging |
7 import posixpath | 7 import posixpath |
8 import re | 8 import re |
9 import urlparse | 9 import urlparse |
10 | 10 |
11 from google.appengine.ext import ndb | 11 from google.appengine.ext import ndb |
12 | 12 |
| 13 from components import auth |
13 from components import config | 14 from components import config |
14 from components import gitiles | 15 from components import gitiles |
15 from components import net | 16 from components import net |
16 from components.config import validation | 17 from components.config import validation |
17 | 18 |
18 from proto import project_config_pb2 | 19 from proto import project_config_pb2 |
19 from proto import service_config_pb2 | 20 from proto import service_config_pb2 |
20 import common | 21 import common |
| 22 import services |
21 import storage | 23 import storage |
22 | 24 |
23 | 25 |
24 def validate_config_set(config_set, ctx=None): | 26 def validate_config_set(config_set, ctx=None): |
25 ctx = ctx or validation.Context.raise_on_error() | 27 ctx = ctx or validation.Context.raise_on_error() |
26 if not any(r.match(config_set) for r in config.ALL_CONFIG_SET_RGX): | 28 if not any(r.match(config_set) for r in config.ALL_CONFIG_SET_RGX): |
27 ctx.error('invalid config set: %s', config_set) | 29 ctx.error('invalid config set: %s', config_set) |
28 | 30 |
29 | 31 |
30 def validate_path(path, ctx=None): | 32 def validate_path(path, ctx=None): |
(...skipping 12 matching lines...) |
43 ctx.error('not specified') | 45 ctx.error('not specified') |
44 return | 46 return |
45 parsed = urlparse.urlparse(url) | 47 parsed = urlparse.urlparse(url) |
46 if not parsed.netloc: | 48 if not parsed.netloc: |
47 ctx.error('hostname not specified') | 49 ctx.error('hostname not specified') |
48 if parsed.scheme != 'https': | 50 if parsed.scheme != 'https': |
49 ctx.error('scheme must be "https"') | 51 ctx.error('scheme must be "https"') |
50 | 52 |
51 | 53 |
52 def validate_pattern(pattern, literal_validator, ctx): | 54 def validate_pattern(pattern, literal_validator, ctx): |
| 55 try: |
| 56 config.validation.compile_pattern(pattern) |
| 57 except ValueError as ex: |
| 58 ctx.error(ex.message) |
| 59 return |
| 60 |
53 if ':' not in pattern: | 61 if ':' not in pattern: |
54 literal_validator(pattern, ctx) | 62 literal_validator(pattern, ctx) |
55 return | 63 elif pattern.startswith('text:'): |
56 | 64 literal_validator(pattern.split(':', 2)[1], ctx) |
57 pattern_type, pattern_text = pattern.split(':', 2) | |
58 if pattern_type != 'regex': | |
59 ctx.error('unknown pattern type: %s', pattern_type) | |
60 return | |
61 try: | |
62 re.compile(pattern_text) | |
63 except re.error as ex: | |
64 ctx.error('invalid regular expression "%s": %s', pattern_text, ex) | |
65 | 65 |
66 | 66 |
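A minimal usage sketch of the rewritten validate_pattern (illustrative only, assuming this module and components.config are importable, and that compile_pattern rejects malformed patterns with ValueError as the try/except above implies; the pattern strings are examples):

from components.config import validation

ctx = validation.Context()
# A literal pattern is passed to the literal validator unchanged.
validate_pattern('services/luci-config', validate_config_set, ctx)
# A "text:" pattern has its prefix stripped before literal validation.
validate_pattern('text:projects/chromium', validate_config_set, ctx)
# A "regex:" pattern is checked only by compile_pattern(); a malformed regular
# expression is expected to be reported via ctx.error(), and validation stops.
validate_pattern('regex:projects/[', validate_config_set, ctx)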
67 @validation.self_rule( | 67 def check_id_sorted(iterable, list_name, ctx): |
68 common.VALIDATION_FILENAME, service_config_pb2.ValidationCfg) | 68 """Emits a warning if the iterable is not sorted by id.""" |
69 def validate_validation_cfg(cfg, ctx): | 69 prev = None |
70 for i, rule in enumerate(cfg.rules): | 70 for item in iterable: |
71 with ctx.prefix('Rule #%d: ', i + 1): | 71 if not item.id: |
72 with ctx.prefix('config_set: '): | 72 continue |
73 validate_pattern(rule.config_set, validate_config_set, ctx) | 73 if prev is not None and item.id < prev: |
74 with ctx.prefix('path: '): | 74 ctx.warning( |
75 validate_pattern(rule.path, validate_path, ctx) | 75 '%s are not sorted by id. First offending id: %s', list_name, item.id) |
76 with ctx.prefix('url: '): | 76 return |
77 validate_url(rule.url, ctx) | 77 prev = item.id |
| 78 |
| 79 |
| 80 def validate_id(id, rgx, known_ids, ctx): |
| 81 if not id: |
| 82 ctx.error('id is not specified') |
| 83 return |
| 84 if not rgx.match(id): |
| 85 ctx.error('id "%s" does not match %s regex', id, rgx.pattern) |
| 86 return |
| 87 if id in known_ids: |
| 88 ctx.error('id is not unique') |
| 89 else: |
| 90 known_ids.add(id) |
78 | 91 |
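A tiny sketch of check_id_sorted's warning behaviour (illustrative; Item is a stand-in for any object with an id attribute):

import collections

from components.config import validation

Item = collections.namedtuple('Item', 'id')

ctx = validation.Context()
# The ids are out of order, so a single warning naming 'a' is expected.
check_id_sorted([Item('b'), Item('a')], 'Items', ctx)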
79 | 92 |
80 def validate_config_set_location(loc, ctx, allow_relative_url=False): | 93 def validate_config_set_location(loc, ctx, allow_relative_url=False): |
81 if not loc: | 94 if not loc: |
82 ctx.error('not specified') | 95 ctx.error('not specified') |
83 return | 96 return |
84 if loc.storage_type == service_config_pb2.ConfigSetLocation.UNSET: | 97 if allow_relative_url and is_url_relative(loc.url): |
| 98 if loc.storage_type != service_config_pb2.ConfigSetLocation.UNSET: |
| 99 ctx.error('storage_type must not be set if relative url is used') |
| 100 elif loc.storage_type == service_config_pb2.ConfigSetLocation.UNSET: |
85 ctx.error('storage_type is not set') | 101 ctx.error('storage_type is not set') |
86 else: | 102 else: |
87 assert loc.storage_type == service_config_pb2.ConfigSetLocation.GITILES | 103 assert loc.storage_type == service_config_pb2.ConfigSetLocation.GITILES |
88 if allow_relative_url and is_url_relative(loc.url): | |
89 # It is relative. Avoid calling gitiles.Location.parse. | |
90 return | |
91 try: | 104 try: |
92 gitiles.Location.parse(loc.url) | 105 gitiles.Location.parse(loc.url) |
93 except ValueError as ex: | 106 except ValueError as ex: |
94 ctx.error(ex.message) | 107 ctx.error(ex.message) |
95 | 108 |
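For illustration, a sketch of validating a Gitiles-backed location (the URL is a made-up example; the field names follow the attribute accesses above):

from components.config import validation
from proto import service_config_pb2

loc = service_config_pb2.ConfigSetLocation(
    url='https://example.googlesource.com/infra/infra',  # hypothetical repo
    storage_type=service_config_pb2.ConfigSetLocation.GITILES)
ctx = validation.Context()
validate_config_set_location(loc, ctx)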
96 | 109 |
97 @validation.self_rule( | 110 @validation.self_rule( |
98 common.PROJECT_REGISTRY_FILENAME, service_config_pb2.ProjectsCfg) | 111 common.PROJECT_REGISTRY_FILENAME, service_config_pb2.ProjectsCfg) |
99 def validate_project_registry(cfg, ctx): | 112 def validate_project_registry(cfg, ctx): |
100 project_ids = set() | 113 project_ids = set() |
101 unsorted_id = None | |
102 for i, project in enumerate(cfg.projects): | 114 for i, project in enumerate(cfg.projects): |
103 with ctx.prefix('Project %s: ', project.id or ('#%d' % (i + 1))): | 115 with ctx.prefix('Project %s: ', project.id or ('#%d' % (i + 1))): |
104 if not project.id: | 116 validate_id(project.id, config.common.PROJECT_ID_RGX, project_ids, ctx) |
105 ctx.error('id is not specified') | |
106 else: | |
107 if project.id in project_ids: | |
108 ctx.error('id is not unique') | |
109 else: | |
110 project_ids.add(project.id) | |
111 if not unsorted_id and i > 0: | |
112 if cfg.projects[i - 1].id and project.id < cfg.projects[i - 1].id: | |
113 unsorted_id = project.id | |
114 with ctx.prefix('config_location: '): | 117 with ctx.prefix('config_location: '): |
115 validate_config_set_location(project.config_location, ctx) | 118 validate_config_set_location(project.config_location, ctx) |
| 119 check_id_sorted(cfg.projects, 'Projects', ctx) |
116 | 120 |
117 | 121 |
118 if unsorted_id: | 122 def validate_email(email, ctx): |
119 ctx.warning( | 123 try: |
120 'Project list is not sorted by id. First offending id: %s', | 124 auth.Identity('user', email) |
121 unsorted_id) | 125 except ValueError as ex: |
| 126 ctx.error('invalid email: "%s"', email) |
| 127 |
| 128 |
| 129 def validate_group(group, ctx): |
| 130 if not auth.is_valid_group_name(group): |
| 131 ctx.error('invalid group: %s', group) |
| 132 |
| 133 |
| 134 @validation.self_rule( |
| 135 common.SERVICES_REGISTRY_FILENAME, service_config_pb2.ServicesCfg) |
| 136 def validate_services_cfg(cfg, ctx): |
| 137 service_ids = set() |
| 138 for i, service in enumerate(cfg.services): |
| 139 with ctx.prefix('Service %s: ', service.id or ('#%d' % (i + 1))): |
| 140 validate_id(service.id, config.common.SERVICE_ID_RGX, service_ids, ctx) |
| 141 if service.config_location and service.config_location.url: |
| 142 with ctx.prefix('config_location: '): |
| 143 validate_config_set_location( |
| 144 service.config_location, ctx, allow_relative_url=True) |
| 145 for owner in service.owners: |
| 146 validate_email(owner, ctx) |
| 147 if service.metadata_url: |
| 148 with ctx.prefix('metadata_url: '): |
| 149 validate_url(service.metadata_url, ctx) |
| 150 if service.access: |
| 151 validate_group(service.access, ctx) |
| 152 |
| 153 check_id_sorted(cfg.services, 'Services', ctx) |
| 154 |
| 155 |
| 156 def validate_service_dynamic_metadata_blob(metadata, ctx): |
| 157 """Validates JSON-encoded ServiceDynamicMetadata.""" |
| 158 if not isinstance(metadata, dict): |
| 159 ctx.error('Service dynamic metadata must be an object') |
| 160 return |
| 161 |
| 162 if metadata.get('version') != '1.0': |
| 163 ctx.error( |
| 164 'Expected format version 1.0, but found "%s"', metadata.get('version')) |
| 165 |
| 166 validation = metadata.get('validation') |
| 167 if validation is None: |
| 168 return |
| 169 |
| 170 with ctx.prefix('validation: '): |
| 171 if not isinstance(validation, dict): |
| 172 ctx.error('must be an object') |
| 173 return |
| 174 with ctx.prefix('url: '): |
| 175 validate_url(validation.get('url'), ctx) |
| 176 patterns = validation.get('patterns') |
| 177 if not isinstance(patterns, list): |
| 178 ctx.error('patterns must be a list') |
| 179 return |
| 180 for i, p in enumerate(patterns): |
| 181 with ctx.prefix('pattern #%d: ', i + 1): |
| 182 if not isinstance(p, dict): |
| 183 ctx.error('must be an object') |
| 184 continue |
| 185 with ctx.prefix('config_set: '): |
| 186 validate_pattern(p.get('config_set'), validate_config_set, ctx) |
| 187 with ctx.prefix('path: '): |
| 188 validate_pattern(p.get('path'), validate_path, ctx) |
122 | 189 |
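A sketch of the JSON shape the validator above accepts (illustrative; the keys mirror the .get() lookups in the code, and the values are made up):

from components.config import validation

metadata_blob = {
  'version': '1.0',
  'validation': {
    'url': 'https://example.com/validate',  # hypothetical endpoint
    'patterns': [
      {'config_set': 'regex:projects/.+', 'path': 'example.cfg'},
    ],
  },
}
ctx = validation.Context()
validate_service_dynamic_metadata_blob(metadata_blob, ctx)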
123 | 190 |
124 @validation.self_rule(common.ACL_FILENAME, service_config_pb2.AclCfg) | 191 @validation.self_rule(common.ACL_FILENAME, service_config_pb2.AclCfg) |
125 def validate_acl_cfg(_cfg, _ctx): | 192 def validate_acl_cfg(_cfg, _ctx): |
126 # A valid protobuf message is enough. | 193 # A valid protobuf message is enough. |
127 pass | 194 pass |
128 | 195 |
129 @validation.self_rule(common.IMPORT_FILENAME, service_config_pb2.ImportCfg) | 196 @validation.self_rule(common.IMPORT_FILENAME, service_config_pb2.ImportCfg) |
130 def validate_import_cfg(_cfg, _ctx): | 197 def validate_import_cfg(_cfg, _ctx): |
131 # A valid protobuf message is enough. | 198 # A valid protobuf message is enough. |
(...skipping 24 matching lines...) |
156 validate_path(path, ctx) | 223 validate_path(path, ctx) |
157 with ctx.prefix('url: '): | 224 with ctx.prefix('url: '): |
158 validate_url(schema.url, ctx) | 225 validate_url(schema.url, ctx) |
159 | 226 |
160 | 227 |
161 @validation.project_config_rule( | 228 @validation.project_config_rule( |
162 common.PROJECT_METADATA_FILENAME, project_config_pb2.ProjectCfg) | 229 common.PROJECT_METADATA_FILENAME, project_config_pb2.ProjectCfg) |
163 def validate_project_metadata(cfg, ctx): | 230 def validate_project_metadata(cfg, ctx): |
164 if not cfg.name: | 231 if not cfg.name: |
165 ctx.error('name is not specified') | 232 ctx.error('name is not specified') |
| 233 if cfg.access: |
| 234 validate_group(cfg.access, ctx) |
166 | 235 |
167 | 236 |
168 @validation.project_config_rule( | 237 @validation.project_config_rule( |
169 common.REFS_FILENAME, project_config_pb2.RefsCfg) | 238 common.REFS_FILENAME, project_config_pb2.RefsCfg) |
170 def validate_refs_cfg(cfg, ctx): | 239 def validate_refs_cfg(cfg, ctx): |
171 refs = set() | 240 refs = set() |
172 for i, ref in enumerate(cfg.refs): | 241 for i, ref in enumerate(cfg.refs): |
173 with ctx.prefix('Ref #%d: ', i + 1): | 242 with ctx.prefix('Ref #%d: ', i + 1): |
174 if not ref.name: | 243 if not ref.name: |
175 ctx.error('name is not specified') | 244 ctx.error('name is not specified') |
176 elif not ref.name.startswith('refs/'): | 245 elif not ref.name.startswith('refs/'): |
177 ctx.error('name does not start with "refs/": %s', ref.name) | 246 ctx.error('name does not start with "refs/": %s', ref.name) |
178 elif ref.name in refs: | 247 elif ref.name in refs: |
179 ctx.error('duplicate ref: %s', ref.name) | 248 ctx.error('duplicate ref: %s', ref.name) |
180 else: | 249 else: |
181 refs.add(ref.name) | 250 refs.add(ref.name) |
182 if ref.config_path: | 251 if ref.config_path: |
183 validate_path(ref.config_path, ctx) | 252 validate_path(ref.config_path, ctx) |
184 | 253 |
185 | 254 |
186 @ndb.tasklet | 255 @ndb.tasklet |
187 def _endpoint_validate_async(url, config_set, path, content, ctx): | 256 def _validate_by_service_async(service, config_set, path, content, ctx): |
188 """Validates a config with an external service.""" | 257 """Validates a config with an external service.""" |
| 258 try: |
| 259 metadata = yield services.get_metadata_async(service.id) |
| 260 except services.DynamicMetadataError as ex: |
| 261 logging.error('Could not load dynamic metadata for %s: %s', service.id, ex) |
| 262 return |
| 263 |
| 264 assert metadata and metadata.validation |
| 265 url = metadata.validation.url |
| 266 if not url: |
| 267 return |
| 268 |
| 269 match = False |
| 270 for p in metadata.validation.patterns: |
| 271 # TODO(nodir): optimize if necessary. |
| 272 if (validation.compile_pattern(p.config_set)(config_set) and |
| 273 validation.compile_pattern(p.path)(path)): |
| 274 match = True |
| 275 break |
| 276 if not match: |
| 277 return |
| 278 |
189 res = None | 279 res = None |
190 | 280 |
191 def report_error(text): | 281 def report_error(text): |
192 text = ( | 282 text = ( |
193 'Error during external validation: %s\n' | 283 'Error during external validation: %s\n' |
194 'url: %s\n' | 284 'url: %s\n' |
195 'config_set: %s\n' | 285 'config_set: %s\n' |
196 'path: %s\n' | 286 'path: %s\n' |
197 'response: %r') % (text, url, config_set, path, res) | 287 'response: %r') % (text, url, config_set, path, res) |
198 logging.error(text) | 288 logging.error(text) |
199 ctx.critical(text) | 289 ctx.critical(text) |
200 | 290 |
201 try: | 291 try: |
202 req = { | 292 req = { |
203 'config_set': config_set, | 293 'config_set': config_set, |
204 'path': path, | 294 'path': path, |
205 'content': base64.b64encode(content), | 295 'content': base64.b64encode(content), |
206 } | 296 } |
207 res = yield net.json_request_async( | 297 res = yield net.json_request_async( |
208 url, method='POST', payload=req, | 298 url, method='POST', payload=req, scope=net.EMAIL_SCOPE) |
209 scope='https://www.googleapis.com/auth/userinfo.email') | |
210 except net.Error as ex: | 299 except net.Error as ex: |
211 report_error('Net error: %s' % ex) | 300 report_error('Net error: %s' % ex) |
212 return | 301 return |
213 | 302 |
214 try: | 303 try: |
215 for msg in res.get('messages', []): | 304 for msg in res.get('messages', []): |
216 if not isinstance(msg, dict): | 305 if not isinstance(msg, dict): |
217 report_error('invalid response: message is not a dict: %r' % msg) | 306 report_error('invalid response: message is not a dict: %r' % msg) |
218 continue | 307 continue |
219 severity = msg.get('severity') or 'INFO' | 308 severity = msg.get('severity') or 'INFO' |
(...skipping 18 matching lines...) |
238 | 327 |
239 Returns: | 328 Returns: |
240 components.config.validation_context.Result. | 329 components.config.validation_context.Result. |
241 """ | 330 """ |
242 ctx = ctx or validation.Context() | 331 ctx = ctx or validation.Context() |
243 | 332 |
244 # Check the config against built-in validators, | 333 # Check the config against built-in validators, |
245 # defined using validation.self_rule. | 334 # defined using validation.self_rule. |
246 validation.validate(config_set, path, content, ctx=ctx) | 335 validation.validate(config_set, path, content, ctx=ctx) |
247 | 336 |
248 validation_cfg = yield storage.get_self_config_async( | 337 all_services = yield services.get_services_async() |
249 common.VALIDATION_FILENAME, service_config_pb2.ValidationCfg) | |
250 # Be paranoid, check yourself. | |
251 validate_validation_cfg(validation_cfg, validation.Context.raise_on_error()) | |
252 | |
253 futures = [] | 338 futures = [] |
254 for rule in validation_cfg.rules: | 339 for service in all_services: |
255 if (_pattern_match(rule.config_set, config_set) and | 340 futures.append( |
256 _pattern_match(rule.path, path)): | 341 _validate_by_service_async(service, config_set, path, content, ctx)) |
257 futures.append( | |
258 _endpoint_validate_async(rule.url, config_set, path, content, ctx)) | |
259 yield futures | 342 yield futures |
260 raise ndb.Return(ctx.result()) | 343 raise ndb.Return(ctx.result()) |
261 | 344 |
262 | 345 |
263 def validate_config(*args, **kwargs): | 346 def validate_config(*args, **kwargs): |
264 """Blocking version of validate_async.""" | 347 """Blocking version of validate_async.""" |
265 return validate_config_async(*args, **kwargs).get_result() | 348 return validate_config_async(*args, **kwargs).get_result() |
266 | 349 |
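A sketch of calling the blocking wrapper (illustrative only; the config set, file name and content are examples, and an App Engine test environment is assumed since validation may fan out to registered services):

result = validate_config(
    'services/luci-config',
    common.PROJECT_REGISTRY_FILENAME,
    'projects {\n  id: "chromium"\n}\n')
# `result` is a components.config.validation_context.Result, per the
# validate_config_async docstring above.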
267 | 350 |
268 def _pattern_match(pattern, value): | |
269 # Assume pattern is valid. | |
270 if ':' not in pattern: | |
271 return pattern == value | |
272 else: | |
273 kind, pattern = pattern.split(':', 2) | |
274 assert kind == 'regex' | |
275 return bool(re.match('^%s$' % pattern, value)) | |
276 | |
277 | |
278 def is_url_relative(url): | 351 def is_url_relative(url): |
279 parsed = urlparse.urlparse(url) | 352 parsed = urlparse.urlparse(url) |
280 return bool(not parsed.scheme and not parsed.netloc and parsed.path) | 353 return bool(not parsed.scheme and not parsed.netloc and parsed.path) |