Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
| 2 # Copyright 2013 The Chromium Authors. All rights reserved. | 2 # Copyright 2013 The Chromium Authors. All rights reserved. |
| 3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
| 4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
| 5 | 5 |
| 6 """Closes tree if configured masters have failed tree-closing steps. | 6 """Closes tree if configured masters have failed tree-closing steps. |
| 7 | 7 |
| 8 Given a list of masters, gatekeeper_ng will get a list of the latest builds from | 8 Given a list of masters, gatekeeper_ng will get a list of the latest builds from |
| 9 the specified masters. It then checks if any tree-closing steps have failed, and | 9 the specified masters. It then checks if any tree-closing steps have failed, and |
| 10 if so closes the tree and emails appropriate parties. Configuration for which | 10 if so closes the tree and emails appropriate parties. Configuration for which |
| (...skipping 20 matching lines...) | |
| 31 from common import chromium_utils | 31 from common import chromium_utils |
| 32 from slave import gatekeeper_ng_config | 32 from slave import gatekeeper_ng_config |
| 33 | 33 |
| 34 SCRIPTS_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), | 34 SCRIPTS_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), |
| 35 '..', '..') | 35 '..', '..') |
| 36 | 36 |
| 37 # Buildbot status enum. | 37 # Buildbot status enum. |
| 38 SUCCESS, WARNINGS, FAILURE, SKIPPED, EXCEPTION, RETRY = range(6) | 38 SUCCESS, WARNINGS, FAILURE, SKIPPED, EXCEPTION, RETRY = range(6) |
| 39 | 39 |
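
For orientation, a minimal sketch of how these result codes decode when reading build JSON; the `RESULT_NAMES` tuple and `result_name` helper are invented here for illustration, and only the six values come from the enum above:

```python
# Names matching the Buildbot status enum above (0-5).
RESULT_NAMES = ('SUCCESS', 'WARNINGS', 'FAILURE', 'SKIPPED', 'EXCEPTION',
                'RETRY')

def result_name(code):
    # Buildbot leaves 'results' unset (None) until a build finishes, which
    # is also how this script detects build completion further down.
    if code is None:
        return 'IN PROGRESS'
    return RESULT_NAMES[code]
```
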
| 40 | 40 |
| 41 # Bump each time there is an incompatible change in build_db. | |
| 42 BUILD_DB_VERSION = 1 | |
| 43 | |
| 44 | |
| 41 def get_pwd(password_file): | 45 def get_pwd(password_file): |
| 42 if os.path.isfile(password_file): | 46 if os.path.isfile(password_file): |
| 43 return open(password_file, 'r').read().strip() | 47 return open(password_file, 'r').read().strip() |
| 44 return getpass.getpass() | 48 return getpass.getpass() |
| 45 | 49 |
| 46 | 50 |
| 47 def update_status(tree_message, tree_status_url, username, password): | 51 def update_status(tree_message, tree_status_url, username, password): |
| 48 """Connects to chromium-status and closes the tree.""" | 52 """Connects to chromium-status and closes the tree.""" |
| 49 #TODO(xusydoc): append status if status is already closed. | 53 #TODO(xusydoc): append status if status is already closed. |
| 50 params = urllib.urlencode({ | 54 params = urllib.urlencode({ |
| 51 'message': tree_message, | 55 'message': tree_message, |
| 52 'username': username, | 56 'username': username, |
| 53 'password': password | 57 'password': password |
| 54 }) | 58 }) |
| 55 | 59 |
| 56 # Standard urllib doesn't raise an exception on 403, urllib2 does. | 60 # Standard urllib doesn't raise an exception on 403, urllib2 does. |
| 57 f = urllib2.urlopen(tree_status_url, params) | 61 f = urllib2.urlopen(tree_status_url, params) |
| 58 f.close() | 62 f.close() |
| 59 logging.info('success') | 63 logging.info('success') |
| 60 | 64 |
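
The tree closure above is a plain form-encoded POST. A standalone sketch of the same pattern, with a hypothetical status URL and credentials, leaning on the behavior the code comment calls out: `urllib2` raises on HTTP errors such as 403 while plain `urllib` does not:

```python
import urllib
import urllib2

params = urllib.urlencode({
    'message': 'Tree is closed (gatekeeper)',   # hypothetical message
    'username': 'gatekeeper@example.org',       # hypothetical credentials
    'password': 'secret',
})
try:
    # Supplying a data argument makes urlopen issue a POST.
    f = urllib2.urlopen('https://status.example.org/status', params)
    f.close()
except urllib2.HTTPError as e:
    # urllib2 surfaces e.g. a 403 from a bad password as an exception.
    print 'status update failed: HTTP %d' % e.code
```
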
| 61 | 65 |
| 62 def get_root_json(master_url): | 66 def get_root_json(master_url): |
| 63 """Pull down root JSON which contains builder and build info.""" | 67 """Pull down root JSON which contains builder and build info.""" |
| 64 logging.info('opening %s' % (master_url + '/json')) | 68 logging.info('opening %s' % (master_url + '/json')) |
| 65 with closing(urllib2.urlopen(master_url + '/json')) as f: | 69 with closing(urllib2.urlopen(master_url + '/json')) as f: |
| 66 return json.load(f) | 70 return json.load(f) |
| 67 | 71 |
| 68 | 72 |
| 69 def find_new_builds(master_url, root_json, build_db): | 73 def find_new_builds(master_url, root_json, build_db, options): |
|
| **iannucci** 2014/02/20 03:30:55: GREAT EVIL!!! | |
| **ghost stip (do not use)** 2014/02/22 10:03:07: Done. | |
| 70 """Given a dict of previously-seen builds, find new builds on each builder. | 74 """Given a dict of previously-seen builds, find new builds on each builder. |
| 71 | 75 |
| 72 Note that we use the 'cachedBuilds here since it should be faster, and this | 76 Note that we use the 'cachedBuilds' here since it should be faster, and this |
| 73 script is meant to be run frequently enough that it shouldn't skip any builds. | 77 script is meant to be run frequently enough that it shouldn't skip any builds. |
| 74 | 78 |
| 75 'Frequently enough' means 1 minute in the case of Buildbot or cron, so the | 79 'Frequently enough' means 1 minute in the case of Buildbot or cron, so the |
| 76 only way for gatekeeper_ng to be overwhelmed is if > cachedBuilds builds | 80 only way for gatekeeper_ng to be overwhelmed is if > cachedBuilds builds |
| 77 complete within 1 minute. As cachedBuilds is scaled per number of slaves per | 81 complete within 1 minute. As cachedBuilds is scaled per number of slaves per |
| 78 builder, the only way for this to really happen is if a build consistently | 82 builder, the only way for this to really happen is if a build consistently |
| 79 takes < 1 minute to complete. | 83 takes < 1 minute to complete. |
| 80 """ | 84 """ |
| 81 new_builds = {} | 85 new_builds = {} |
| 82 build_db[master_url] = build_db.get(master_url, {}) | 86 build_db[master_url] = build_db.get(master_url, {}) |
| 87 | |
| 88 last_finished_build = {} | |
| 89 for builder, builds in build_db[master_url].iteritems(): | |
| 90 if any(b.get('finished') for b in builds): | |
| 91 last_finished_build[builder] = max( | |
| 92 b['build'] for b in builds if b.get('finished')) | |
|
| **iannucci** 2014/02/20 03:30:55: lame to iterate over builds twice, but still O(N) | |
| **ghost stip (do not use)** 2014/02/22 10:03:07: Done. | |
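
The double scan (`any()` then `max()`) that iannucci flags can be collapsed; a sketch of an equivalent one-pass version over the same data shape:

```python
# One pass: track the highest finished build number per builder.
last_finished_build = {}
for builder, builds in build_db[master_url].iteritems():
    for b in builds:
        if b.get('finished'):
            last_finished_build[builder] = max(
                b['build'], last_finished_build.get(builder, -1))
```
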
| 93 | |
| 83 for buildername, builder in root_json['builders'].iteritems(): | 94 for buildername, builder in root_json['builders'].iteritems(): |
| 84 candidate_builds = set(builder['cachedBuilds'] + builder['currentBuilds']) | 95 candidate_builds = set(builder['cachedBuilds'] + builder['currentBuilds']) |
|
| **iannucci** 2014/02/20 03:30:55: comment: cachedBuilds == finishedBuilds | |
| **ghost stip (do not use)** 2014/02/22 10:03:07: Done. | |
| 85 if buildername in build_db[master_url]: | 96 if buildername in last_finished_build: |
| 86 new_builds[buildername] = [x for x in candidate_builds | 97 new_builds[buildername] = [{'build': x} for x in candidate_builds |
| 87 if x > build_db[master_url][buildername]] | 98 if x > last_finished_build[buildername]] |
| 88 else: | 99 else: |
| 89 new_builds[buildername] = candidate_builds | 100 if (buildername in build_db[master_url] or |
| 101 options.process_finished_builds_on_new_builder): | |
| 102 # Scan finished builds as well as unfinished. | |
| 103 new_builds[buildername] = [{'build': x} for x in candidate_builds] | |
|
| **iannucci** 2014/02/20 03:30:55: should have structs or namedtuples or classes or s… | |
| **ghost stip (do not use)** 2014/02/22 10:03:07: Done. | |
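
What a struct-style record for these entries might look like, per iannucci's suggestion; the `BuildRecord` name and fields are invented here, and the diff ultimately kept plain dicts:

```python
import collections

# Typed record instead of ad-hoc {'build': ..., 'finished': ...} dicts.
BuildRecord = collections.namedtuple('BuildRecord',
                                     ['build', 'finished', 'triggered'])

record = BuildRecord(build=1408, finished=False, triggered=[])
# namedtuples are immutable; updates return a copy, which is one reason
# mutate-in-place code like build['finished'] = True favors plain dicts.
record = record._replace(finished=True)
```
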
| 104 else: | |
| 105 # New builder or master, ignore past builds. | |
| 106 new_builds[buildername] = [ | |
| 107 {'build': x} for x in builder['currentBuilds']] | |
| 90 | 108 |
| 91 # This is a heuristic, as currentBuilds may become completed by the time we | 109 # Update build_db but don't duplicate builds already in there. |
| 92 # scan them. The buildDB is fixed up later to account for this. | 110 for build in new_builds.get(buildername, []): |
| 93 completed = set(builder['cachedBuilds']) - set(builder['currentBuilds']) | 111 build_db_builds = build_db[master_url].setdefault(buildername, []) |
| 94 if completed: | 112 if not any(x['build'] == build['build'] for x in build_db_builds): |
| 95 build_db[master_url][buildername] = max(completed) | 113 build_db_builds.append(build) |
| 114 | |
| 115 # Pull old + new unfinished builds from build_db. | |
| 116 new_builds[buildername] = [ | |
| 117 b for b in build_db[master_url].setdefault(buildername, []) | |
| 118 if not b.get('finished')] | |
| 96 | 119 |
| 97 return new_builds | 120 return new_builds |
| 98 | 121 |
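
After this change a build_db entry is a list of per-build dicts rather than a single high-water-mark integer. A sketch of the shape the function above reads and writes, with invented master, builder, and build numbers:

```python
# Hypothetical build_db contents after a scan. 'finished' is set once
# results are known; 'triggered' (added later by check_builds) lists the
# gatekeeper section hashes that have already fired for the build.
build_db = {
    'http://build.chromium.org/p/chromium': {
        'Linux Builder': [
            {'build': 1407, 'finished': True},
            {'build': 1408},  # unfinished; re-scanned on the next pass
        ],
    },
}
```
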
| 99 | 122 |
| 100 def find_new_builds_per_master(masters, build_db): | 123 def find_new_builds_per_master(masters, build_db, options): |
| 101 """Given a list of masters, find new builds and collect them under a dict.""" | 124 """Given a list of masters, find new builds and collect them under a dict.""" |
| 102 builds = {} | 125 builds = {} |
| 103 master_jsons = {} | 126 master_jsons = {} |
| 104 for master in masters: | 127 for master in masters: |
| 105 root_json = get_root_json(master) | 128 root_json = get_root_json(master) |
| 106 master_jsons[master] = root_json | 129 master_jsons[master] = root_json |
| 107 builds[master] = find_new_builds(master, root_json, build_db) | 130 builds[master] = find_new_builds(master, root_json, build_db, options) |
| 108 return builds, master_jsons | 131 return builds, master_jsons |
| 109 | 132 |
| 110 | 133 |
| 111 def get_build_json(url_pair): | 134 def get_build_json(url_tuple): |
| 112 url, master = url_pair | 135 url, master, builder, build = url_tuple |
| 113 logging.debug('opening %s...' % url) | 136 logging.debug('opening %s...' % url) |
| 114 with closing(urllib2.urlopen(url)) as f: | 137 with closing(urllib2.urlopen(url)) as f: |
| 115 return json.load(f), master | 138 return json.load(f), master, builder, build |
| 116 | 139 |
| 117 | 140 |
| 118 def get_build_jsons(master_builds, build_db, processes): | 141 def get_build_jsons(master_builds, build_db, processes): |
| 119 """Get all new builds on specified masters. | 142 """Get all new builds on specified masters. |
| 120 | 143 |
| 121 This takes a dict in the form of [master][builder][build], formats that URL | 144 This takes a dict in the form of [master][builder][build], formats that URL |
| 122 and appends that to url_list. Then, it forks out and queries each build_url | 145 and appends that to url_list. Then, it forks out and queries each build_url |
| 123 for build information. | 146 for build information. |
| 124 """ | 147 """ |
| 125 url_list = [] | 148 url_list = [] |
| 126 for master, builder_dict in master_builds.iteritems(): | 149 for master, builder_dict in master_builds.iteritems(): |
| 127 for builder, new_builds in builder_dict.iteritems(): | 150 for builder, new_builds in builder_dict.iteritems(): |
| 128 for build in new_builds: | 151 for build in new_builds: |
| 129 safe_builder = urllib.quote(builder) | 152 safe_builder = urllib.quote(builder) |
| 130 url = master + '/json/builders/%s/builds/%s' % (safe_builder, build) | 153 url = master + '/json/builders/%s/builds/%s' % (safe_builder, |
| 131 url_list.append((url, master)) | 154 build['build']) |
| 132 # The async/get is so that ctrl-c can interrupt the scans. | 155 url_list.append((url, master, builder, build)) |
| 133 # See http://stackoverflow.com/questions/1408356/ | 156 # Prevent map from hanging, see http://bugs.python.org/issue12157. |
| 134 # keyboard-interrupts-with-pythons-multiprocessing-pool | 157 if url_list: |
| 135 with chromium_utils.MultiPool(processes) as pool: | 158 # The async/get is so that ctrl-c can interrupt the scans. |
| 136 builds = filter(bool, pool.map_async(get_build_json, url_list).get(9999999)) | 159 # See http://stackoverflow.com/questions/1408356/ |
| 160 # keyboard-interrupts-with-pythons-multiprocessing-pool | |
| 161 with chromium_utils.MultiPool(processes) as pool: | |
| 162 builds = filter(bool, pool.map_async(get_build_json, url_list).get( | |
| 163 9999999)) | |
| 164 else: | |
| 165 builds = [] | |
| 137 | 166 |
| 138 for build_json, master in builds: | 167 # Pools pickle and unpickle, which means the build object we use isn't the |
| 168 # real build object. We recover it here so we can modify build_db later in the | |
| 169 # program. | |
| 170 def find_original_build(master, builder, build): | |
| 171 return next(b for b in build_db[master][builder] | |
| 172 if b['build'] == build['build']) | |
| 173 builds = [(u, m, find_original_build(m, bd, bl)) | |
| 174 for u, m, bd, bl in builds] | |
|
| **iannucci** 2014/02/20 03:30:55: zip url_list with the rest of the data so that you… | |
| **ghost stip (do not use)** 2014/02/22 10:03:07: Done. | |
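
iannucci's zip suggestion works because `pool.map_async` preserves input order, so the fetched JSON can be paired back with the original tuples in `url_list`, recovering the very dict objects stored in build_db without the `find_original_build` search. A sketch, assuming a variant of `get_build_json` that returns only the parsed JSON:

```python
# Results come back in input order, so zip re-attaches each JSON blob to
# the *original* build dict (the same object that lives in build_db).
jsons = pool.map_async(get_build_json, url_list).get(9999999)
builds = [(build_json, master, build)
          for build_json, (url, master, builder, build)
          in zip(jsons, url_list)]
```
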
| 175 | |
| 176 # This is needed for the --sync-db option. | |
| 177 for build_json, master, build in builds: | |
| 139 if build_json.get('results', None) is not None: | 178 if build_json.get('results', None) is not None: |
| 140 build_db[master][build_json['builderName']] = max( | 179 build['finished'] = True |
| 141 build_json['number'], | |
| 142 build_db[master][build_json['builderName']]) | |
| 143 return builds | 180 return builds |
| 144 | 181 |
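
The `map_async(...).get(<huge timeout>)` shape in the function above is the standard workaround for `Pool.map` swallowing ctrl-c on Python 2. A self-contained sketch of the pattern with an invented worker:

```python
import multiprocessing

def fetch(item):
    # Stand-in worker; the real script downloads build JSON here.
    return item * 2

if __name__ == '__main__':
    pool = multiprocessing.Pool(processes=4)
    try:
        # A bare pool.map() blocks uninterruptibly on Python 2;
        # map_async().get() with an explicit timeout stays responsive
        # to KeyboardInterrupt.
        results = pool.map_async(fetch, [1, 2, 3]).get(9999999)
    finally:
        pool.terminate()
        pool.join()
```
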
| 145 | 182 |
| 146 def check_builds(master_builds, master_jsons, build_db, gatekeeper_config): | 183 def check_builds(master_builds, master_jsons, gatekeeper_config): |
| 147 """Given a gatekeeper configuration, see which builds have failed.""" | 184 """Given a gatekeeper configuration, see which builds have failed.""" |
| 148 failed_builds = [] | 185 failed_builds = [] |
| 149 for build_json, master_url in master_builds: | 186 for build_json, master_url, build in master_builds: |
| 150 gatekeeper_sections = gatekeeper_config.get(master_url, []) | 187 gatekeeper_sections = gatekeeper_config.get(master_url, []) |
| 151 for gatekeeper_section in gatekeeper_sections: | 188 for gatekeeper_section in gatekeeper_sections: |
| 189 section_hash = gatekeeper_ng_config.gatekeeper_section_hash( | |
| 190 gatekeeper_section) | |
| 191 | |
| 152 if build_json['builderName'] in gatekeeper_section: | 192 if build_json['builderName'] in gatekeeper_section: |
| 153 gatekeeper = gatekeeper_section[build_json['builderName']] | 193 gatekeeper = gatekeeper_section[build_json['builderName']] |
| 154 elif '*' in gatekeeper_section: | 194 elif '*' in gatekeeper_section: |
| 155 gatekeeper = gatekeeper_section['*'] | 195 gatekeeper = gatekeeper_section['*'] |
| 156 else: | 196 else: |
| 157 gatekeeper = {} | 197 gatekeeper = {} |
| 198 | |
| 158 steps = build_json['steps'] | 199 steps = build_json['steps'] |
| 159 forgiving = set(gatekeeper.get('forgiving_steps', [])) | 200 forgiving = set(gatekeeper.get('forgiving_steps', [])) |
| 160 forgiving_optional = set(gatekeeper.get('forgiving_optional', [])) | 201 forgiving_optional = set(gatekeeper.get('forgiving_optional', [])) |
| 161 closing_steps = set(gatekeeper.get('closing_steps', [])) | forgiving | 202 closing_steps = set(gatekeeper.get('closing_steps', [])) | forgiving |
| 162 closing_optional = set( | 203 closing_optional = set( |
| 163 gatekeeper.get('closing_optional', [])) | forgiving_optional | 204 gatekeeper.get('closing_optional', [])) | forgiving_optional |
| 164 tree_notify = set(gatekeeper.get('tree_notify', [])) | 205 tree_notify = set(gatekeeper.get('tree_notify', [])) |
| 165 sheriff_classes = set(gatekeeper.get('sheriff_classes', [])) | 206 sheriff_classes = set(gatekeeper.get('sheriff_classes', [])) |
| 166 subject_template = gatekeeper.get('subject_template', | 207 subject_template = gatekeeper.get('subject_template', |
| 167 gatekeeper_ng_config.DEFAULTS[ | 208 gatekeeper_ng_config.DEFAULTS[ |
| (...skipping 25 matching lines...) | |
| 193 # If the entire build failed. | 234 # If the entire build failed. |
| 194 if (not unsatisfied_steps and 'results' in build_json and | 235 if (not unsatisfied_steps and 'results' in build_json and |
| 195 build_json['results'] != SUCCESS and respect_build_status): | 236 build_json['results'] != SUCCESS and respect_build_status): |
| 196 unsatisfied_steps.add('[overall build status]') | 237 unsatisfied_steps.add('[overall build status]') |
| 197 | 238 |
| 198 buildbot_url = master_jsons[master_url]['project']['buildbotURL'] | 239 buildbot_url = master_jsons[master_url]['project']['buildbotURL'] |
| 199 project_name = master_jsons[master_url]['project']['title'] | 240 project_name = master_jsons[master_url]['project']['title'] |
| 200 | 241 |
| 201 logging.debug('%sbuilders/%s/builds/%d ----', buildbot_url, | 242 logging.debug('%sbuilders/%s/builds/%d ----', buildbot_url, |
| 202 build_json['builderName'], build_json['number']) | 243 build_json['builderName'], build_json['number']) |
| 244 logging.debug(' section hash: %s', section_hash) | |
| 203 logging.debug(' build steps: %s', ', '.join(s['name'] for s in steps)) | 245 logging.debug(' build steps: %s', ', '.join(s['name'] for s in steps)) |
| 204 logging.debug(' closing steps: %s', ', '.join(closing_steps)) | 246 logging.debug(' closing steps: %s', ', '.join(closing_steps)) |
| 205 logging.debug(' closing optional steps: %s', ', '.join(closing_optional)) | 247 logging.debug(' closing optional steps: %s', ', '.join(closing_optional)) |
| 206 logging.debug(' finished steps: %s', ', '.join(finished_steps)) | 248 logging.debug(' finished steps: %s', ', '.join(finished_steps)) |
| 207 logging.debug(' successful: %s', ', '.join(successful_steps)) | 249 logging.debug(' successful: %s', ', '.join(successful_steps)) |
| 208 logging.debug(' build complete: %s', bool( | 250 logging.debug(' build complete: %s', bool( |
| 209 build_json.get('results', None) is not None)) | 251 build_json.get('results', None) is not None)) |
| 210 logging.debug(' unsatisfied steps: %s', ', '.join(unsatisfied_steps)) | 252 logging.debug(' unsatisfied steps: %s', ', '.join(unsatisfied_steps)) |
| 211 logging.debug(' set to close tree: %s', close_tree) | 253 logging.debug(' set to close tree: %s', close_tree) |
| 212 logging.debug(' build failed: %s', bool(unsatisfied_steps)) | 254 logging.debug(' build failed: %s', bool(unsatisfied_steps)) |
| 213 logging.debug('----') | |
| 214 | 255 |
| 215 | 256 |
| 216 if unsatisfied_steps: | 257 if unsatisfied_steps: |
| 217 build_db[master_url][build_json['builderName']] = max( | 258 if section_hash in build.get('triggered', []): |
| 218 build_json['number'], | 259 logging.debug(' section has already been triggered for this build, ' |
| 219 build_db[master_url][build_json['builderName']]) | 260 'skipping...') |
| 261 else: | |
| 262 build.setdefault('triggered', []).append(section_hash) | |
| 220 | 263 |
| 221 failed_builds.append({'base_url': buildbot_url, | 264 failed_builds.append({'base_url': buildbot_url, |
| 222 'build': build_json, | 265 'build': build_json, |
| 223 'close_tree': close_tree, | 266 'close_tree': close_tree, |
| 224 'forgiving_steps': forgiving | forgiving_optional, | 267 'forgiving_steps': ( |
| 225 'project_name': project_name, | 268 forgiving | forgiving_optional), |
| 226 'sheriff_classes': sheriff_classes, | 269 'project_name': project_name, |
| 227 'subject_template': subject_template, | 270 'sheriff_classes': sheriff_classes, |
| 228 'tree_notify': tree_notify, | 271 'subject_template': subject_template, |
| 229 'unsatisfied': unsatisfied_steps, | 272 'tree_notify': tree_notify, |
| 230 }) | 273 'unsatisfied': unsatisfied_steps, |
| 274 }) | |
| 275 logging.debug('----') | |
| 231 | 276 |
| 232 return failed_builds | 277 return failed_builds |
| 233 | 278 |
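
`gatekeeper_ng_config.gatekeeper_section_hash` is not shown in this diff; the idea is a deterministic fingerprint of a config section, so each build is acted on at most once per distinct section. A sketch of one way such a fingerprint could be computed, assuming a JSON-serializable section; not necessarily the real implementation:

```python
import hashlib
import json

def section_hash(section):
    # Serialize with sorted keys so logically identical sections always
    # produce the same fingerprint, regardless of dict ordering.
    canonical = json.dumps(section, sort_keys=True)
    return hashlib.sha256(canonical).hexdigest()
```
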
| 234 | 279 |
| 235 def parse_sheriff_file(url): | 280 def parse_sheriff_file(url): |
| 236 """Given a sheriff url, download and parse the appropirate sheriff list.""" | 281 """Given a sheriff url, download and parse the appropirate sheriff list.""" |
| 237 with closing(urllib2.urlopen(url)) as f: | 282 with closing(urllib2.urlopen(url)) as f: |
| 238 line = f.readline() | 283 line = f.readline() |
| 239 usernames_matcher_ = re.compile(r'document.write\(\'([\w, ]+)\'\)') | 284 usernames_matcher_ = re.compile(r'document.write\(\'([\w, ]+)\'\)') |
| 240 usernames_match = usernames_matcher_.match(line) | 285 usernames_match = usernames_matcher_.match(line) |
| (...skipping 158 matching lines...) | |
| 399 """Open the build_db file. | 444 """Open the build_db file. |
| 400 | 445 |
| 401 filename: the filename of the build db. | 446 filename: the filename of the build db. |
| 402 """ | 447 """ |
| 403 build_db = None | 448 build_db = None |
| 404 if os.path.isfile(filename): | 449 if os.path.isfile(filename): |
| 405 print 'loading build_db from', filename | 450 print 'loading build_db from', filename |
| 406 with open(filename) as f: | 451 with open(filename) as f: |
| 407 build_db = json.load(f) | 452 build_db = json.load(f) |
| 408 | 453 |
| 409 return build_db or {} | 454 if build_db and build_db.get('build_db_version', 0) != BUILD_DB_VERSION: |
| 455 new_fn = '%s.old' % filename | |
| 456 logging.warn('%s is an older db version: %d (expecting %d). moving to ' | |
| 457 '%s' % (filename, build_db.get('build_db_version', 0), | |
| 458 BUILD_DB_VERSION, new_fn)) | |
| 459 chromium_utils.MoveFile(filename, new_fn) | |
| 460 build_db = None | |
| 461 | |
| 462 if build_db and 'masters' in build_db: | |
| 463 return build_db['masters'] | |
| 464 return {} | |
| 410 | 465 |
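
A sketch of the versioned on-disk layout this loader now expects; a file whose `build_db_version` does not match is renamed to `<name>.old` and ignored. Contents invented for illustration:

```python
# Hypothetical build_db.json as accepted by get_build_db (version 1).
db = {
    'build_db_version': 1,   # BUILD_DB_VERSION; bumped on breaking changes
    'masters': {
        'http://build.chromium.org/p/chromium': {
            'Linux Builder': [{'build': 1407, 'finished': True}],
        },
    },
    'sections': {},          # written by save_build_db as a debugging aid
}
```
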
| 411 | 466 |
| 412 def save_build_db(build_db, filename): | 467 def save_build_db(build_db_data, gatekeeper_config, filename): |
| 413 """Save the build_db file. | 468 """Save the build_db file. |
| 414 | 469 |
| 415 build_db: dictionary to jsonize and store as build_db. | 470 build_db: dictionary to jsonize and store as build_db. |
| 416 filename: the filename of the build db. | 471 filename: the filename of the build db. |
| 472 gatekeeper_config: the gatekeeper config used for this pass. | |
| 417 """ | 473 """ |
| 418 print 'saving build_db to', filename | 474 print 'saving build_db to', filename |
| 475 | |
| 476 # Remove all but the last finished build. | |
| 477 for builders in build_db_data.values(): | |
| 478 for builder in builders: | |
| 479 if any(b.get('finished') for b in builders[builder]): | |
| 480 last_finished_build = max( | |
| 481 b['build'] for b in builders[builder] if b.get('finished')) | |
| 482 builders[builder] = [ | |
| 483 b for b in builders[builder] if (b['build'] == last_finished_build | |
| 484 or not b.get('finished'))] | |
| 485 | |
| 486 build_db = { | |
| 487 'build_db_version': BUILD_DB_VERSION, | |
| 488 'masters': build_db_data, | |
| 489 'sections': {}, | |
| 490 } | |
| 491 | |
| 492 # Output the gatekeeper sections we're operating with, so a human reading the | |
| 493 # file can debug issues. This is discarded by the parser in get_build_db. | |
| 494 used_sections = set([]) | |
| 495 for builders in build_db_data.values(): | |
| 496 for builds in builders.values(): | |
| 497 used_sections |= reduce( | |
| 498 lambda x, y: x | set(y.get('triggered', [])), builds, set([])) | |
|
| **iannucci** 2014/02/20 03:30:55: `{ t for b in builds for t in b.get('triggered`… | |
| **ghost stip (do not use)** 2014/02/22 10:03:07: Done. | |
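
iannucci's truncated snippet is a set comprehension standing in for the `reduce`; presumably it completes along these lines (completion assumed, not taken from the review):

```python
# Collect every section hash any build has already triggered; equivalent
# to the reduce() above. (Presumed completion of the truncated snippet.)
used_sections = set()
for builders in build_db_data.values():
    for builds in builders.values():
        used_sections |= {t for b in builds for t in b.get('triggered', [])}
```
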
| 499 for master in gatekeeper_config.values(): | |
| 500 for section in master: | |
| 501 section_hash = gatekeeper_ng_config.gatekeeper_section_hash(section) | |
| 502 if (section_hash in used_sections and | |
| 503 section_hash not in build_db['sections']): | |
|
| **iannucci** 2014/02/20 03:30:55: Use the sets, luke. | |
| **ghost stip (do not use)** 2014/02/22 10:03:07: dude you just unlocked an entire set of lgtm sagas | |
| 504 build_db['sections'][section_hash] = section | |
| 505 | |
| 419 with open(filename, 'wb') as f: | 506 with open(filename, 'wb') as f: |
| 420 json.dump(build_db, f) | 507 gatekeeper_ng_config.flatten_to_json(build_db, f) |
| 421 | 508 |
| 422 | 509 |
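
The pruning pass in save_build_db keeps the newest finished build plus every unfinished one, per builder. A worked example with invented numbers:

```python
builds = [
    {'build': 1405, 'finished': True},
    {'build': 1407, 'finished': True},
    {'build': 1408},                    # unfinished
]
last = max(b['build'] for b in builds if b.get('finished'))  # 1407
kept = [b for b in builds
        if b['build'] == last or not b.get('finished')]
# kept == [{'build': 1407, 'finished': True}, {'build': 1408}]
```
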
| 423 def get_options(): | 510 def get_options(): |
| 424 prog_desc = 'Closes the tree if annotated builds fail.' | 511 prog_desc = 'Closes the tree if annotated builds fail.' |
| 425 usage = '%prog [options] <one or more master urls>' | 512 usage = '%prog [options] <one or more master urls>' |
| 426 parser = optparse.OptionParser(usage=(usage + '\n\n' + prog_desc)) | 513 parser = optparse.OptionParser(usage=(usage + '\n\n' + prog_desc)) |
| 427 parser.add_option('--build-db', default='build_db.json', | 514 parser.add_option('--build-db', default='build_db.json', |
| 428 help='records the last-seen build for each builder') | 515 help='records the last-seen build for each builder') |
| 429 parser.add_option('--clear-build-db', action='store_true', | 516 parser.add_option('--clear-build-db', action='store_true', |
| 430 help='reset build_db to be empty') | 517 help='reset build_db to be empty') |
| (...skipping 35 matching lines...) | |
| 466 parser.add_option('--no-email-app', action='store_true', | 553 parser.add_option('--no-email-app', action='store_true', |
| 467 help='don\'t send emails') | 554 help='don\'t send emails') |
| 468 parser.add_option('--json', default='gatekeeper.json', | 555 parser.add_option('--json', default='gatekeeper.json', |
| 469 help='location of gatekeeper configuration file') | 556 help='location of gatekeeper configuration file') |
| 470 parser.add_option('--verify', action='store_true', | 557 parser.add_option('--verify', action='store_true', |
| 471 help='verify that the gatekeeper config file is correct') | 558 help='verify that the gatekeeper config file is correct') |
| 472 parser.add_option('--flatten-json', action='store_true', | 559 parser.add_option('--flatten-json', action='store_true', |
| 473 help='display flattened gatekeeper.json for debugging') | 560 help='display flattened gatekeeper.json for debugging') |
| 474 parser.add_option('--no-hashes', action='store_true', | 561 parser.add_option('--no-hashes', action='store_true', |
| 475 help='don\'t insert gatekeeper section hashes') | 562 help='don\'t insert gatekeeper section hashes') |
| 563 parser.add_option('--process-finished-builds-on-new-builder', | |
| 564 action='store_true', | |
| 565 help='when processing a new builder, process finished ' | |
| 566 'builds') | |
| 476 parser.add_option('-v', '--verbose', action='store_true', | 567 parser.add_option('-v', '--verbose', action='store_true', |
| 477 help='turn on extra debugging information') | 568 help='turn on extra debugging information') |
| 478 | 569 |
| 479 options, args = parser.parse_args() | 570 options, args = parser.parse_args() |
| 480 | 571 |
| 481 options.email_app_secret = None | 572 options.email_app_secret = None |
| 482 options.password = None | 573 options.password = None |
| 483 | 574 |
| 484 if options.no_hashes and not options.flatten_json: | 575 if options.no_hashes and not options.flatten_json: |
| 485 parser.error('specifying --no-hashes doesn\'t make sense without ' | 576 parser.error('specifying --no-hashes doesn\'t make sense without ' |
| (...skipping 42 matching lines...) | |
| 528 | 619 |
| 529 masters = set(args) | 620 masters = set(args) |
| 530 if not masters <= set(gatekeeper_config): | 621 if not masters <= set(gatekeeper_config): |
| 531 print 'The following masters are not present in the gatekeeper config:' | 622 print 'The following masters are not present in the gatekeeper config:' |
| 532 for m in masters - set(gatekeeper_config): | 623 for m in masters - set(gatekeeper_config): |
| 533 print ' ' + m | 624 print ' ' + m |
| 534 return 1 | 625 return 1 |
| 535 | 626 |
| 536 if options.clear_build_db: | 627 if options.clear_build_db: |
| 537 build_db = {} | 628 build_db = {} |
| 538 save_build_db(build_db, options.build_db) | 629 save_build_db(build_db, gatekeeper_config, options.build_db) |
| 539 else: | 630 else: |
| 540 build_db = get_build_db(options.build_db) | 631 build_db = get_build_db(options.build_db) |
| 541 | 632 |
| 542 new_builds, master_jsons = find_new_builds_per_master(masters, build_db) | 633 new_builds, master_jsons = find_new_builds_per_master(masters, build_db, |
| 634 options) | |
| 635 build_jsons = get_build_jsons(new_builds, build_db, options.parallelism) | |
| 636 | |
| 543 if options.sync_build_db: | 637 if options.sync_build_db: |
| 544 save_build_db(build_db, options.build_db) | 638 save_build_db(build_db, gatekeeper_config, options.build_db) |
| 545 return 0 | 639 return 0 |
| 546 build_jsons = get_build_jsons(new_builds, build_db, options.parallelism) | 640 |
| 547 failed_builds = check_builds(build_jsons, master_jsons, build_db, | 641 failed_builds = check_builds(build_jsons, master_jsons, gatekeeper_config) |
| 548 gatekeeper_config) | |
| 549 if options.set_status: | 642 if options.set_status: |
| 550 options.password = get_pwd(options.password_file) | 643 options.password = get_pwd(options.password_file) |
| 551 | 644 |
| 552 close_tree_if_failure(failed_builds, options.status_user, options.password, | 645 close_tree_if_failure(failed_builds, options.status_user, options.password, |
| 553 options.status_url, options.set_status, | 646 options.status_url, options.set_status, |
| 554 options.sheriff_url, options.default_from_email, | 647 options.sheriff_url, options.default_from_email, |
| 555 options.email_app_url, options.email_app_secret, | 648 options.email_app_url, options.email_app_secret, |
| 556 options.email_domain, options.filter_domain, | 649 options.email_domain, options.filter_domain, |
| 557 options.disable_domain_filter) | 650 options.disable_domain_filter) |
| 558 | 651 |
| 559 if not options.skip_build_db_update: | 652 if not options.skip_build_db_update: |
| 560 save_build_db(build_db, options.build_db) | 653 save_build_db(build_db, gatekeeper_config, options.build_db) |
| 561 | 654 |
| 562 return 0 | 655 return 0 |
| 563 | 656 |
| 564 | 657 |
| 565 if __name__ == '__main__': | 658 if __name__ == '__main__': |
| 566 sys.exit(main()) | 659 sys.exit(main()) |