Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(499)

Side by Side Diff: scripts/slave/gatekeeper_ng.py

Issue 172523005: Keep track of hashes triggered instead of builds. (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/tools/build
Patch Set: Fix typos. Created 6 years, 10 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « no previous file | scripts/slave/gatekeeper_ng_config.py » ('j') | scripts/slave/gatekeeper_ng_db.py » ('J')
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 #!/usr/bin/env python 1 #!/usr/bin/env python
2 # Copyright 2013 The Chromium Authors. All rights reserved. 2 # Copyright 2013 The Chromium Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be 3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file. 4 # found in the LICENSE file.
5 5
6 """Closes tree if configured masters have failed tree-closing steps. 6 """Closes tree if configured masters have failed tree-closing steps.
7 7
8 Given a list of masters, gatekeeper_ng will get a list of the latest builds from 8 Given a list of masters, gatekeeper_ng will get a list of the latest builds from
9 the specified masters. It then checks if any tree-closing steps have failed, and 9 the specified masters. It then checks if any tree-closing steps have failed, and
10 if so closes the tree and emails appropriate parties. Configuration for which 10 if so closes the tree and emails appropriate parties. Configuration for which
(...skipping 12 matching lines...) Expand all
23 import os 23 import os
24 import random 24 import random
25 import re 25 import re
26 import sys 26 import sys
27 import time 27 import time
28 import urllib 28 import urllib
29 import urllib2 29 import urllib2
30 30
31 from common import chromium_utils 31 from common import chromium_utils
32 from slave import gatekeeper_ng_config 32 from slave import gatekeeper_ng_config
33 from slave import gatekeeper_ng_db
33 34
34 SCRIPTS_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 35 SCRIPTS_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)),
35 '..', '..') 36 '..', '..')
36 37
37 # Buildbot status enum. 38 # Buildbot status enum.
38 SUCCESS, WARNINGS, FAILURE, SKIPPED, EXCEPTION, RETRY = range(6) 39 SUCCESS, WARNINGS, FAILURE, SKIPPED, EXCEPTION, RETRY = range(6)
39 40
40 41
41 def get_pwd(password_file): 42 def get_pwd(password_file):
42 if os.path.isfile(password_file): 43 if os.path.isfile(password_file):
(...skipping 19 matching lines...) Expand all
def get_root_json(master_url):
  """Fetch the master's root JSON, which lists its builders and builds."""
  json_url = master_url + '/json'
  logging.info('opening %s' % (json_url))
  with closing(urllib2.urlopen(json_url)) as stream:
    return json.load(stream)
67 68
68 69
def find_new_builds(master_url, root_json, build_db):
  """Determine which builds on each builder haven't been processed yet.

  Relies on 'cachedBuilds' for speed; the script is meant to run often
  enough (about once a minute under Buildbot or cron) that the cache can't
  wrap around between invocations. Since cachedBuilds scales with the
  number of slaves per builder, that would require builds to consistently
  finish in under a minute.
  """
  build_db.masters.setdefault(master_url, {})
  known_builders = build_db.masters[master_url]

  # Highest build number already recorded as finished, per builder.
  last_finished_build = {}
  for builder_name, builds in known_builders.items():
    finished_nums = [int(num) for num, record in builds.items()
                     if record.finished]
    if finished_nums:
      last_finished_build[builder_name] = max(finished_nums)

  new_builds = {}
  for buildername, builder in root_json['builders'].items():
    # cachedBuilds holds both finished and unfinished builds from the
    # cache; currentBuilds holds only the ones still running.
    candidate_builds = set(builder['cachedBuilds'] + builder['currentBuilds'])
    if buildername in last_finished_build:
      # Anything newer than the last finished build is new.
      new_builds[buildername] = [
          num for num in candidate_builds
          if num > last_finished_build[buildername]]
    elif buildername in known_builders:
      # Known builder with nothing finished yet: rescan finished builds
      # as well as unfinished ones.
      new_builds[buildername] = candidate_builds
    else:
      # New builder or master: ignore past builds.
      new_builds[buildername] = builder['currentBuilds']

  return new_builds
98 110
99 111
def find_new_builds_per_master(masters, build_db):
  """Scan several masters for new builds.

  Returns a pair: a dict of new builds keyed by master URL, and a dict of
  each master's root JSON keyed the same way.
  """
  all_new_builds = {}
  master_jsons = {}
  for master in masters:
    master_json = get_root_json(master)
    master_jsons[master] = master_json
    all_new_builds[master] = find_new_builds(master, master_json, build_db)
  return all_new_builds, master_jsons
109 121
110 122
def get_build_json(url_tuple):
  """Downloads the json of a specific build.

  url_tuple is (url, master, builder, buildnum); the last three elements
  are passed through unchanged so the caller can correlate the response.
  """
  url, master, builder, buildnum = url_tuple
  logging.debug('opening %s...' % url)
  connection = urllib2.urlopen(url)
  try:
    build_json = json.load(connection)
  finally:
    connection.close()
  return build_json, master, builder, buildnum
116 129
117 130
def get_build_jsons(master_builds, processes):
  """Fetch the JSON of every new build on the specified masters.

  master_builds maps master -> builder -> iterable of build numbers. Each
  build is turned into a status URL and fetched by a worker pool; returns
  a list of (build_json, master, builder, buildnum) tuples.
  """
  fetch_args = []
  for master, builders in master_builds.iteritems():
    for builder, buildnums in builders.iteritems():
      quoted_builder = urllib.quote(builder)
      for buildnum in buildnums:
        build_url = master + '/json/builders/%s/builds/%s' % (quoted_builder,
                                                              buildnum)
        fetch_args.append((build_url, master, builder, buildnum))

  # Prevent map from hanging, see http://bugs.python.org/issue12157.
  if not fetch_args:
    return []

  # The async/get is so that ctrl-c can interrupt the scans.
  # See http://stackoverflow.com/questions/1408356/
  # keyboard-interrupts-with-pythons-multiprocessing-pool
  with chromium_utils.MultiPool(processes) as pool:
    async_result = pool.map_async(get_build_json, fetch_args)
    return filter(bool, async_result.get(9999999))
144 159
145 160
def propagate_build_json_to_db(build_db, builds):
  """Record in build_db which downloaded builds have finished.

  builds is a list of (build_json, master, builder, buildnum) tuples as
  produced by get_build_jsons.
  """
  for build_json, master, builder, buildnum in builds:
    builder_entry = build_db.masters[master].setdefault(builder, {})
    record = builder_entry.get(buildnum)
    if not record:
      record = gatekeeper_ng_db.gen_build()

    # A present 'results' key means buildbot considers the build complete.
    if build_json.get('results', None) is not None:
      record = record._replace(finished=True)  # pylint: disable=W0212

    builder_entry[buildnum] = record
172
173
174 def check_builds(master_builds, master_jsons, gatekeeper_config):
147 """Given a gatekeeper configuration, see which builds have failed.""" 175 """Given a gatekeeper configuration, see which builds have failed."""
148 failed_builds = [] 176 failed_builds = []
149 for build_json, master_url in master_builds: 177 for build_json, master_url, builder, buildnum in master_builds:
150 gatekeeper_sections = gatekeeper_config.get(master_url, []) 178 gatekeeper_sections = gatekeeper_config.get(master_url, [])
151 for gatekeeper_section in gatekeeper_sections: 179 for gatekeeper_section in gatekeeper_sections:
180 section_hash = gatekeeper_ng_config.gatekeeper_section_hash(
181 gatekeeper_section)
182
152 if build_json['builderName'] in gatekeeper_section: 183 if build_json['builderName'] in gatekeeper_section:
153 gatekeeper = gatekeeper_section[build_json['builderName']] 184 gatekeeper = gatekeeper_section[build_json['builderName']]
154 elif '*' in gatekeeper_section: 185 elif '*' in gatekeeper_section:
155 gatekeeper = gatekeeper_section['*'] 186 gatekeeper = gatekeeper_section['*']
156 else: 187 else:
157 gatekeeper = {} 188 gatekeeper = {}
189
158 steps = build_json['steps'] 190 steps = build_json['steps']
159 forgiving = set(gatekeeper.get('forgiving_steps', [])) 191 forgiving = set(gatekeeper.get('forgiving_steps', []))
160 forgiving_optional = set(gatekeeper.get('forgiving_optional', [])) 192 forgiving_optional = set(gatekeeper.get('forgiving_optional', []))
161 closing_steps = set(gatekeeper.get('closing_steps', [])) | forgiving 193 closing_steps = set(gatekeeper.get('closing_steps', [])) | forgiving
162 closing_optional = set( 194 closing_optional = set(
163 gatekeeper.get('closing_optional', [])) | forgiving_optional 195 gatekeeper.get('closing_optional', [])) | forgiving_optional
164 tree_notify = set(gatekeeper.get('tree_notify', [])) 196 tree_notify = set(gatekeeper.get('tree_notify', []))
165 sheriff_classes = set(gatekeeper.get('sheriff_classes', [])) 197 sheriff_classes = set(gatekeeper.get('sheriff_classes', []))
166 subject_template = gatekeeper.get('subject_template', 198 subject_template = gatekeeper.get('subject_template',
167 gatekeeper_ng_config.DEFAULTS[ 199 gatekeeper_ng_config.DEFAULTS[
(...skipping 23 matching lines...) Expand all
191 unsatisfied_steps &= finished_steps 223 unsatisfied_steps &= finished_steps
192 224
193 # If the entire build failed. 225 # If the entire build failed.
194 if (not unsatisfied_steps and 'results' in build_json and 226 if (not unsatisfied_steps and 'results' in build_json and
195 build_json['results'] != SUCCESS and respect_build_status): 227 build_json['results'] != SUCCESS and respect_build_status):
196 unsatisfied_steps.add('[overall build status]') 228 unsatisfied_steps.add('[overall build status]')
197 229
198 buildbot_url = master_jsons[master_url]['project']['buildbotURL'] 230 buildbot_url = master_jsons[master_url]['project']['buildbotURL']
199 project_name = master_jsons[master_url]['project']['title'] 231 project_name = master_jsons[master_url]['project']['title']
200 232
201 logging.debug('%sbuilders/%s/builds/%d ----', buildbot_url, 233 if unsatisfied_steps:
202 build_json['builderName'], build_json['number']) 234 failed_builds.append(({'base_url': buildbot_url,
203 logging.debug(' build steps: %s', ', '.join(s['name'] for s in steps)) 235 'build': build_json,
204 logging.debug(' closing steps: %s', ', '.join(closing_steps)) 236 'close_tree': close_tree,
205 logging.debug(' closing optional steps: %s', ', '.join(closing_optional)) 237 'forgiving_steps': (
206 logging.debug(' finished steps: %s', ', '.join(finished_steps)) 238 forgiving | forgiving_optional),
207 logging.debug(' successful: %s', ', '.join(successful_steps)) 239 'project_name': project_name,
208 logging.debug(' build complete: %s', bool( 240 'sheriff_classes': sheriff_classes,
209 build_json.get('results', None) is not None)) 241 'subject_template': subject_template,
210 logging.debug(' unsatisfied steps: %s', ', '.join(unsatisfied_steps)) 242 'tree_notify': tree_notify,
211 logging.debug(' set to close tree: %s', close_tree) 243 'unsatisfied': unsatisfied_steps,
212 logging.debug(' build failed: %s', bool(unsatisfied_steps)) 244 },
213 logging.debug('----') 245 master_url,
246 builder,
247 buildnum,
248 section_hash))
249
250 return failed_builds
214 251
215 252
def debounce_failures(failed_builds, build_db):
  """Filter out failures whose gatekeeper section already fired for a build.

  Each failure is tagged with the hash of the gatekeeper section that
  flagged it; hashes already handled are stored per-build in build_db, so
  a given section only triggers once per build.
  """
  deduped_failures = []
  for failure in failed_builds:
    build, master_url, builder, buildnum, section_hash = failure
    logging.debug('%sbuilders/%s/builds/%d ----', build['base_url'],
                  builder, buildnum)

    triggered_hashes = build_db.masters[master_url][builder][
        buildnum].triggered
    if section_hash in triggered_hashes:
      logging.debug(' section has already been triggered for this build, '
                    'skipping...')
    else:
      # Appending mutates the record stored inside build_db, so the
      # trigger survives into later runs.
      triggered_hashes.append(section_hash)
      deduped_failures.append(build)

    logging.debug(' section hash: %s', section_hash)
    logging.debug(' build steps: %s', ', '.join(
        step['name'] for step in build['build']['steps']))
    logging.debug(' build complete: %s', bool(
        build['build'].get('results', None) is not None))
    logging.debug(' unsatisfied steps: %s', ', '.join(build['unsatisfied']))
    logging.debug(' set to close tree: %s', build['close_tree'])
    logging.debug(' build failed: %s', bool(build['unsatisfied']))

    logging.debug('----')

  return deduped_failures
233 282
234 283
235 def parse_sheriff_file(url): 284 def parse_sheriff_file(url):
236   """Given a sheriff url, download and parse the appropriate sheriff list.""" 285   """Given a sheriff url, download and parse the appropriate sheriff list."""
237 with closing(urllib2.urlopen(url)) as f: 286 with closing(urllib2.urlopen(url)) as f:
238 line = f.readline() 287 line = f.readline()
239 usernames_matcher_ = re.compile(r'document.write\(\'([\w, ]+)\'\)') 288 usernames_matcher_ = re.compile(r'document.write\(\'([\w, ]+)\'\)')
240 usernames_match = usernames_matcher_.match(line) 289 usernames_match = usernames_matcher_.match(line)
241 sheriffs = set() 290 sheriffs = set()
242 if usernames_match: 291 if usernames_match:
(...skipping 144 matching lines...) Expand 10 before | Expand all | Expand 10 after
387 # Deduplicate emails. 436 # Deduplicate emails.
388 keyfunc = lambda x: x[1] 437 keyfunc = lambda x: x[1]
389 for k, g in itertools.groupby(sorted(filtered_emails_to_send, key=keyfunc), 438 for k, g in itertools.groupby(sorted(filtered_emails_to_send, key=keyfunc),
390 keyfunc): 439 keyfunc):
391 watchers = list(reduce(operator.or_, [set(e[0]) for e in g], set())) 440 watchers = list(reduce(operator.or_, [set(e[0]) for e in g], set()))
392 build_data = json.loads(k) 441 build_data = json.loads(k)
393 build_data['recipients'] = watchers 442 build_data['recipients'] = watchers
394 submit_email(email_app_url, build_data, secret) 443 submit_email(email_app_url, build_data, secret)
395 444
396 445
397
398 def get_build_db(filename):
399 """Open the build_db file.
400
401 filename: the filename of the build db.
402 """
403 build_db = None
404 if os.path.isfile(filename):
405 print 'loading build_db from', filename
406 with open(filename) as f:
407 build_db = json.load(f)
408
409 return build_db or {}
410
411
412 def save_build_db(build_db, filename):
413 """Save the build_db file.
414
415 build_db: dictionary to jsonize and store as build_db.
416 filename: the filename of the build db.
417 """
418 print 'saving build_db to', filename
419 with open(filename, 'wb') as f:
420 json.dump(build_db, f)
421
422
423 def get_options(): 446 def get_options():
424 prog_desc = 'Closes the tree if annotated builds fail.' 447 prog_desc = 'Closes the tree if annotated builds fail.'
425 usage = '%prog [options] <one or more master urls>' 448 usage = '%prog [options] <one or more master urls>'
426 parser = optparse.OptionParser(usage=(usage + '\n\n' + prog_desc)) 449 parser = optparse.OptionParser(usage=(usage + '\n\n' + prog_desc))
427 parser.add_option('--build-db', default='build_db.json', 450 parser.add_option('--build-db', default='build_db.json',
428 help='records the last-seen build for each builder') 451 help='records the last-seen build for each builder')
429 parser.add_option('--clear-build-db', action='store_true', 452 parser.add_option('--clear-build-db', action='store_true',
430 help='reset build_db to be empty') 453 help='reset build_db to be empty')
431 parser.add_option('--sync-build-db', action='store_true', 454 parser.add_option('--sync-build-db', action='store_true',
432 help='don\'t process any builds, but update build_db ' 455 help='don\'t process any builds, but update build_db '
(...skipping 86 matching lines...) Expand 10 before | Expand all | Expand 10 after
519 if options.verify: 542 if options.verify:
520 return 0 543 return 0
521 544
522 if options.flatten_json: 545 if options.flatten_json:
523 if not options.no_hashes: 546 if not options.no_hashes:
524 gatekeeper_config = gatekeeper_ng_config.inject_hashes(gatekeeper_config) 547 gatekeeper_config = gatekeeper_ng_config.inject_hashes(gatekeeper_config)
525 gatekeeper_ng_config.flatten_to_json(gatekeeper_config, sys.stdout) 548 gatekeeper_ng_config.flatten_to_json(gatekeeper_config, sys.stdout)
526 print 549 print
527 return 0 550 return 0
528 551
552 if options.set_status:
553 options.password = get_pwd(options.password_file)
554
529 masters = set(args) 555 masters = set(args)
530 if not masters <= set(gatekeeper_config): 556 if not masters <= set(gatekeeper_config):
531 print 'The following masters are not present in the gatekeeper config:' 557 print 'The following masters are not present in the gatekeeper config:'
532 for m in masters - set(gatekeeper_config): 558 for m in masters - set(gatekeeper_config):
533 print ' ' + m 559 print ' ' + m
534 return 1 560 return 1
535 561
536 if options.clear_build_db: 562 if options.clear_build_db:
537 build_db = {} 563 build_db = {}
538 save_build_db(build_db, options.build_db) 564 gatekeeper_ng_db.save_build_db(build_db, gatekeeper_config,
565 options.build_db)
539 else: 566 else:
540 build_db = get_build_db(options.build_db) 567 build_db = gatekeeper_ng_db.get_build_db(options.build_db)
541 568
542 new_builds, master_jsons = find_new_builds_per_master(masters, build_db) 569 new_builds, master_jsons = find_new_builds_per_master(masters, build_db)
570 build_jsons = get_build_jsons(new_builds, options.parallelism)
571 propagate_build_json_to_db(build_db, build_jsons)
572
543 if options.sync_build_db: 573 if options.sync_build_db:
544 save_build_db(build_db, options.build_db) 574 gatekeeper_ng_db.save_build_db(build_db, gatekeeper_config,
575 options.build_db)
545 return 0 576 return 0
546 build_jsons = get_build_jsons(new_builds, build_db, options.parallelism) 577
547 failed_builds = check_builds(build_jsons, master_jsons, build_db, 578 failed_builds = check_builds(build_jsons, master_jsons, gatekeeper_config)
548 gatekeeper_config) 579 failed_builds = debounce_failures(failed_builds, build_db)
549 if options.set_status:
550 options.password = get_pwd(options.password_file)
551 580
552 close_tree_if_failure(failed_builds, options.status_user, options.password, 581 close_tree_if_failure(failed_builds, options.status_user, options.password,
553 options.status_url, options.set_status, 582 options.status_url, options.set_status,
554 options.sheriff_url, options.default_from_email, 583 options.sheriff_url, options.default_from_email,
555 options.email_app_url, options.email_app_secret, 584 options.email_app_url, options.email_app_secret,
556 options.email_domain, options.filter_domain, 585 options.email_domain, options.filter_domain,
557 options.disable_domain_filter) 586 options.disable_domain_filter)
558 587
559 if not options.skip_build_db_update: 588 if not options.skip_build_db_update:
560 save_build_db(build_db, options.build_db) 589 gatekeeper_ng_db.save_build_db(build_db, gatekeeper_config,
590 options.build_db)
561 591
562 return 0 592 return 0
563 593
564 594
# Script entry point: propagate main()'s return code to the shell.
if __name__ == '__main__':
  sys.exit(main())
OLDNEW
« no previous file with comments | « no previous file | scripts/slave/gatekeeper_ng_config.py » ('j') | scripts/slave/gatekeeper_ng_db.py » ('J')

Powered by Google App Engine
This is Rietveld 408576698