Chromium Code Reviews

Unified Diff: scripts/slave/build_scan.py

Issue 2235063003: Add logging to build_scan.py for url retries (Closed)
Base URL: https://chromium.googlesource.com/chromium/tools/build.git@master
Patch Set: Created 4 years, 4 months ago
 #!/usr/bin/env python
 # Copyright 2014 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.

 """Scans a list of masters and saves information in a build_db."""

 from contextlib import closing
 import json
 import logging
(...skipping 20 matching lines...)
   attempts = 0
   while True:
     try:
       with closing(urllib2.urlopen(url, timeout=URL_TIMEOUT)) as f:
         return json.load(f)
     except (urllib2.URLError, IOError) as f:
       if attempts > MAX_ATTEMPTS:
         raise

       attempts += 1
-      time.sleep(2 ** attempts)
+      time_to_sleep = 2 ** attempts
+      logging.info(
+          "url fetch encountered %s, sleeping for %d seconds and retrying..." % (
+          f, time_to_sleep))
+
+      time.sleep(time_to_sleep)


 def get_root_json(master_url):
   """Pull down root JSON which contains builder and build info."""
   url = master_url + '/json'
   logging.info('opening %s' % url)
   return _url_open_json(url)


 def find_new_builds(master_url, root_json, build_db):
(...skipping 168 matching lines...)
     print '%s:%s:%s' % (master_url, builder, buildnum)

   if not options.skip_build_db_update:
     build_scan_db.save_build_db(build_db, {}, options.build_db)

   return 0


 if __name__ == '__main__':
   sys.exit(main())
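In short, the patch keeps the existing exponential backoff (the sleep grows as 2 ** attempts seconds) and only adds an informational log line naming the exception and the upcoming sleep before each retry. Below is a minimal, self-contained sketch of that retry pattern, assuming Python 2 and urllib2 as in build_scan.py; the fetch_json name, the constant values, and the httpbin.org URL are illustrative and not taken from the script:

#!/usr/bin/env python
# Sketch of the retry-with-backoff-and-logging pattern from the hunk above.
# Assumes Python 2 (urllib2); the constants are illustrative, not the script's.
from contextlib import closing
import json
import logging
import time
import urllib2

MAX_ATTEMPTS = 4   # retries before giving up (illustrative value)
URL_TIMEOUT = 60   # per-request timeout in seconds (illustrative value)


def fetch_json(url):
  """Fetch and decode JSON, retrying transient failures with backoff."""
  attempts = 0
  while True:
    try:
      with closing(urllib2.urlopen(url, timeout=URL_TIMEOUT)) as f:
        return json.load(f)
    except (urllib2.URLError, IOError) as e:
      if attempts > MAX_ATTEMPTS:
        raise  # out of retries: surface the last error to the caller
      attempts += 1
      time_to_sleep = 2 ** attempts  # 2, 4, 8, 16, ... seconds
      logging.info(
          'url fetch encountered %s, sleeping for %d seconds and retrying...' % (
              e, time_to_sleep))
      time.sleep(time_to_sleep)


if __name__ == '__main__':
  logging.basicConfig(level=logging.INFO)
  # Hypothetical endpoint; any URL that returns a JSON document works.
  print fetch_json('https://httpbin.org/json')

Because the raise after MAX_ATTEMPTS is unchanged, failure behavior is the same as before the patch; the only observable difference is the log line emitted between retries.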
