| OLD | NEW |
| 1 # Copyright 2014 The Chromium Authors. All rights reserved. | 1 # Copyright 2014 The Chromium Authors. All rights reserved. |
| 2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
| 3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
| 4 import argparse | 4 import argparse |
| 5 import json | 5 import json |
| 6 import logging | 6 import logging |
| 7 import multiprocessing | 7 import multiprocessing |
| 8 import os | 8 import os |
| 9 import re | 9 import re |
| 10 import sys | 10 import sys |
| (...skipping 355 matching lines...) | (...skipping 355 matching lines...) |
| 366 return alerts_for_builder(cache, master_url, builder_name, | 366 return alerts_for_builder(cache, master_url, builder_name, |
| 367 recent_build_ids, old_alerts) | 367 recent_build_ids, old_alerts) |
| 368 except: | 368 except: |
| 369 # Put all exception text into an exception and raise that so it doesn't | 369 # Put all exception text into an exception and raise that so it doesn't |
| 370 # get eaten by the multiprocessing code. | 370 # get eaten by the multiprocessing code. |
| 371 raise Exception(''.join(traceback.format_exception(*sys.exc_info()))) | 371 raise Exception(''.join(traceback.format_exception(*sys.exc_info()))) |
| 372 finally: | 372 finally: |
| 373 logging.debug('Thread for builder %s has finished', builder_name) | 373 logging.debug('Thread for builder %s has finished', builder_name) |
| 374 | 374 |
| 375 pool = multiprocessing.dummy.Pool(processes=jobs) | 375 pool = multiprocessing.dummy.Pool(processes=jobs) |
| 376 logging.debug('Processing all threads via thread pool') | 376 logging.debug('Processing all builders via thread pool') |
| 377 builder_alerts = pool.map(process_builder, master_json['builders'].keys()) | 377 builder_alerts = pool.map(process_builder, master_json['builders'].keys()) |
| 378 logging.debug('Closing all threads in thread pool') | 378 logging.debug('Closing all threads in builder thread pool') |
| 379 pool.close() | 379 pool.close() |
| 380 pool.join() | 380 pool.join() |
| 381 logging.debug('Joined all threads in thread pool') | 381 logging.debug('Joined all threads in builder thread pool') |
| 382 | 382 |
| 383 alerts = [] | 383 alerts = [] |
| 384 for alert in builder_alerts: | 384 for alert in builder_alerts: |
| 385 if alert: | 385 if alert: |
| 386 alerts.extend(alert) | 386 alerts.extend(alert) |
| 387 | 387 |
| 388 logging.debug('Computing alerts_for_stale_master_data') |
| 388 stale_master_data_alert = alert_for_stale_master_data(master_url, master_json) | 389 stale_master_data_alert = alert_for_stale_master_data(master_url, master_json) |
| 390 logging.debug('Finished alerts_for_master') |
| 389 return (alerts, stale_master_data_alert) | 391 return (alerts, stale_master_data_alert) |
| 390 | 392 |
| 391 | 393 |
| 392 def main(args): # pragma: no cover | 394 def main(args): # pragma: no cover |
| 393 logging.basicConfig(level=logging.DEBUG) | 395 logging.basicConfig(level=logging.DEBUG) |
| 394 | 396 |
| 395 parser = argparse.ArgumentParser() | 397 parser = argparse.ArgumentParser() |
| 396 parser.add_argument('builder_url', action='store') | 398 parser.add_argument('builder_url', action='store') |
| 397 args = parser.parse_args(args) | 399 args = parser.parse_args(args) |
| 398 | 400 |
| 399 # https://build.chromium.org/p/chromium.win/builders/XP%20Tests%20(1) | 401 # https://build.chromium.org/p/chromium.win/builders/XP%20Tests%20(1) |
| 400 url_regexp = re.compile('(?P<master_url>.*)/builders/(?P<builder_name>.*)/?') | 402 url_regexp = re.compile('(?P<master_url>.*)/builders/(?P<builder_name>.*)/?') |
| 401 match = url_regexp.match(args.builder_url) | 403 match = url_regexp.match(args.builder_url) |
| 402 | 404 |
| 403 # FIXME: HACK | 405 # FIXME: HACK |
| 404 CACHE_PATH = 'build_cache' | 406 CACHE_PATH = 'build_cache' |
| 405 cache = buildbot.DiskCache(CACHE_PATH) | 407 cache = buildbot.DiskCache(CACHE_PATH) |
| 406 | 408 |
| 407 master_url = match.group('master_url') | 409 master_url = match.group('master_url') |
| 408 builder_name = urllib.unquote_plus(match.group('builder_name')) | 410 builder_name = urllib.unquote_plus(match.group('builder_name')) |
| 409 master_json = buildbot.fetch_master_json(master_url) | 411 master_json = buildbot.fetch_master_json(master_url) |
| 410 # This is kinda a hack, but uses more of our existing code this way: | 412 # This is kinda a hack, but uses more of our existing code this way: |
| 411 alerts = alerts_for_master(cache, master_url, master_json, builder_name) | 413 alerts = alerts_for_master(cache, master_url, master_json, builder_name) |
| 412 print json.dumps(alerts[0], indent=1) | 414 print json.dumps(alerts[0], indent=1) |
| 413 | 415 |
| 414 | 416 |
| 415 if __name__ == '__main__': | 417 if __name__ == '__main__': |
| 416 sys.exit(main(sys.argv[1:])) | 418 sys.exit(main(sys.argv[1:])) |
| OLD | NEW |