| OLD | NEW |
| 1 #!/usr/bin/python | 1 #!/usr/bin/python |
| 2 # Copyright 2015 The Chromium Authors. All rights reserved. | 2 # Copyright 2015 The Chromium Authors. All rights reserved. |
| 3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
| 4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
| 5 '''Generate stats of CQ usage.''' | 5 '''Generate stats of CQ usage.''' |
| 6 | 6 |
| 7 import argparse | 7 import argparse |
| 8 import calendar | 8 import calendar |
| 9 import collections | 9 import collections |
| 10 import datetime | 10 import datetime |
| (...skipping 597 matching lines...) | |
| 608 # Create map issue:patchset -> #attempts | 608 # Create map issue:patchset -> #attempts |
| 609 patches, issues = set(), set() | 609 patches, issues = set(), set() |
| 610 for reason in results: | 610 for reason in results: |
| 611 issues.add(reason['fields']['issue']) | 611 issues.add(reason['fields']['issue']) |
| 612 patches.add((reason['fields']['issue'], reason['fields']['patchset'])) | 612 patches.add((reason['fields']['issue'], reason['fields']['patchset'])) |
| 613 stats['issue-count'] = len(issues) | 613 stats['issue-count'] = len(issues) |
| 614 stats['patchset-count'] = len(patches) | 614 stats['patchset-count'] = len(patches) |
| 615 patch_stats = {} | 615 patch_stats = {} |
| 616 # Fetch and process each patchset log | 616 # Fetch and process each patchset log |
| 617 def get_patch_stats(patch_id): | 617 def get_patch_stats(patch_id): |
| 618 return derive_patch_stats(end_date, patch_id) | 618 return derive_patch_stats(begin_date, end_date, patch_id) |
| 619 | 619 |
| 620 if args.seq or not args.thread_pool: | 620 if args.seq or not args.thread_pool: |
| 621 iterable = map(get_patch_stats, patches) | 621 iterable = map(get_patch_stats, patches) |
| 622 else: | 622 else: |
| 623 pool = ThreadPool(min(args.thread_pool, len(patches))) | 623 pool = ThreadPool(min(args.thread_pool, len(patches))) |
| 624 iterable = pool.imap_unordered(get_patch_stats, patches) | 624 iterable = pool.imap_unordered(get_patch_stats, patches) |
| 625 for patch_id, pstats in iterable: | 625 for patch_id, pstats in iterable: |
| 626 if not pstats['supported']: | 626 if not pstats['supported']: |
| 627 continue | 627 continue |
| 628 patch_stats[patch_id] = pstats | 628 patch_stats[patch_id] = pstats |
| (...skipping 32 matching lines...) | |
| 661 msg_lines = message.splitlines() | 661 msg_lines = message.splitlines() |
| 662 for line in msg_lines[1:]: | 662 for line in msg_lines[1:]: |
| 663 words = line.split(None, 1) | 663 words = line.split(None, 1) |
| 664 if not words: | 664 if not words: |
| 665 continue | 665 continue |
| 666 builder = words[0] | 666 builder = words[0] |
| 667 builders.append(builder) | 667 builders.append(builder) |
| 668 return builders | 668 return builders |
| 669 | 669 |
| 670 | 670 |
| 671 def derive_patch_stats(end_date, patch_id): | 671 def derive_patch_stats(begin_date, end_date, patch_id): |
| 672 """``patch_id`` is a tuple (issue, patchset).""" | 672 """``patch_id`` is a tuple (issue, patchset).""" |
| 673 results = fetch_cq_logs(end_date=end_date, filters=[ | 673 results = fetch_cq_logs(start_date=begin_date, end_date=end_date, filters=[ |
| 674 'issue=%s' % patch_id[0], 'patchset=%s' % patch_id[1]]) | 674 'issue=%s' % patch_id[0], 'patchset=%s' % patch_id[1]]) |
| 675 # The results should already be ordered, but sort again just to be sure. | 675 # The results should already be ordered, but sort again just to be sure. |
| 676 results = sorted(results, key=lambda r: r['timestamp'], reverse=True) | 676 results = sorted(results, key=lambda r: r['timestamp'], reverse=True) |
| 677 logging.debug('derive_patch_stats(%r): fetched %d entries.', | 677 logging.debug('derive_patch_stats(%r): fetched %d entries.', |
| 678 patch_id, len(results)) | 678 patch_id, len(results)) |
| 679 # Group by attempts | 679 # Group by attempts |
| 680 attempts = [] | 680 attempts = [] |
| 681 | 681 |
| 682 def new_attempt(): | 682 def new_attempt(): |
| 683 attempt_empty = { | 683 attempt_empty = { |
| (...skipping 510 matching lines...) | |
| 1194 logger = logging.getLogger() | 1194 logger = logging.getLogger() |
| 1195 # TODO(sergeyberezin): how do I derive local timezone string? | 1195 # TODO(sergeyberezin): how do I derive local timezone string? |
| 1196 # Need to be able to pass dateutil.tz.tzlocal() directly. | 1196 # Need to be able to pass dateutil.tz.tzlocal() directly. |
| 1197 infra_libs.logs.process_argparse_options(args, logger) | 1197 infra_libs.logs.process_argparse_options(args, logger) |
| 1198 stats = acquire_stats(args) | 1198 stats = acquire_stats(args) |
| 1199 print_stats(args, stats) | 1199 print_stats(args, stats) |
| 1200 | 1200 |
| 1201 | 1201 |
| 1202 if __name__ == '__main__': | 1202 if __name__ == '__main__': |
| 1203 sys.exit(main()) | 1203 sys.exit(main()) |