Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(85)

Side by Side Diff: tools/perf/generate_perf_json.py

Issue 2713553008: Add PRESUBMIT to make sure that perf JSON configs are always up-to-date (Closed)
Patch Set: Address Randy's nits Created 3 years, 9 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « tools/perf/PRESUBMIT.py ('k') | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 #!/usr/bin/env python 1 #!/usr/bin/env python
2 # Copyright 2016 The Chromium Authors. All rights reserved. 2 # Copyright 2016 The Chromium Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be 3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file. 4 # found in the LICENSE file.
5 5
6 """Script to generate chromium.perf.json and chromium.perf.fyi.json in 6 """Script to generate chromium.perf.json and chromium.perf.fyi.json in
7 the src/testing/buildbot directory. Maintaining these files by hand is 7 the src/testing/buildbot directory. Maintaining these files by hand is
8 too unwieldy. 8 too unwieldy.
9 """ 9 """
10 10 import argparse
11 import json 11 import json
12 import os 12 import os
13 import sys 13 import sys
14 14
15 from chrome_telemetry_build import chromium_config 15 from chrome_telemetry_build import chromium_config
16 16
17 sys.path.append(chromium_config.GetTelemetryDir()) 17 sys.path.append(chromium_config.GetTelemetryDir())
18 from telemetry import benchmark as benchmark_module 18 from telemetry import benchmark as benchmark_module
19 from telemetry.core import discover 19 from telemetry.core import discover
20 from telemetry.util import bot_utils 20 from telemetry.util import bot_utils
(...skipping 750 matching lines...) Expand 10 before | Expand all | Expand 10 after
771 if scripts: 771 if scripts:
772 tests[tester_name] = { 772 tests[tester_name] = {
773 'scripts': sorted(scripts, key=lambda x: x['name']) 773 'scripts': sorted(scripts, key=lambda x: x['name'])
774 } 774 }
775 775
776 for name, config in waterfall['builders'].iteritems(): 776 for name, config in waterfall['builders'].iteritems():
777 tests[name] = config 777 tests[name] = config
778 778
779 tests['AAAAA1 AUTOGENERATED FILE DO NOT EDIT'] = {} 779 tests['AAAAA1 AUTOGENERATED FILE DO NOT EDIT'] = {}
780 tests['AAAAA2 See //tools/perf/generate_perf_json.py to make changes'] = {} 780 tests['AAAAA2 See //tools/perf/generate_perf_json.py to make changes'] = {}
781 return tests
782
783
def get_json_config_file_for_waterfall(waterfall):
  """Return the absolute path of the JSON config file for |waterfall|.

  The file lives in src/testing/buildbot and is named after the
  waterfall's 'name' entry, e.g. chromium.perf.json.
  """
  config_name = '%s.json' % waterfall['name']
  return os.path.join(src_dir(), 'testing', 'buildbot', config_name)
782 788
783 buildbot_dir = os.path.join(src_dir(), 'testing', 'buildbot') 789
784 with open(os.path.join(buildbot_dir, filename), 'w') as fp: 790 def tests_are_up_to_date(waterfall):
791 tests = generate_all_tests(waterfall)
792 tests_data = json.dumps(tests, indent=2, separators=(',', ': '),
793 sort_keys=True)
794 config_file = get_json_config_file_for_waterfall(waterfall)
795 with open(config_file, 'r') as fp:
796 config_data = fp.read().strip()
797 return tests_data == config_data
798
799
def update_all_tests(waterfall):
  """Regenerate the tests for |waterfall| and write its JSON config file.

  The output is deterministic (sorted keys, fixed separators) and ends
  with a trailing newline.
  """
  config_path = get_json_config_file_for_waterfall(waterfall)
  serialized = json.dumps(
      generate_all_tests(waterfall),
      indent=2, separators=(',', ': '), sort_keys=True)
  with open(config_path, 'w') as fp:
    fp.write(serialized)
    fp.write('\n')
787 806
807
def src_dir():
  """Return the absolute path of the chromium src/ directory.

  Computed as three directory levels above this file
  (src/tools/perf/generate_perf_json.py).
  """
  path = os.path.abspath(__file__)
  # Walk up: <file> -> tools/perf -> tools -> src.
  for _ in range(3):
    path = os.path.dirname(path)
  return path
791 811
def main(args):
  """Regenerate (or, with --validate-only, validate) the perf JSON configs.

  Args:
    args: command-line arguments, typically sys.argv[1:].

  Returns:
    0 on success (configs written, or already up-to-date in
    --validate-only mode); 1 when --validate-only finds stale configs.
  """
  parser = argparse.ArgumentParser(
      description=('Generate the perf tests\' json config. This needs to be '
                   'done anytime you add/remove any existing benchmarks in '
                   'tools/perf/benchmarks.'))
  parser.add_argument(
      '--validate-only', action='store_true', default=False,
      help=('Validate whether the perf json generated will be the same as '
            'the existing configs. This does not change the content of the '
            'existing configs.'))
  options = parser.parse_args(args)

  waterfall = get_waterfall_config()
  waterfall['name'] = 'chromium.perf'
  fyi_waterfall = get_fyi_waterfall_config()
  fyi_waterfall['name'] = 'chromium.perf.fyi'

  if options.validate_only:
    if tests_are_up_to_date(fyi_waterfall) and tests_are_up_to_date(waterfall):
      print('All the perf JSON config files are up-to-date. \\o/')
      return 0
    # Stale configs: tell the user how to regenerate them and signal
    # failure so PRESUBMIT can block the change.
    message = ('The perf JSON config files are not up-to-date. Please run %s '
               'without --validate-only flag to update the perf JSON '
               'configs.') % sys.argv[0]
    print(message)
    return 1
  update_all_tests(fyi_waterfall)
  update_all_tests(waterfall)
  return 0
801 843
# Exit with main()'s return code so PRESUBMIT / callers can detect failure.
if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
OLDNEW
« no previous file with comments | « tools/perf/PRESUBMIT.py ('k') | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698