Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
| 2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. | 2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. |
| 3 # Use of this source code is governed by a BSD-style license that can be | 3 # Use of this source code is governed by a BSD-style license that can be |
| 4 # found in the LICENSE file. | 4 # found in the LICENSE file. |
| 5 | 5 |
| 6 """ Upload benchmark performance data results. """ | 6 """ Upload benchmark performance data results. """ |
| 7 | 7 |
| 8 from build_step import BuildStep | 8 from build_step import BuildStep |
| 9 from utils import sync_bucket_subdir | 9 from utils import sync_bucket_subdir |
| 10 | 10 |
| 11 import builder_name_schema | |
| 12 | |
| 11 import json | 13 import json |
| 12 import os | 14 import os |
| 13 import os.path | 15 import os.path |
| 14 import posixpath | 16 import posixpath |
| 15 import re | 17 import re |
| 16 import sys | 18 import sys |
| 17 from datetime import datetime | 19 from datetime import datetime |
| 18 | 20 |
| 19 | 21 |
| 22 # Modified from Skia repo code, roughly line 108 down of | |
| 23 # github.com/google/skia/blob/795905562d1bd8bbedcf47f6a00efb220ec8bbe0/bench/check_bench_regressions.py | |
|
benchen
2014/05/30 00:57:23
just bench/check_bench_regressions.py will be sufficient.
kelvinly
2014/05/30 02:15:31
Done.
| |
def ReadExpectations(filename):
  """Reads bench expectations data from a CSV-style file.

  Each non-comment line must have exactly 5 comma-separated fields; the
  last three are interpreted as EXPECTED, LOWER, UPPER bound values.

  Args:
    filename: path to the expectations file.

  Returns:
    A dictionary mapping the bench entry name (first field of each line)
    to a (lower_bound, upper_bound, expected) tuple of floats.

  Raises:
    Exception: if a line does not have 5 fields, or if two lines share the
        same first two fields (duplicate expectation).
    IOError: if the file cannot be opened.
  """
  expectations = {}
  # Use a set for O(1) duplicate detection (was a list: O(n) per lookup).
  # Keyed on the first two fields in case multiple lines share everything
  # except for the algorithm and/or platform.
  unstripped_keys = set()
  # Context manager guarantees the file handle is closed (the original
  # bare open() leaked it); iterating the file avoids reading all lines
  # into memory at once.
  with open(filename) as expectations_file:
    for expectation in expectations_file:
      elements = expectation.strip().split(',')
      # Skip blank lines and comment lines.
      if not elements[0] or elements[0].startswith('#'):
        continue
      if len(elements) != 5:
        raise Exception("Invalid expectation line format: %s" %
                        expectation)
      bench_entry = elements[0] + ',' + elements[1]
      if bench_entry in unstripped_keys:
        raise Exception("Dup entries for bench expectation %s" %
                        bench_entry)
      unstripped_keys.add(bench_entry)
      entry = elements[0]
      # [<Bench_BmpConfig_TimeType>] -> (LB, UB, EXPECTED)
      expectations[entry] = (float(elements[-2]),
                             float(elements[-1]),
                             float(elements[-3]))
  return expectations
| 52 | |
| 53 | |
class UploadBenchResults(BuildStep):
  """Build step that uploads benchmark performance data to Google Storage.

  Uploads three things from the perf data directory:
    1. the plain bench logs (JSON excluded),
    2. an aggregated SKP picture-bench JSON file with expectation bounds
       spliced in, uploaded under 'pics-json/<date>/<builder>',
    3. the microbench JSON file (annotated with machine/commit fields),
       uploaded under 'stats-json/<date>/<builder>'.
  """

  def __init__(self, attempts=5, **kwargs):
    super(UploadBenchResults, self).__init__(attempts=attempts, **kwargs)

  def _GetPerfDataDir(self):
    # Local directory that holds the perf data files produced by the bench
    # steps (set by the BuildStep base class).
    return self._perf_data_dir

  def _GetBucketSubdir(self):
    # Google Storage subdirectory for this builder's perf data.
    subdirs = ['perfdata', self._builder_name]
    if self._is_try:
      # Trybots need to isolate their results by build number.
      subdirs.append(self._build_number)
    return posixpath.join(*subdirs)

  def _RunInternal(self):
    """Performs the three uploads described on the class docstring."""
    dest_gsbase = (self._args.get('dest_gsbase') or
                   sync_bucket_subdir.DEFAULT_PERFDATA_GS_BASE)

    # Upload the normal bench logs; JSON files are handled separately below.
    sync_bucket_subdir.SyncBucketSubdir(
        directory=self._GetPerfDataDir(),
        dest_gsbase=dest_gsbase,
        subdir=self._GetBucketSubdir(),
        do_upload=True,
        exclude_json=True,
        do_download=False)

    file_list = os.listdir(self._GetPerfDataDir())
    expectations = {}

    # Expectations file is named after the waterfall (non-trybot) version of
    # this builder's name.
    path_to_bench_expectations = os.path.join(
        'expectations',
        'bench',
        'bench_expectations_%s.txt' % builder_name_schema.GetWaterfallBot(
            self._builder_name))
    try:
      expectations = ReadExpectations(path_to_bench_expectations)
    except IOError:
      # Missing expectations are non-fatal: upload proceeds without bounds.
      print "Unable to open expectations file"

    re_file_extract = re.compile(r'(^.*)\.skp.*$')
    json_total = {}
    pictures_timestamp = ''
    for file_name in file_list:
      # Find bench picture files, splice in expectation data
      if not re.search('^bench_{}_data_skp_(.*)_([0-9]*)\.json$'.format(
          self._got_revision), file_name):
        continue
      json_pictures_data = {}
      if not pictures_timestamp:
        # Timestamp is taken from the first matching file name and reused for
        # the aggregate output file below.
        pictures_timestamp = file_name.split('_')[-1].split('.json')[0]
      full_file_name = os.path.join(self._GetPerfDataDir(), file_name)
      with open(full_file_name) as json_pictures:
        print 'Loading file {}'.format(file_name)
        json_pictures_data = json.load(json_pictures)

      # Merge all picture files into a single aggregate structure; the first
      # file becomes the base and later files append their 'benches' entries.
      if json_total:
        json_total['benches'].extend(json_pictures_data['benches'])
      else:
        json_total = json_pictures_data

    # Now add expectations to all keys
    # NOTE(review): assumes at least one picture bench file matched above;
    # json_total['benches'] raises KeyError on an empty dict -- confirm the
    # callers guarantee such files exist.
    for bench in json_total['benches']:
      for tileSet in bench['tileSets']:
        search_for_name = re_file_extract.search(bench['name'])
        if not search_for_name:
          print 'Invalid bench name: {}'.format(bench['name'])
          continue
        # Key format matches the expectations file: '<name>.skp_<tileSet>_'
        # (trailing underscore from the empty final join element).
        key = '_'.join([
            search_for_name.group(1)+'.skp',
            tileSet['name'],
            ''])
        if key in expectations.keys():
          (lower, upper, expected) = expectations[key]
          tileSet['lower'] = lower
          tileSet['upper'] = upper
          tileSet['expected'] = expected
        else:
          print "Unable to find key: {}".format(key)

    json_total['commitHash'] = self._got_revision
    json_total['machine'] = self._builder_name

    # Write the aggregated, annotated picture-bench data back to disk so it
    # can be picked up by the filtered sync below.
    json_write_name = 'skpbench_{}_{}.json'.format(self._got_revision,
                                                   pictures_timestamp)
    full_json_write_name = os.path.join(self._GetPerfDataDir(), json_write_name)
    with open(full_json_write_name, 'w') as json_picture_write:
      json.dump(json_total, json_picture_write)

    # Destination path is bucketed by UTC year/month/day/hour.
    now = datetime.utcnow()
    gs_json_path = '/'.join((str(now.year).zfill(4), str(now.month).zfill(2),
                             str(now.day).zfill(2), str(now.hour).zfill(2)))
    gs_dir = 'pics-json/{}/{}'.format(gs_json_path, self._builder_name)
    sync_bucket_subdir.SyncBucketSubdir(
        directory=self._GetPerfDataDir(),
        dest_gsbase=dest_gsbase,
        subdir=gs_dir,
        # TODO(kelvinly): Set up some way to configure this,
        # rather than hard coding it
        do_upload=True,
        do_download=False,
        exclude_json=False,
        filenames_filter=
        'skpbench_({})_[0-9]+\.json'.format(self._got_revision))

    # Find and open the bench JSON file to add in additional fields,
    # then upload.
    microbench_json_file = None

    for file_name in file_list:
      if re.search('microbench_({})_[0-9]+\.json'.format(self._got_revision),
                   file_name):
        microbench_json_file = os.path.join(self._GetPerfDataDir(), file_name)
        break

    if microbench_json_file:
      json_data = {}

      with open(microbench_json_file) as json_file:
        json_data = json.load(json_file)

      # Annotate in place with builder and revision, then rewrite the file.
      json_data['machine'] = self._builder_name
      json_data['commitHash'] = self._got_revision

      with open(microbench_json_file, 'w') as json_file:
        json.dump(json_data, json_file)

      # Reuses 'now' from above so both uploads land in the same hour bucket.
      gs_json_path = '/'.join((str(now.year).zfill(4), str(now.month).zfill(2),
                               str(now.day).zfill(2), str(now.hour).zfill(2)))
      gs_dir = 'stats-json/{}/{}'.format(gs_json_path, self._builder_name)
      sync_bucket_subdir.SyncBucketSubdir(
          directory=self._GetPerfDataDir(),
          dest_gsbase=dest_gsbase,
          subdir=gs_dir,
          # TODO(kelvinly): Set up some way to configure this,
          # rather than hard coding it
          do_upload=True,
          do_download=False,
          exclude_json=False,
          filenames_filter=
          'microbench_({})_[0-9]+\.json'.format(self._got_revision))

  def _Run(self):
    # Entry point called by the BuildStep framework.
    self._RunInternal()
| 86 | 199 |
# Allow this build step to be run directly as a stand-alone script.
if '__main__' == __name__:
  sys.exit(BuildStep.RunBuildStep(UploadBenchResults))
| OLD | NEW |