| OLD | NEW |
| 1 # Copyright 2014 The Chromium Authors. All rights reserved. | 1 # Copyright 2014 The Chromium Authors. All rights reserved. |
| 2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
| 3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
| 4 | 4 |
| 5 import datetime | 5 import datetime |
| 6 import json | 6 import json |
| 7 import re | 7 import re |
| 8 import string | 8 import string |
| 9 | 9 |
| 10 | 10 |
| (...skipping 1122 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1133 try: | 1133 try: |
| 1134 if not 'charts' in results: | 1134 if not 'charts' in results: |
| 1135 step_result.presentation.logs['chartjson_info'] = \ | 1135 step_result.presentation.logs['chartjson_info'] = \ |
| 1136 ['Info: No chart json present'] | 1136 ['Info: No chart json present'] |
| 1137 return | 1137 return |
| 1138 | 1138 |
| 1139 if not results.get('enabled', True): | 1139 if not results.get('enabled', True): |
| 1140 step_result.presentation.logs['chartjson_info'] = \ | 1140 step_result.presentation.logs['chartjson_info'] = \ |
| 1141 ['Info: Benchmark disabled, not sending results to dashboard'] | 1141 ['Info: Benchmark disabled, not sending results to dashboard'] |
| 1142 return | 1142 return |
| 1143 | 1144 |
| 1144 # TODO(eyaich): Remove logging once we debug uploading chartjson | 1145 # TODO(eyaich): Remove logging once we debug uploading chartjson |
| 1145 # to perf dashboard | 1146 # to perf dashboard |
| 1146 step_result.presentation.logs['chartjson_info'] = \ | 1147 step_result.presentation.logs['chartjson_info'] = \ |
| 1147 ['Info: Setting up arguments for perf dashboard'] | 1148 ['Info: Setting up arguments for perf dashboard'] |
| 1148 | 1149 |
| 1149 with api.tempfile.temp_dir('chartjson_results_output') as temp_output_dir: | 1150 filepath = api.raw_io.input(data=json.dumps(results)) |
| 1150 filepath = temp_output_dir.join('chartjson-results.json') | 1151 # Produces a step that uploads results to dashboard |
| 1151 api.file.write(name='write results to file', | 1152 args = [ |
| 1152 path=filepath, | 1153 '--chartjson-results-file', filepath, |
| 1153 data=json.dumps(results)) | 1154 '--perf-id', self._perf_id, |
| 1154 """Produces a step that uploads results to dashboard""" | 1155 '--results-url', self._results_url, |
| 1155 args = [ | 1156 '--name', self._perf_dashboard_id, |
| 1156 '--chartjson-results-file', filepath, | 1157 '--buildername', api.properties['buildername'], |
| 1157 '--perf-id', self._perf_id, | 1158 '--buildnumber', api.properties['buildnumber'], |
| 1158 '--results-url', self._results_url, | 1159 ] |
| 1159 '--name', self._perf_dashboard_id, |
| 1160 '--buildername', api.properties['buildername'], |
| 1161 '--buildnumber', api.properties['buildnumber'], | 1162 if api.chromium.c.build_dir: |
| 1162 ] | 1163 args.extend(['--build-dir', api.chromium.c.build_dir]) |
| 1163 if api.chromium.c.build_dir: | 1164 if 'got_revision_cp' in api.properties: |
| 1164 args.extend(['--build-dir', api.chromium.c.build_dir]) | 1165 args.extend(['--got-revision-cp', api.properties['got_revision_cp']]) |
| 1165 if 'got_revision_cp' in api.properties: | 1166 if 'version' in api.properties: |
| 1166 args.extend(['--got-revision-cp', api.properties['got_revision_cp']]) | 1167 args.extend(['--version', api.properties['version']]) |
| 1167 if 'version' in api.properties: | 1168 if 'git_revision' in api.properties: |
| 1168 args.extend(['--version', api.properties['version']]) | 1169 args.extend(['--git-revision', api.properties['git_revision']]) |
| 1169 if 'git_revision' in api.properties: | 1170 if 'got_webrtc_revision' in api.properties: |
| 1170 args.extend(['--git-revision', api.properties['git_revision']]) | 1171 args.extend(['--got-webrtc-revision', |
| 1171 if 'got_webrtc_revision' in api.properties: | 1172 api.properties['got_webrtc_revision']]) |
| 1172 args.extend(['--got-webrtc-revision', | 1173 if 'got_v8_revision' in api.properties: |
| 1173 api.properties['got_webrtc_revision']]) | 1174 args.extend(['--got-v8-revision', api.properties['got_v8_revision']]) |
| 1174 if 'got_v8_revision' in api.properties: | |
| 1175 args.extend(['--got-v8-revision', api.properties['got_v8_revision']]) | |
| 1176 | 1175 |
| 1177 api.python( | 1176 api.python( |
| 1178 'Upload Perf Dashboard Results', | 1177 'Upload Perf Dashboard Results', |
| 1179 api.chromium.package_repo_resource( | 1178 api.chromium.package_repo_resource( |
| 1180 'scripts', 'slave', 'upload_perf_dashboard_results.py'), | 1179 'scripts', 'slave', 'upload_perf_dashboard_results.py'), |
| 1181 args) | 1180 args) |
| 1182 | 1181 |
| 1183 except Exception as e: | 1182 except Exception as e: |
| 1184 step_result.presentation.logs['chartjson_info'].append( | 1183 step_result.presentation.logs['chartjson_info'].append( |
| 1185 'Error: Unable to upload chartjson results to perf dashboard') | 1184 'Error: Unable to upload chartjson results to perf dashboard') |
| 1186 step_result.presentation.logs['chartjson_info'].append('%r' % e) | 1185 step_result.presentation.logs['chartjson_info'].append('%r' % e) |
| 1187 | 1187 |
| 1188 | 1188 |
| 1189 def generate_isolated_script(api, chromium_tests_api, mastername, buildername, | 1189 def generate_isolated_script(api, chromium_tests_api, mastername, buildername, |
| 1190 test_spec, bot_update_step, enable_swarming=False, | 1190 test_spec, bot_update_step, enable_swarming=False, |
| 1191 swarming_dimensions=None, | 1191 swarming_dimensions=None, |
| 1192 scripts_compile_targets=None): | 1192 scripts_compile_targets=None): |
| 1193 # Get the perf id and results url if present. | 1193 # Get the perf id and results url if present. |
| 1194 bot_config = (chromium_tests_api.builders.get(mastername, {}) | 1194 bot_config = (chromium_tests_api.builders.get(mastername, {}) |
| 1195 .get('builders', {}).get(buildername, {})) | 1195 .get('builders', {}).get(buildername, {})) |
| 1196 perf_id = bot_config.get('perf-id') | 1196 perf_id = bot_config.get('perf-id') |
| (...skipping 691 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1888 args=args) | 1888 args=args) |
| 1889 api.gsutil.upload( | 1889 api.gsutil.upload( |
| 1890 temp_output_dir.join( | 1890 temp_output_dir.join( |
| 1891 '%s-android-chrome.json' % timestamp_string), | 1891 '%s-android-chrome.json' % timestamp_string), |
| 1892 'chromium-annotated-tests', 'android') | 1892 'chromium-annotated-tests', 'android') |
| 1893 | 1893 |
| 1894 GOMA_TESTS = [ | 1894 GOMA_TESTS = [ |
| 1895 GTestTest('base_unittests'), | 1895 GTestTest('base_unittests'), |
| 1896 GTestTest('content_unittests'), | 1896 GTestTest('content_unittests'), |
| 1897 ] | 1897 ] |
| OLD | NEW |