Index: scripts/slave/runtest.py
diff --git a/scripts/slave/runtest.py b/scripts/slave/runtest.py
index deb791da7b88610b43fa1aeb5d9be5d88e3a3d6a..e154907966eb825c70d91d89fc01e7a29bc553e0 100755
--- a/scripts/slave/runtest.py
+++ b/scripts/slave/runtest.py
@@ -42,6 +42,7 @@ import config
 from slave import crash_utils
 from slave import gtest_slave_utils
 from slave import process_log_utils
+from slave import results_dashboard
 from slave import slave_utils
 from slave import xvfb
 from slave.gtest.json_results_generator import GetSvnRevision
@@ -346,6 +347,14 @@ def create_results_tracker(tracker_class, options):
   return tracker_obj
 
 
+def send_results_to_dashboard(
+    results_tracker, master, system, test, url, stdio_url):
+  for logname, log in results_tracker.PerformanceLogs().iteritems():
+    lines = [str(l).rstrip() for l in log]
+    results_dashboard.SendResults(
+        logname, lines, master, system, test, url, stdio_url)
+
+
 def annotate(test_name, result, results_tracker, full_name=False,
              perf_dashboard_id=None):
   """Given a test result and tracker, update the waterfall with test results."""
@@ -591,6 +600,12 @@ def main_mac(options, args):
              perf_dashboard_id=options.factory_properties.get(
                  'test_name'))
 
+  if options.results_url:
+    send_results_to_dashboard(
+        results_tracker, options.factory_properties.get('master'),
+        options.factory_properties.get('perf_id'), options.test_type,
+        options.results_url, options.factory_properties.get('stdio_url', None))
+
   return result
 
 
@@ -837,6 +852,12 @@ def main_linux(options, args):
              perf_dashboard_id=options.factory_properties.get(
                  'test_name'))
 
+  if options.results_url:
+    send_results_to_dashboard(
+        results_tracker, options.factory_properties.get('master_name'),
+        options.factory_properties.get('perf_id'), options.test_type,
+        options.results_url, options.factory_properties.get('stdio_url', None))
+
   return result
 
 
@@ -911,6 +932,12 @@ def main_win(options, args):
              perf_dashboard_id=options.factory_properties.get(
                  'test_name'))
 
+  if options.results_url:
+    send_results_to_dashboard(
+        results_tracker, options.factory_properties.get('master'),
+        options.factory_properties.get('perf_id'), options.test_type,
+        options.results_url, options.factory_properties.get('stdio_url', None))
+
   return result
 
 
@@ -1020,6 +1047,9 @@ def main():
                            help='Sets the return value of the simulated '
                                 'executable under test. Only has meaning when '
                                 '--parse-input is used.')
+  option_parser.add_option('', '--results-url', default='',
+                           help='The URI of the perf dashboard to upload '
+                                'results to.')
   chromium_utils.AddPropertiesOptions(option_parser)
   options, args = option_parser.parse_args()
 
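Note: the results_dashboard module imported above is added elsewhere in this change, so its implementation is not visible in this file's diff. As a rough, illustrative sketch only (the payload fields, field names, and HTTP handling below are assumptions, not taken from the patch; only the seven-argument call signature comes from send_results_to_dashboard() above), a minimal SendResults() compatible with that call could look like:

import json
import urllib
import urllib2


def SendResults(logname, lines, master, system, test, url, stdio_url):
  """Posts one performance log to the dashboard at |url|.

  Illustrative sketch only; the real module may use a different payload
  format and error handling.
  """
  payload = json.dumps({
      'logname': logname,      # name of the performance log being uploaded
      'lines': lines,          # stripped log lines from the caller
      'master': master,        # factory_properties 'master' / 'master_name'
      'bot': system,           # factory_properties 'perf_id'
      'test': test,            # options.test_type
      'stdio_url': stdio_url,  # link back to the build step's stdio, may be None
  })
  request = urllib2.Request(url, urllib.urlencode({'data': payload}))
  urllib2.urlopen(request)

With the flag wired in, a builder would invoke the runner with something like --results-url=https://chromeperf.appspot.com (shown only as an example endpoint). When the flag is left at its empty default, the if options.results_url: guard keeps send_results_to_dashboard() from being called, so existing bots are unaffected.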