Index: testing/scripts/run_gpu_integration_test_as_googletest.py
diff --git a/testing/scripts/run_telemetry_benchmark_as_googletest.py b/testing/scripts/run_gpu_integration_test_as_googletest.py
similarity index 72%
copy from testing/scripts/run_telemetry_benchmark_as_googletest.py
copy to testing/scripts/run_gpu_integration_test_as_googletest.py
index 387c5c8da95993f984f20522253b471eca93a58d..3174c6138528d70fae115e50a60663b7ea6f7e0d 100755
--- a/testing/scripts/run_telemetry_benchmark_as_googletest.py
+++ b/testing/scripts/run_gpu_integration_test_as_googletest.py
@@ -3,7 +3,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-"""Runs an isolate bundled Telemetry benchmark.
+"""Runs an isolate bundled Telemetry GPU integration test.
 
 This script attempts to emulate the contract of gtest-style tests
 invoked via recipes. The main contract is that the caller passes the
@@ -42,7 +42,7 @@ CHROME_SANDBOX_PATH = '/opt/chromium/chrome_sandbox'
 def main():
   parser = argparse.ArgumentParser()
   parser.add_argument(
-      '--isolated-script-test-output', type=argparse.FileType('w'),
+      '--isolated-script-test-output', type=str,
       required=True)
   parser.add_argument('--xvfb', help='Start xvfb.', action='store_true')
   args, rest_args = parser.parse_known_args()
@@ -58,37 +58,41 @@
     xvfb_proc, openbox_proc, xcompmgr_proc = xvfb.start_xvfb(env=env,
                                                              build_dir='.')
     assert xvfb_proc and openbox_proc and xcompmgr_proc, 'Failed to start xvfb'
 
+  # Compatibility with gtest-based sharding.
+  total_shards = None
+  shard_index = None
+  if 'GTEST_TOTAL_SHARDS' in env:
+    total_shards = int(env['GTEST_TOTAL_SHARDS'])
+    del env['GTEST_TOTAL_SHARDS']
+  if 'GTEST_SHARD_INDEX' in env:
+    shard_index = int(env['GTEST_SHARD_INDEX'])
+    del env['GTEST_SHARD_INDEX']
+  sharding_args = []
+  if total_shards is not None and shard_index is not None:
+    sharding_args = [
+      '--total-shards=%d' % total_shards,
+      '--shard-index=%d' % shard_index
+    ]
   try:
-    tempfile_dir = tempfile.mkdtemp('telemetry')
     valid = True
-    failures = []
+    rc = 0
     try:
-      rc = common.run_command([sys.executable] + rest_args + [
-          '--output-dir', tempfile_dir,
-          '--output-format=json'
+      rc = common.run_command([sys.executable] + rest_args + sharding_args + [
+        '--write-abbreviated-json-results-to', args.isolated_script_test_output,
       ], env=env)
-      tempfile_name = os.path.join(tempfile_dir, 'results.json')
-      with open(tempfile_name) as f:
-        results = json.load(f)
-      for value in results['per_page_values']:
-        if value['type'] == 'failure':
-          failures.append(results['pages'][str(value['page_id'])]['name'])
-      valid = bool(rc == 0 or failures)
+      valid = bool(rc == 0)
     except Exception:
       traceback.print_exc()
       valid = False
-    finally:
-      shutil.rmtree(tempfile_dir)
 
-    if not valid and not failures:
+    if not valid:
       failures = ['(entire test suite)']
-      if rc == 0:
-        rc = 1 # Signal an abnormal exit.
+      with open(args.isolated_script_test_output, 'w') as fp:
+        json.dump({
+          'valid': valid,
+          'failures': failures,
+        }, fp)
 
-    json.dump({
-      'valid': valid,
-      'failures': failures,
-    }, args.isolated_script_test_output)
     return rc
   finally:
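
For reference, the sharding block this patch adds translates the gtest sharding contract (the GTEST_TOTAL_SHARDS and GTEST_SHARD_INDEX environment variables set by the swarming/recipe infrastructure) into Telemetry-style command-line flags, deleting the variables so the child process does not also try to apply gtest sharding on top of the explicit flags. A minimal standalone sketch of that translation; the helper name gtest_sharding_args is illustrative, not part of the patch:

import os


def gtest_sharding_args(env=None):
  """Translates gtest-style sharding env vars into Telemetry-style flags.

  Mirrors the block in the patch above: the variables are read and then
  removed from the environment, so the spawned test process sees only the
  --total-shards / --shard-index flags.
  """
  env = os.environ if env is None else env
  total_shards = None
  shard_index = None
  if 'GTEST_TOTAL_SHARDS' in env:
    total_shards = int(env.pop('GTEST_TOTAL_SHARDS'))
  if 'GTEST_SHARD_INDEX' in env:
    shard_index = int(env.pop('GTEST_SHARD_INDEX'))
  if total_shards is not None and shard_index is not None:
    return ['--total-shards=%d' % total_shards,
            '--shard-index=%d' % shard_index]
  return []


# Example: gtest_sharding_args({'GTEST_TOTAL_SHARDS': '4',
#                               'GTEST_SHARD_INDEX': '1'})
# returns ['--total-shards=4', '--shard-index=1'].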