Chromium Code Reviews| OLD | NEW |
|---|---|
| (Empty) | |
| 1 # Copyright 2015 The Chromium Authors. All rights reserved. | |
| 2 # Use of this source code is governed by a BSD-style license that can be | |
| 3 # found in the LICENSE file. | |
| 4 | |
| 5 | |
# Recipe modules this recipe depends on. The recipe engine injects each of
# these as an attribute on the `api` object passed to RunSteps (e.g.
# api.gsutil, api.swarming_client).
DEPS = [
  'archive',
  'bot_update',
  'chromium',
  'file',
  'gclient',
  'gsutil',
  'isolate',
  'path',
  'platform',
  'properties',
  'python',
  'step',
  'time',
  'swarming',
  'swarming_client',
  'zip',
]
| 24 | |
| 25 | |
# Google Storage bucket holding Cluster Telemetry artifacts (binaries,
# page sets and webpage archives) downloaded via api.gsutil below.
CT_BUCKET = 'cluster-telemetry'
# Page-set size selector; used as a path component in the GS object names
# for page sets and webpage archives (presumably the top-1k page set).
CT_PAGE_TYPE = '1k'
# Name of the prebuilt Cluster Telemetry binary fetched from CT_BUCKET.
CT_BINARY = 'run_chromium_perf_swarming'
# Template from which each slave's concrete .isolate file is generated
# (placeholders like [[SLAVE_NUM]] are substituted per slave).
CT_ISOLATE_TEMPLATE = 'ct_top1k.isolate.tmpl'

# Number of slaves to shard CT runs to.
# TODO(rmistry): Change the below to 100 when ready to run the full top 1k.
CT_NUM_SLAVES = 2
| 34 | |
| 35 | |
| 36 def _DownloadAndExtractBinary(api): | |
| 37 """Downloads the binary from the revision passed to the recipe.""" | |
| 38 build_archive_url = api.properties['parent_build_archive_url'] | |
| 39 api.archive.download_and_unzip_build( | |
| 40 step_name='Download and Extract Binary', | |
| 41 target='Release', | |
| 42 build_url=None, # This is a required parameter, but has no effect. | |
| 43 build_archive_url=build_archive_url) | |
| 44 | |
| 45 | |
| 46 # TODO(rmistry): What priority can I give the below tasks?? | |
def RunSteps(api):
  """Runs a sharded Cluster Telemetry benchmark over swarming.

  For each of CT_NUM_SLAVES shards this: downloads the shard's page sets
  and webpage archives from Google Storage, generates an .isolate file
  from CT_ISOLATE_TEMPLATE, archives it on the isolate server, and
  triggers a swarming task. After all shards are triggered, it collects
  every task and cleans up the temporary directories.

  Expects these build properties: 'buildername', 'mastername',
  'git_revision' and 'parent_build_archive_url'.
  """
  # Figure out which benchmark to use. The builder name encodes the
  # benchmark choice; anything unrecognised is a configuration error.
  buildername = api.properties['buildername']
  if 'Repaint' in buildername:
    benchmark = 'repaint'
  elif 'RR' in buildername:
    benchmark = 'rasterize_and_record_micro'
  else:
    raise Exception('Do not recognise the buildername %s.' % buildername)

  # Checkout chromium and swarming.
  api.chromium.set_config('chromium')
  api.gclient.set_config('chromium')
  api.bot_update.ensure_checkout(force=True)
  api.swarming_client.checkout()

  # Download the prebuilt chromium binary (from the parent builder's
  # archive; see _DownloadAndExtractBinary).
  _DownloadAndExtractBinary(api)

  # Path to the chromium src directory.
  chromium_src_dir = api.path['checkout']
  # Path to where artifacts should be downloaded from Google Storage.
  downloads_dir = chromium_src_dir.join('content', 'test', 'ct')
  # Path where swarming artifacts (isolate file, json output) will be stored.
  swarming_temp_dir = api.path['tmp_base'].join('swarming_temp_dir')
  api.file.makedirs('makedirs swarming_temp_dir', swarming_temp_dir)

  # Download Cluster Telemetry binary.
  ct_binary_path = downloads_dir.join(CT_BINARY)
  api.gsutil.download(
      bucket=CT_BUCKET,
      source='swarming/binaries/%s' % CT_BINARY,
      dest=ct_binary_path)

  # Record how long the step took in swarming tasks.
  swarming_start_time = api.time.time()

  # Per-shard work: download inputs, build + archive the isolate, and
  # trigger the swarming task. Slaves are numbered from 1.
  for slave_num in range(1, CT_NUM_SLAVES + 1):
    slave_dir = downloads_dir.join('slave%s' % slave_num)
    api.file.makedirs('makedirs slave_dir', slave_dir)

    # Download page sets for this shard.
    page_sets_dir = slave_dir.join('page_sets')
    api.file.makedirs('makedirs page_sets', page_sets_dir)
    api.gsutil.download(
        bucket=CT_BUCKET,
        source='swarming/page_sets/%s/slave%s/*' % (CT_PAGE_TYPE, slave_num),
        dest=page_sets_dir)

    # Download webpage archives (WPR data) for this shard.
    wpr_dir = page_sets_dir.join('data')
    api.file.makedirs('makedirs wpr', wpr_dir)
    api.gsutil.download(
        bucket=CT_BUCKET,
        source='swarming/webpage_archives/%s/slave%s/*' % (CT_PAGE_TYPE,
                                                           slave_num),
        dest=wpr_dir)

    # TODO(rmistry): Remove the entire below section after crrev.com/1410353007
    # is submitted.
    # NOTE: these copies read from absolute /repos/... paths on the bot, a
    # stopgap until the files land in the chromium checkout itself.
    api.file.copy(
        'copy %s' % CT_ISOLATE_TEMPLATE,
        '/repos/chromium/src/chrome/%s' % CT_ISOLATE_TEMPLATE,
        chromium_src_dir.join('chrome', CT_ISOLATE_TEMPLATE))
    for f in ['run_ct_top1k.py', 'path_util.py']:
      api.file.copy(
          'copy %s' % f,
          '/repos/chromium/src/content/test/ct/%s' % f,
          chromium_src_dir.join('content', 'test', 'ct', f))

    # Create this slave's isolate file from the CT_ISOLATE_TEMPLATE by
    # substituting the [[...]] placeholders. Note this writes the file
    # directly at recipe runtime (plain open()) rather than via a step.
    isolate_dir = chromium_src_dir.join('chrome')
    isolate_template_path = isolate_dir.join(CT_ISOLATE_TEMPLATE)
    generated_isolate_path = isolate_dir.join('ct_top1k.isolate')
    with open(str(generated_isolate_path), 'wb') as fout:
      with open(str(isolate_template_path), 'rb') as fin:
        for line in fin:
          fout.write(line.replace('[[SLAVE_NUM]]', str(slave_num))
                         .replace('[[MASTER]]', api.properties['mastername'])
                         .replace('[[BUILDER]]', api.properties['buildername'])
                         .replace('[[GIT_HASH]]',
                                  api.properties['git_revision'])
                         .replace('[[BENCHMARK]]', benchmark))

    # Archive everything on the isolate server.
    # NOTE(review): a reviewer suggested using `batcharchive` here instead of
    # one `archive` invocation per slave — confirm against a later patchset.
    isolated_path = swarming_temp_dir.join('ct-1k-task-%s.isolated' % slave_num)
    isolate_args = [
      'archive',
      '--isolate', generated_isolate_path,
      '--isolated', isolated_path,
      '--config-variable', 'OS', 'linux',
      '--isolate-server', api.isolate.isolate_server,
      # TODO(rmistry): Why do I need PRODUCT_DIR ? fails without it. It also
      # requires bitmaptools in PRODUCT_DIR.
      '--path-variable', 'PRODUCT_DIR', api.path['tmp_base'],
    ]
    api.python(
        'archiving isolate for slave%s' % slave_num,
        api.swarming_client.path.join('isolate.py'),
        isolate_args)

    # Trigger swarming task. The --dump-json output is what the collect
    # loop below uses to find the task.
    task_name = 'ct-1k-task-%s' % slave_num
    json_output = swarming_temp_dir.join('ct-1k-task-%s.json' % slave_num)
    swarming_trigger_args = [
      'trigger',
      '--task-name', task_name,
      isolated_path,
      '--swarming', api.swarming.swarming_server,
      '--dimension', 'os', 'Ubuntu',
      '--dimension', 'gpu', '10de',
      '--isolate-server', api.isolate.isolate_server,
      '--dump-json', json_output
    ]
    api.python(
        'triggering task for slave%s' % slave_num,
        api.swarming_client.path.join('swarming.py'),
        swarming_trigger_args)

    # We have triggered this slave's swarming task. Cleanup slave artifacts.
    api.file.rmtree('Remove slave dir', slave_dir)

  # Now collect all tasks. Done in a second loop so all shards run on
  # swarming concurrently before we start waiting on any of them.
  for slave_num in range(1, CT_NUM_SLAVES + 1):
    json_output = swarming_temp_dir.join('ct-1k-task-%s.json' % slave_num)
    swarming_collect_args = [
      'collect',
      '--swarming', api.swarming.swarming_server,
      '--json', json_output
    ]
    api.python(
        'collecting task for slave%s' % slave_num,
        api.swarming_client.path.join('swarming.py'),
        swarming_collect_args)

  # Cleanup the temporary swarming dir.
  api.file.rmtree('Remove swarming temp dir', swarming_temp_dir)

  # Python 2 print statement: the %-formatting binds to the parenthesized
  # string, so this prints the formatted elapsed time.
  print ('Running isolating, triggering and collecting swarming tasks took a '
         'total of %s seconds') % (api.time.time() - swarming_start_time)
| OLD | NEW |