Chromium Code Reviews
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.


DEPS = [
  'archive',
  'bot_update',
  'chromium',
  'file',
  'gclient',
  'gsutil',
  'isolate',
  'json',
  'path',
  'platform',
  'properties',
  'python',
  'step',
  'time',
  'swarming',
  'swarming_client',
  'zip',
]


CT_BUCKET = 'cluster-telemetry'
CT_PAGE_TYPE = '1k'
CT_BINARY = 'run_chromium_perf_swarming'
CT_ISOLATE_TEMPLATE = 'ct_top1k.isolate.tmpl'

# Number of slaves to shard CT runs to.
# TODO(rmistry): Change the below to 100 when ready to run the full top 1k.
CT_NUM_SLAVES = 2


def _DownloadAndExtractBinary(api):
  """Downloads the binary from the revision passed to the recipe."""
  build_archive_url = api.properties['parent_build_archive_url']
M-A Ruel (2015/11/09 19:46:45): I think you could inline this and it'd fit 80 cols
rmistry (2015/11/10 12:58:06): Done.
  api.archive.download_and_unzip_build(
      step_name='Download and Extract Binary',
      target='Release',
      build_url=None,  # This is a required parameter, but has no effect.
      build_archive_url=build_archive_url)

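As a sketch of the inlining M-A Ruel suggests above (the follow-up patch itself is not shown in this view), the helper body fits directly inside RunSteps within 80 columns; everything below reuses only calls already present in the CL:

# Hypothetical inlined form of _DownloadAndExtractBinary (sketch only).
api.archive.download_and_unzip_build(
    step_name='Download and Extract Binary',
    target='Release',
    build_url=None,  # Required parameter, but has no effect here.
    build_archive_url=api.properties['parent_build_archive_url'])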
# TODO(rmistry): What priority can I give the below tasks??
def RunSteps(api):
  # Figure out which benchmark to use.
  buildername = api.properties['buildername']
  if 'Repaint' in buildername:
    benchmark = 'repaint'
  elif 'RR' in buildername:
    benchmark = 'rasterize_and_record_micro'
  else:
    raise Exception('Do not recognise the buildername %s.' % buildername)

  # Checkout chromium and swarming.
  api.chromium.set_config('chromium')
  api.gclient.set_config('chromium')
  api.bot_update.ensure_checkout(force=True)
  api.swarming_client.checkout()

  # Download the prebuilt chromium binary.
  _DownloadAndExtractBinary(api)

  # Path to the chromium src directory.
  chromium_src_dir = api.path['checkout']
  # Path to where artifacts should be downloaded from Google Storage.
  downloads_dir = chromium_src_dir.join('content', 'test', 'ct')
M-A Ruel (2015/11/09 19:46:45): Is it in the .gitignore? Otherwise it'll get conti…
rmistry (2015/11/10 12:58:06): I think its ok to get continuously deleted for now
  # Path where swarming artifacts (isolate file, json output) will be stored.
  swarming_temp_dir = api.path['tmp_base'].join('swarming_temp_dir')
  api.file.makedirs('makedirs swarming_temp_dir', swarming_temp_dir)

  # Download Cluster Telemetry binary.
  ct_binary_path = downloads_dir.join(CT_BINARY)
  api.gsutil.download(
      bucket=CT_BUCKET,
      source='swarming/binaries/%s' % CT_BINARY,
      dest=ct_binary_path)

  # List that will contain all .isolated.gen.json file locations.
  isolated_gen_json_files = []

  # Record the start time so we can report how long the swarming steps took.
  swarming_start_time = api.time.time()

  for slave_num in range(1, CT_NUM_SLAVES + 1):
    slave_dir = downloads_dir.join('slave%s' % slave_num)
    api.file.makedirs('makedirs slave_dir', slave_dir)

    # Download page sets.
    page_sets_dir = slave_dir.join('page_sets')
    api.file.makedirs('makedirs page_sets', page_sets_dir)
    api.gsutil.download(
        bucket=CT_BUCKET,
        source='swarming/page_sets/%s/slave%s/*' % (CT_PAGE_TYPE, slave_num),
        dest=page_sets_dir)

    # Download archives.
    wpr_dir = page_sets_dir.join('data')
    api.file.makedirs('makedirs wpr', wpr_dir)
    api.gsutil.download(
        bucket=CT_BUCKET,
        source='swarming/webpage_archives/%s/slave%s/*' % (CT_PAGE_TYPE,
                                                           slave_num),
        dest=wpr_dir)

    # TODO(rmistry): Remove the entire below section after crrev.com/1410353007
    # is submitted.
    api.file.copy(
        'copy %s' % CT_ISOLATE_TEMPLATE,
        '/repos/chromium/src/chrome/%s' % CT_ISOLATE_TEMPLATE,
        chromium_src_dir.join('chrome', CT_ISOLATE_TEMPLATE))
    for f in ['run_ct_top1k.py', 'path_util.py']:
      api.file.copy(
          'copy %s' % f,
          '/repos/chromium/src/content/test/ct/%s' % f,
          chromium_src_dir.join('content', 'test', 'ct', f))

    # Create this slave's isolate file from the CT_ISOLATE_TEMPLATE.
    isolate_dir = chromium_src_dir.join('chrome')
    isolate_template_path = isolate_dir.join(CT_ISOLATE_TEMPLATE)
    generated_isolate_path = isolate_dir.join(
        'slave%s.ct_top1k.isolate' % slave_num)
    with open(str(generated_isolate_path), 'wb') as fout:
      with open(str(isolate_template_path), 'rb') as fin:
        for line in fin:
          fout.write(line.replace('[[SLAVE_NUM]]', str(slave_num))
                         .replace('[[MASTER]]', api.properties['mastername'])
                         .replace('[[BUILDER]]', api.properties['buildername'])
                         .replace('[[GIT_HASH]]',
                                  api.properties['git_revision'])
                         .replace('[[BENCHMARK]]', benchmark))

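The placeholder substitution above can also be driven by a replacements dict, which keeps each template key on its own line. This is only an illustrative sketch of the same logic, not part of the reviewed patch:

# Sketch: the same [[...]] placeholder substitution, table-driven.
replacements = {
    '[[SLAVE_NUM]]': str(slave_num),
    '[[MASTER]]': api.properties['mastername'],
    '[[BUILDER]]': api.properties['buildername'],
    '[[GIT_HASH]]': api.properties['git_revision'],
    '[[BENCHMARK]]': benchmark,
}
with open(str(isolate_template_path), 'rb') as fin, \
     open(str(generated_isolate_path), 'wb') as fout:
  for line in fin:
    for placeholder, value in replacements.items():
      line = line.replace(placeholder, value)
    fout.write(line)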
    # Build the isolate.py arguments for this slave's .isolated.gen.json.
    isolated_path = swarming_temp_dir.join('ct-1k-task-%s.isolated' % slave_num)
    isolate_args = [
        '--isolate', generated_isolate_path,
        '--isolated', isolated_path,
        '--config-variable', 'OS', 'linux',
        # TODO(rmistry): Why do I need PRODUCT_DIR? It fails without it. It
        # also requires bitmaptools in PRODUCT_DIR.
nednguyen (2015/11/09 23:04:32): Bitmaptools build is used by telemetry's image pro…
rmistry (2015/11/10 12:58:06): I tried using it and it failed with: IsolateError: …
        '--path-variable', 'PRODUCT_DIR', api.path['tmp_base'],
    ]
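One possible way to resolve the PRODUCT_DIR TODO above, given nednguyen's note that telemetry's image processing expects bitmaptools from the build output, would be to point the path variable at the directory the prebuilt Release binaries were extracted into rather than at tmp_base. Whether api.chromium.output_dir is the right property for that is an assumption; treat this as a sketch only:

# Sketch (assumed API): the same isolate_args, but with PRODUCT_DIR pointing
# at the extracted Release build so telemetry can find bitmaptools there.
isolate_args = [
    '--isolate', generated_isolate_path,
    '--isolated', isolated_path,
    '--config-variable', 'OS', 'linux',
    '--path-variable', 'PRODUCT_DIR', api.chromium.output_dir,
]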
    isolated_gen_dict = {
        'version': 1,
        'dir': isolate_dir,
        'args': isolate_args,
    }
    # Write the slave isolated.gen.json file.
    isolated_gen_json = swarming_temp_dir.join(
        'slave%s.isolated.gen.json' % slave_num)
    with open(str(isolated_gen_json), 'w') as fout:
      fout.write(api.json.dumps(isolated_gen_dict, indent=4))
    isolated_gen_json_files.append(isolated_gen_json)

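For a concrete picture of what gets serialized, the dict dumped into slave 1's .isolated.gen.json would look roughly like the literal below; the bracketed path strings are placeholders for whatever api.path resolves to on the bot, not real paths:

# Approximate structure of slave1.isolated.gen.json (placeholder paths).
{
    'version': 1,
    'dir': '[CHECKOUT]/chrome',
    'args': [
        '--isolate', '[CHECKOUT]/chrome/slave1.ct_top1k.isolate',
        '--isolated', '[TMP_BASE]/swarming_temp_dir/ct-1k-task-1.isolated',
        '--config-variable', 'OS', 'linux',
        '--path-variable', 'PRODUCT_DIR', '[TMP_BASE]',
    ],
}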
  # Batcharchive everything on the isolate server for efficiency.
  batcharchive_args = [
      'batcharchive',
      '--isolate-server', api.isolate.isolate_server,
      '--',
  ]
  for isolated_gen_json in isolated_gen_json_files:
M-A Ruel (2015/11/09 19:46:45): batcharchive_args.extend(str(i) for i in isolated_…
rmistry (2015/11/10 12:58:06): Done.
    batcharchive_args.append(str(isolated_gen_json))
  api.python(
      'batcharchiving isolated.gen.json for all slaves',
      api.swarming_client.path.join('isolate.py'),
M-A Ruel (2015/11/09 19:46:45): Use luci-go implementation which is an order of ma…
rmistry (2015/11/10 12:58:06): How do I use it using https://code.google.com/p/ch…
M-A Ruel (2015/11/12 00:36:01): isolate_tests() is the function you need. It is ha…
rmistry (2015/11/12 14:39:17): I could use it without refactoring. Also used trig…
      batcharchive_args)

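Per M-A Ruel's comment above (marked Done by rmistry), the append loop collapses into a single call; a one-line sketch of that replacement:

# Sketch of the suggested replacement for the append loop above.
batcharchive_args.extend(str(j) for j in isolated_gen_json_files)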
  # Trigger all swarming tasks.
  for slave_num in range(1, CT_NUM_SLAVES + 1):
    task_name = 'ct-1k-task-%s' % slave_num
    isolated_path = swarming_temp_dir.join('ct-1k-task-%s.isolated' % slave_num)
    json_output = swarming_temp_dir.join('ct-1k-task-%s.json' % slave_num)
    swarming_trigger_args = [
        'trigger',
        '--task-name', task_name,
        isolated_path,
        '--swarming', api.swarming.swarming_server,
        '--dimension', 'os', 'Ubuntu',
        '--dimension', 'gpu', '10de',
        '--isolate-server', api.isolate.isolate_server,
        '--dump-json', json_output
    ]
    api.python(
        'triggering task for slave%s' % slave_num,
        api.swarming_client.path.join('swarming.py'),
        swarming_trigger_args)

    # We have triggered this slave's swarming task. Cleanup slave artifacts.
    # (Recompute slave_dir here; the variable left over from the download loop
    # above only points at the last slave's directory.)
    slave_dir = downloads_dir.join('slave%s' % slave_num)
    api.file.rmtree('Remove slave dir', slave_dir)

  # Now collect all tasks.
  for slave_num in range(1, CT_NUM_SLAVES + 1):
    json_output = swarming_temp_dir.join('ct-1k-task-%s.json' % slave_num)
    swarming_collect_args = [
        'collect',
        '--swarming', api.swarming.swarming_server,
        '--json', json_output
    ]
    api.python(
        'collecting task for slave%s' % slave_num,
        api.swarming_client.path.join('swarming.py'),
        swarming_collect_args)

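M-A Ruel's later comments point at the luci-go-backed recipe modules (isolate_tests() plus the swarming module's trigger/collect helpers) instead of shelling out to isolate.py and swarming.py directly. The sketch below shows only the rough shape of that approach; the exact signatures of api.isolate.isolate_tests, api.isolate.isolated_tests, api.swarming.task, api.swarming.trigger_task and api.swarming.collect_task are assumptions here and would need to be checked against the recipe modules:

# Rough sketch only (assumed recipe-module APIs); not the code under review.
api.isolate.isolate_tests(
    isolate_dir,
    targets=['slave%s.ct_top1k' % n for n in range(1, CT_NUM_SLAVES + 1)])
tasks = []
for slave_num in range(1, CT_NUM_SLAVES + 1):
  task = api.swarming.task(
      'ct-1k-task-%s' % slave_num,
      api.isolate.isolated_tests['slave%s.ct_top1k' % slave_num])
  task.dimensions.update({'os': 'Ubuntu', 'gpu': '10de'})
  api.swarming.trigger_task(task)
  tasks.append(task)
for task in tasks:
  api.swarming.collect_task(task)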
  # Cleanup the temporary swarming dir.
  api.file.rmtree('Remove swarming temp dir', swarming_temp_dir)

  print ('Running isolating, triggering and collecting swarming tasks took a '
         'total of %s seconds' % (api.time.time() - swarming_start_time))