Chromium Code Reviews

Side by Side Diff: scripts/slave/recipe_modules/ct_swarming/api.py

Issue 1423993007: CT Perf recipe to run benchmarks on the top 1k sites using swarming (Closed) Base URL: https://chromium.googlesource.com/chromium/tools/build@master
Patch Set: Rename ct_top1k_rr_perf to ct_top1k_perf Created 5 years, 1 month ago
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.


from recipe_engine import recipe_api


CT_GS_BUCKET = 'cluster-telemetry'


class CTSwarmingApi(recipe_api.RecipeApi):
  """Provides steps to run CT tasks on swarming bots."""

  def __init__(self, **kwargs):
    super(CTSwarmingApi, self).__init__(**kwargs)
    # Directory that artifacts from Google Storage are downloaded into. Will be
    # populated when checkout_dependencies is called.
    self._downloads_dir = None
    # Path where swarming artifacts (isolate file, json output, etc) will be
    # stored. Will be populated when checkout_dependencies is called.
    self._swarming_temp_dir = None
    # Directory where the outputs of the swarming tasks will be stored.
    self._tasks_output_dir = None
    # Collection of all swarming tasks triggered by this recipe.
    self._swarming_tasks = []

  @property
  def downloads_dir(self):
    return self._downloads_dir
M-A Ruel 2015/11/20 18:14:01 I'd prefer return self.m.path['checkout'].join('content', 'test', 'ct')
rmistry 2015/11/23 15:13:48 Done.
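For readers following the thread: the change being asked for would have the property derive the path from the checkout rather than cache a value set in __init__. A minimal sketch of that shape (illustrative only, assuming the same content/test/ct layout that checkout_dependencies sets up below):

  @property
  def downloads_dir(self):
    # Compute the directory from the checkout path on every access instead of
    # relying on a value that is only set once checkout_dependencies() runs.
    return self.m.path['checkout'].join('content', 'test', 'ct')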

  @property
  def tasks_output_dir(self):
    return self._tasks_output_dir

  def checkout_dependencies(self):
    """Checks out all repositories required for CT to run on swarming bots."""
    # Checkout chromium and swarming.
    self.m.chromium.set_config('chromium')
    self.m.gclient.set_config('chromium')
    self.m.bot_update.ensure_checkout(force=True)
    self.m.swarming_client.checkout()
    # Set the paths required by this recipe module.
    self._downloads_dir = self.m.path['checkout'].join('content', 'test', 'ct')
    self._swarming_temp_dir = self.m.path['tmp_base'].join('swarming_temp_dir')
    self.m.file.makedirs('Create swarming tmp dir', self._swarming_temp_dir)
    # The directory to store swarming task outputs in.
    self._tasks_output_dir = self._swarming_temp_dir.join('outputs')
    # Ensure swarming_client is compatible with what recipes expect.
    self.m.swarming.check_client_version()

  def download_CT_binary(self, ct_binary_name):
    """Downloads the specified CT binary from GS into the downloads_dir."""
    binary_dest = self._downloads_dir.join(ct_binary_name)
    self.m.gsutil.download(
        name="download %s" % ct_binary_name,
        bucket=CT_GS_BUCKET,
        source='swarming/binaries/%s' % ct_binary_name,
        dest=binary_dest)
    # Set executable bit on the binary.
    self.m.python.inline(
        name='Set executable bit on %s' % ct_binary_name,
        program='''
import os
import stat

os.chmod('%s', os.stat('%s').st_mode | stat.S_IEXEC)
''' % (str(binary_dest), str(binary_dest))
    )

  def download_page_artifacts(self, page_type, slave_num):
    """Downloads all the artifacts needed to run benchmarks on a page.

    The artifacts are downloaded into subdirectories in the downloads_dir.

    Args:
      page_type: str. The CT page type. Eg: 1k, 10k.
      slave_num: int. The number of the slave used to determine which GS
                 directory to download from. Eg: for the top 1k, slave1 will
                 contain webpages 1-10, slave2 will contain 11-20.
    """
    # Download page sets.
    page_sets_dir = self._downloads_dir.join('slave%s' % slave_num, 'page_sets')
    self.m.file.makedirs('Create page_sets dir', page_sets_dir)
    self.m.gsutil.download(
        bucket=CT_GS_BUCKET,
        source='swarming/page_sets/%s/slave%s/*' % (page_type, slave_num),
        dest=page_sets_dir)

    # Download archives.
    wpr_dir = page_sets_dir.join('data')
    self.m.file.makedirs('Create WPR dir', wpr_dir)
    self.m.gsutil.download(
        bucket=CT_GS_BUCKET,
        source='swarming/webpage_archives/%s/slave%s/*' % (page_type,
                                                           slave_num),
        dest=wpr_dir)

  def create_isolated_gen_json(self, isolate_path, base_dir, os_type,
                               slave_num, extra_variables):
    """Creates an isolated.gen.json file.

    Args:
      isolate_path: path obj. Path to the isolate file.
      base_dir: path obj. Dir that is the base of all paths in the isolate file.
      os_type: str. The OS type to use when archiving the isolate file.
               Eg: linux.
      slave_num: int. The slave to create the isolated.gen.json file for.
      extra_variables: dict of str to str. The extra vars to pass to isolate.
                       Eg: {'SLAVE_NUM': '1', 'MASTER': 'ChromiumPerfFYI'}

    Returns:
      Path to the isolated.gen.json file.
    """
    isolated_path = self._swarming_temp_dir.join(
        'ct-task-%s.isolated' % slave_num)
    isolate_args = [
        '--isolate', isolate_path,
        '--isolated', isolated_path,
        '--config-variable', 'OS', os_type,
    ]
    for k, v in extra_variables.iteritems():
      isolate_args.extend(['--extra-variable', k, v])
    isolated_gen_dict = {
        'version': 1,
        'dir': base_dir,
        'args': isolate_args,
    }
    isolated_gen_json = self._swarming_temp_dir.join(
        'slave%s.isolated.gen.json' % slave_num)
    self.m.file.write(
        'Write slave%s.isolated.gen.json' % slave_num,
        isolated_gen_json,
        self.m.json.dumps(isolated_gen_dict, indent=4),
    )
    return isolated_gen_json

  def batcharchive(self, num_slaves):
    """Calls batcharchive on the specified isolated.gen.json files.

    Args:
      num_slaves: int. The number of slaves we will batcharchive
                  isolated.gen.json files for.
    """
    self.m.isolate.isolate_tests(
        build_dir=self._swarming_temp_dir,
        targets=['slave%s' % num for num in range(1, num_slaves+1)])

  def trigger_swarming_tasks(self, swarm_hashes, task_name_prefix, dimensions):
    """Triggers swarming tasks using swarm hashes.

    Args:
      swarm_hashes: list of str. List of swarm hashes from the isolate server.
      task_name_prefix: The prefix to use when creating task_name.
      dimensions: dict of str to str. The dimensions to run the task on.
                  Eg: {'os': 'Ubuntu', 'gpu': '10de'}
    """
    task_num = 0
    for swarm_hash in swarm_hashes:
      task_num += 1
      swarming_task = self.m.swarming.task(
          title='%s-%s' % (task_name_prefix, task_num),
          isolated_hash=swarm_hash,
          task_output_dir=self._tasks_output_dir.join('slave%s' % task_num))
      swarming_task.dimensions = dimensions
      swarming_task.priority = 90
      self._swarming_tasks.append(swarming_task)
    self.m.swarming.trigger(self._swarming_tasks)
M-A Ruel 2015/11/20 18:14:01 You should return the tasks and remove self._swarming_tasks.
rmistry 2015/11/23 15:13:48 Done.
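A sketch of the refactor M-A Ruel is suggesting (illustrative only, not the code in this patch set): build the tasks in a local list and hand them back to the caller instead of stashing them on the module:

  def trigger_swarming_tasks(self, swarm_hashes, task_name_prefix, dimensions):
    """Triggers swarming tasks and returns them for later collection."""
    tasks = []
    for task_num, swarm_hash in enumerate(swarm_hashes, 1):
      task = self.m.swarming.task(
          title='%s-%s' % (task_name_prefix, task_num),
          isolated_hash=swarm_hash,
          task_output_dir=self._tasks_output_dir.join('slave%s' % task_num))
      task.dimensions = dimensions
      task.priority = 90
      tasks.append(task)
    self.m.swarming.trigger(tasks)
    return tasks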

  def collect_swarming_tasks(self):
M-A Ruel 2015/11/20 18:14:01 Accept the tasks as a parameter.
rmistry 2015/11/23 15:13:48 Done.
170 """Collects all swarming tasks triggered by this recipe."""
171 return self.m.swarming.collect(self._swarming_tasks)
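And the matching collect step that takes the tasks as a parameter, as requested in the comment above (again a sketch, not the committed code):

  def collect_swarming_tasks(self, swarming_tasks):
    """Collects the given swarming tasks."""
    return self.m.swarming.collect(swarming_tasks)

With that shape, a recipe using the module threads the tasks through explicitly, for example tasks = api.ct_swarming.trigger_swarming_tasks(hashes, 'ct-task', dimensions) followed by api.ct_swarming.collect_swarming_tasks(tasks), instead of relying on state stored on the module.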
