OLD | NEW |
| (Empty) |
1 # Copyright 2016 The Chromium Authors. All rights reserved. | |
2 # Use of this source code is governed by a BSD-style license that can be | |
3 # found in the LICENSE file. | |
4 | |
5 import collections | |
6 import hashlib | |
7 import json | |
8 import re | |
9 | |
10 from recipe_engine import recipe_api | |
11 | |
# CL footer tag. A patch whose description carries this footer bypasses
# build failure for its project (see RecipeTryjobApi.get_fail_build_info).
RECIPE_TRYJOB_BYPASS_REASON_TAG = "Recipe-Tryjob-Bypass-Reason"
13 | |
def get_recipes_path(project_config):
  """Returns the path components of a project's recipes directory.

  Args:
    project_config: A parsed recipes.cfg for the project, as produced by
      luci-config textproto parsing (every field value is wrapped in a list).

  Returns:
    A list (not a tuple; str.split returns a list) of the path components to
    traverse from the root of the repo to get to the directory containing
    recipes.
  """
  # "[0]" since parsing makes every field a list.
  return project_config['recipes_path'][0].split('/')
18 | |
19 | |
def get_deps(project_config):
  """Returns the list of recipe engine dep project ids for a project.

  Args:
    project_config: A parsed recipes.cfg dictionary for the project.
  """
  dep_ids = []
  # Parsed textproto wraps every field value in a list, hence the [0].
  for dep in project_config.get('deps', []):
    dep_ids.append(dep['project_id'][0])
  return dep_ids
24 | |
25 | |
def get_deps_info(projects, configs):
  """Calculates dependency information (forward and backwards) given configs.

  Args:
    projects: Iterable of luci-config project names to consider.
    configs: Mapping of project name to its parsed recipes.cfg dictionary.

  Returns:
    A (deps, downstream_projects) pair: deps maps each project to the list
    of projects it depends on; downstream_projects maps each project to the
    set of projects which depend on it.
  """
  deps = {}
  for project in projects:
    # Parsed textproto wraps every field value in a list, hence the [0].
    deps[project] = [
        dep['project_id'][0] for dep in configs[project].get('deps', [])]

  # Invert the deps graph so that, given a changed project, we know which
  # projects need to be tested. Given
  #
  #     A
  #    / \
  #   B   C
  #
  # we want to test B and C when A changes. Each recipe project only knows
  # about the B -> A and C -> A dependencies, so reverse them here to get
  # the information we want.
  downstream_projects = collections.defaultdict(set)
  for project, project_deps in deps.items():
    for dep in project_deps:
      downstream_projects[dep].add(project)

  return deps, downstream_projects
46 | |
47 | |
# One rietveld patch: the luci-config project it applies to, the codereview
# server it lives on, and its issue/patchset identifiers.
RietveldPatch = collections.namedtuple(
    'RietveldPatch', ['project', 'server', 'issue', 'patchset'])


def parse_patches(failing_step, patches_raw, rietveld, issue, patchset,
                  patch_project):
  """Parses a comma separated patch list into a project -> patch mapping.

  Expects input of the form
    project1:https://a.b.c/1342342#ps1,project2:https://d.ce.f/1231231#ps1

  Args:
    failing_step: Callable taking (name, message), used to report a fatal
      error when the same project appears twice.
    patches_raw: The raw comma separated list (may be empty/None).
    rietveld, issue, patchset, patch_project: An optional additional patch
      (only used when all four are truthy).

  Returns:
    Dict mapping project id (str) to RietveldPatch.
  """
  result = {}

  if rietveld and issue and patchset and patch_project:
    # convert to str because recipes don't like unicode as step names
    result[str(patch_project)] = RietveldPatch(
        patch_project, rietveld, issue, patchset)

  if patches_raw:
    for raw_patch in patches_raw.split(','):
      project, url = raw_patch.split(':', 1)
      server, issue_and_patchset = url.rsplit('/', 1)
      patch_issue, patch_patchset = issue_and_patchset.split('#')
      # Drop the leading "ps" from the patchset component.
      patch_patchset = patch_patchset[2:]

      if project in result:
        failing_step(
            "Invalid patchset list",
            "You have two patches for %r. Patches seen so far: %r" % (
                project, result)
        )

      result[project] = RietveldPatch(
          project, server, patch_issue, patch_patchset)

  return result
85 | |
86 | |
87 | |
# luci-config project ids of the recipe repos this tryjob knows how to test;
# projects reported by luci-config but not in this list are ignored.
PROJECTS_TO_TRY = [
  'build',
  'build_limited_scripts_slave',
  'recipe_engine',
  'depot_tools',
]

# Maps a project id to the waterfall builder linked from tryjob results as
# the "reference builder" (presumably the builder continuously running that
# project's simulation tests — the names suggest so; confirm on the
# waterfall).
PROJECT_TO_CONTINUOUS_WATERFALL = {
  'build': 'https://build.chromium.org/p/chromium.tools.build/builders/'
           'recipe-simulation_trusty64',
  'recipe_engine': 'https://build.chromium.org/p/chromium.infra/builders/'
                   'recipe_engine-recipes-tests',
  'depot_tools': 'https://build.chromium.org/p/chromium.infra/builders/'
                 'depot_tools-recipes-tests',
}

# Link presented when a project has no PROJECT_TO_CONTINUOUS_WATERFALL entry,
# prompting the user to file a bug to get a reference builder.
FILE_BUG_FOR_CONTINUOUS_LINK = 'https://goo.gl/PoAPOJ'
105 | |
106 | |
class RecipeTryjobApi(recipe_api.RecipeApi):
  """
  This is intended as a utility module for recipe tryjobs. Currently it's just a
  refactored version of a recipe; eventually some of this, especially the
  dependency information, will probably get moved into the recipe engine.
  """
  def _get_project_config(self, project):
    """Fetch the project config from luci-config.

    Args:
      project: The name of the project in luci-config.

    Returns:
      The recipes.cfg file for that project, as a parsed dictionary. See
      parse_protobuf for details on the format to expect.
    """
    result = self.m.luci_config.get_project_config(project, 'recipes.cfg')

    parsed = self.m.luci_config.parse_textproto(result['content'].split('\n'))
    return parsed

  def _checkout_projects(self, root_dir, url_mapping, deps,
                         downstream_projects, patches):
    """Checks out projects listed in projects into root_dir.

    Args:
      root_dir: Root directory to check this project out in.
      url_mapping: Project id to url of git repository.
      deps: Mapping from project to the list of projects it depends on.
      downstream_projects: The mapping from project to dependent projects.
      patches: Mapping of project id to patch to apply to that project.

    Returns:
      The projects we want to test, and the locations of those projects
    """
    # TODO(martiniss): be smarter about which projects we actually run tests on

    # All the projects we want to test.
    projs_to_test = set()
    # Projects we need to look at dependencies for.
    queue = set(patches.keys())
    # luci config project name to file system path of the checkout
    locations = {}

    # Walk the graph transitively in BOTH directions from the patched
    # projects, so every related project ends up checked out (and in
    # projs_to_test).
    while queue:
      proj = queue.pop()
      if proj not in projs_to_test:
        locations[proj] = self._checkout_project(
            proj, url_mapping[proj], root_dir, patches.get(proj))
        projs_to_test.add(proj)

        for downstream in downstream_projects[proj]:
          queue.add(downstream)
        for upstream in deps[proj]:
          queue.add(upstream)

    return projs_to_test, locations

  def _checkout_project(self, proj, proj_config, root_dir, patch=None):
    """Checks out one project into root_dir, optionally applying a patch.

    Args:
      proj: luci-config project name to checkout.
      proj_config: mapping holding the project's 'repo_url'.
        NOTE(review): the docstring previously said "the recipes.cfg
        configuration", but the only caller passes url_mapping[proj] from
        luci_config.get_projects() — verify which shape is intended.
      root_dir: The temporary directory to check the project out in.
      patch: optional patch to apply to checkout.

    Returns:
      Path to repo on disk.
    """
    checkout_path = root_dir.join(proj)
    repo_path = checkout_path.join(proj)
    self.m.file.makedirs('%s directory' % proj, repo_path)

    # Not working yet, but maybe??
    #api.file.rmtree('clean old %s repo' % proj, checkout_path)

    config = self.m.gclient.make_config(
        GIT_MODE=True, CACHE_DIR=root_dir.join("__cache_dir"))
    soln = config.solutions.add()
    soln.name = proj
    soln.url = proj_config['repo_url']

    kwargs = {
      'suffix': proj,
      'gclient_config': config,
      'force': True,
      'cwd': checkout_path,
    }
    if patch:
      kwargs['rietveld'] = patch.server
      kwargs['issue'] = patch.issue
      kwargs['patchset'] = patch.patchset
    else:
      # Explicitly disable patching when this project has no patch.
      kwargs['patch'] = False

    self.m.bot_update.ensure_checkout(**kwargs)
    return repo_path

  def get_fail_build_info(self, downstream_projects, patches):
    """Computes, per project, whether a test failure should fail the build.

    A CL description may carry a "Recipe-Tryjob-Bypass-Reason" footer; a
    project whose patch has that footer does not fail the build, and the
    bypass is propagated to its transitive downstream projects.

    Args:
      downstream_projects: Mapping from project to its dependent projects.
      patches: Mapping of project id to RietveldPatch.

    Returns:
      defaultdict (True by default) mapping project name to whether a test
      failure in that project should fail this tryjob.
    """
    fail_build = collections.defaultdict(lambda: True)

    for proj, patch in patches.items():
      patch_url = "%s/%s" % (patch.server, patch.issue)
      desc = self.m.git_cl.get_description(
          patch=patch_url, codereview='rietveld', suffix=proj)

      assert desc.stdout is not None, "CL %s had no description!" % patch_url

      bypass_reason = self.m.tryserver.get_footer(
          RECIPE_TRYJOB_BYPASS_REASON_TAG, patch_text=desc.stdout)

      # Fail the build for this project only if there is no bypass footer.
      fail_build[proj] = not bool(bypass_reason)

    # Propagate Falses down the deps tree (BFS from the patched projects).
    queue = list(patches.keys())
    while queue:
      item = queue.pop(0)

      if not fail_build[item]:
        for downstream in downstream_projects.get(item, []):
          fail_build[downstream] = False
          queue.append(downstream)

    return fail_build

  def simulation_test(self, proj, proj_config, repo_path, deps):
    """Runs a project's recipe simulation tests via its recipes.py.

    Args:
      proj: The luci-config project to simulation_test.
      proj_config: The recipes.cfg configuration for the project.
      repo_path: The path to the repository on disk.
      deps: Mapping from project name to Path. Passed into the recipes.py
        invocation via the "-O" options.

    Returns the result of running the simulation tests.
    """
    recipes_path = get_recipes_path(proj_config) + ['recipes.py']
    recipes_py_loc = repo_path.join(*recipes_path)
    args = []
    # Override each dep to point at its local checkout rather than the
    # pinned revision.
    for dep_name, location in deps.items():
      args += ['-O', '%s=%s' % (dep_name, location)]
    args += ['--package', repo_path.join('infra', 'config', 'recipes.cfg')]

    args += ['simulation_test']

    return self._python('%s tests' % proj, recipes_py_loc, args)

  def _python(self, name, script, args, **kwargs):
    """Call python from infra's virtualenv.

    This is needed because of the coverage module, which is not installed by
    default, but which infra's python has installed."""
    return self.m.step(name, [
        self.m.path['checkout'].join('ENV', 'bin', 'python'),
        '-u', script] + args, **kwargs)

  def run_tryjob(self, patches_raw, rietveld, issue, patchset, patch_project):
    """Entry point: simulation-tests every project affected by the patches.

    Args:
      patches_raw: Comma separated "project:patch_url" list; see
        parse_patches for the expected format.
      rietveld, issue, patchset, patch_project: The patch which triggered
        this tryjob (all optional; see parse_patches).

    Raises:
      recipe_api.StepFailure: if any non-bypassed project's tests fail.
    """
    patches = parse_patches(
        self.m.python.failing_step, patches_raw, rietveld, issue, patchset,
        patch_project)

    root_dir = self.m.path['slave_build']

    # Needed to set up the infra checkout, for _python
    self.m.gclient.set_config('infra')
    self.m.gclient.c.solutions[0].revision = 'origin/master'
    self.m.gclient.checkout()
    self.m.gclient.runhooks()

    url_mapping = self.m.luci_config.get_projects()

    # TODO(martiniss): use luci-config smarter; get recipes.cfg directly, rather
    # than in two steps.
    # luci config project name to recipe config namedtuple
    # NOTE(review): dead assignment — recipe_configs is rebound below.
    recipe_configs = {}

    # List of all the projects we care about testing. luci-config names
    all_projects = set(p for p in url_mapping if p in PROJECTS_TO_TRY)

    recipe_configs = {
        p: self._get_project_config(p) for p in all_projects}

    deps, downstream_projects = get_deps_info(all_projects, recipe_configs)
    should_fail_build_mapping = self.get_fail_build_info(
        downstream_projects, patches)

    projs_to_test, locations = self._checkout_projects(
        root_dir, url_mapping, deps, downstream_projects, patches)

    bad_projects = []
    for proj in projs_to_test:
      deps_locs = {dep: locations[dep] for dep in deps[proj]}

      try:
        result = self.simulation_test(
            proj, recipe_configs[proj], locations[proj], deps_locs)
      except recipe_api.StepFailure as f:
        result = f.result
        # A bypassed project's failures are shown but don't fail the build.
        if should_fail_build_mapping.get(proj, True):
          bad_projects.append(proj)
      finally:
        # NOTE(review): if simulation_test raises anything other than
        # StepFailure, `result` is unbound here and this block raises
        # NameError — confirm whether other exceptions are possible.
        link = PROJECT_TO_CONTINUOUS_WATERFALL.get(proj)
        if link:
          result.presentation.links['reference builder'] = link
        else:
          result.presentation.links[
              'no reference builder; file a bug to get one?'] = (
                  FILE_BUG_FOR_CONTINUOUS_LINK)


    if bad_projects:
      raise recipe_api.StepFailure(
          "One or more projects failed tests: %s" % (
              ','.join(bad_projects)))
320 | |
321 | |
OLD | NEW |