Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(1669)

Side by Side Diff: appengine/findit/waterfall/try_job_util.py

Issue 2227223002: [Findit] Fix 1500 byte cap on failed_steps_and_tests and output_nodes (Closed) Base URL: https://chromium.googlesource.com/infra/infra.git@master
Patch Set: Code review fixes Created 4 years, 4 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « appengine/findit/model/wf_failure_group.py ('k') | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 # Copyright 2015 The Chromium Authors. All rights reserved. 1 # Copyright 2015 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be 2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file. 3 # found in the LICENSE file.
4 4
5 from datetime import datetime 5 from datetime import datetime
6 from datetime import timedelta
6 import logging 7 import logging
7 8
8 from google.appengine.ext import ndb 9 from google.appengine.ext import ndb
9 10
10 from common import appengine_util 11 from common import appengine_util
11 from common import constants 12 from common import constants
12 from common.waterfall import failure_type 13 from common.waterfall import failure_type
13 from model import analysis_status 14 from model import analysis_status
14 from model.wf_analysis import WfAnalysis 15 from model.wf_analysis import WfAnalysis
15 from model.wf_build import WfBuild 16 from model.wf_build import WfBuild
16 from model.wf_failure_group import WfFailureGroup 17 from model.wf_failure_group import WfFailureGroup
17 from model.wf_try_job import WfTryJob 18 from model.wf_try_job import WfTryJob
18 from waterfall import swarming_tasks_to_try_job_pipeline 19 from waterfall import swarming_tasks_to_try_job_pipeline
19 from waterfall import waterfall_config 20 from waterfall import waterfall_config
20 from waterfall.try_job_type import TryJobType 21 from waterfall.try_job_type import TryJobType
21 22
23 # TODO(lijeffrey): Move this to config.
24 MATCHING_GROUPS_SECONDS_AGO = 24 * 60 * 60 # 24 hours
lijeffrey 2016/08/10 00:09:54 nit: end the comment with a period.
25
22 26
23 def _CheckFailureForTryJobKey( 27 def _CheckFailureForTryJobKey(
24 master_name, builder_name, build_number, 28 master_name, builder_name, build_number,
25 failure_result_map, failed_step_or_test, failure): 29 failure_result_map, failed_step_or_test, failure):
26 """Compares the current_failure and first_failure for each failed_step/test. 30 """Compares the current_failure and first_failure for each failed_step/test.
27 31
28 If equal, a new try_job needs to start; 32 If equal, a new try_job needs to start;
29 If not, apply the key of the first_failure's try_job to this failure. 33 If not, apply the key of the first_failure's try_job to this failure.
30 """ 34 """
31 # TODO(chanli): Need to compare failures across builders 35 # TODO(chanli): Need to compare failures across builders
(...skipping 157 matching lines...) Expand 10 before | Expand all | Expand 10 after
189 master_name, builder_name, build_number, failure_group_key): 193 master_name, builder_name, build_number, failure_group_key):
190 analysis = WfAnalysis.Get(master_name, builder_name, build_number) 194 analysis = WfAnalysis.Get(master_name, builder_name, build_number)
191 analysis.failure_group_key = failure_group_key 195 analysis.failure_group_key = failure_group_key
192 analysis.put() 196 analysis.put()
193 197
194 198
def _CreateBuildFailureGroup(
    master_name, builder_name, build_number, build_failure_type, blame_list,
    suspected_tuples, output_nodes=None, failed_steps_and_tests=None):
  """Persists a new WfFailureGroup recording this build failure's signature.

  The stored fields (blame list, suspected CLs, and either compile output
  nodes or failed steps/tests) are what later builds compare against when
  deciding whether their failure is a duplicate of this one.
  """
  group = WfFailureGroup.Create(master_name, builder_name, build_number)
  # Timestamped so group matching can restrict itself to recent groups
  # (see MATCHING_GROUPS_SECONDS_AGO).
  group.created_time = datetime.utcnow()
  group.build_failure_type = build_failure_type
  group.blame_list = blame_list
  group.suspected_tuples = suspected_tuples
  group.output_nodes = output_nodes
  group.failed_steps_and_tests = failed_steps_and_tests
  group.put()
205 210
206 211
def _GetMatchingGroup(wf_failure_groups, blame_list, suspected_tuples):
  """Returns the first group matching this failure's signature, or None.

  A group matches when its blame list intersects the given one and its
  suspected CL tuples are exactly equal.
  """
  candidates = (
      group for group in wf_failure_groups
      if _BlameListsIntersection(group.blame_list, blame_list) and
      group.suspected_tuples == suspected_tuples)
  return next(candidates, None)
214 219
215 220
216 def _GetOutputNodes(signals): 221 def _GetOutputNodes(signals):
217 if not signals or 'compile' not in signals: 222 if not signals or 'compile' not in signals:
218 return [] 223 return []
219 224
220 # Compile failures with no output nodes will be considered unique. 225 # Compile failures with no output nodes will be considered unique.
221 return signals['compile'].get('failed_output_nodes', []) 226 return signals['compile'].get('failed_output_nodes', [])
222 227
223 228
def _GetMatchingFailureGroups(build_failure_type):
  """Fetches failure groups of the given type created within the window.

  Both predicates (recency and failure type) are applied in the datastore
  query itself; the window length is MATCHING_GROUPS_SECONDS_AGO seconds.
  """
  cutoff = datetime.utcnow() - timedelta(seconds=MATCHING_GROUPS_SECONDS_AGO)
  recent_of_type = WfFailureGroup.query(ndb.AND(
      WfFailureGroup.created_time >= cutoff,
      WfFailureGroup.build_failure_type == build_failure_type))
  return recent_of_type.fetch()
235
236
def _GetMatchingCompileFailureGroups(output_nodes):
  """Returns recent compile-failure groups whose output nodes match exactly.

  Filtering on output_nodes is done in Python rather than in the datastore
  query to avoid comparing against a possibly-truncated indexed value.
  """
  recent = _GetMatchingFailureGroups(failure_type.COMPILE)
  # Output nodes should already be unique and sorted.
  return [g for g in recent if g.output_nodes == output_nodes]
230 241
231 242
def _GetMatchingTestFailureGroups(failed_steps_and_tests):
  """Returns recent test-failure groups with identical failed steps/tests."""
  matching = []
  for group in _GetMatchingFailureGroups(failure_type.TEST):
    if group.failed_steps_and_tests == failed_steps_and_tests:
      matching.append(group)
  return matching
237 247
238 248
239 def _IsBuildFailureUniqueAcrossPlatforms( 249 def _IsBuildFailureUniqueAcrossPlatforms(
240 master_name, builder_name, build_number, build_failure_type, blame_list, 250 master_name, builder_name, build_number, build_failure_type, blame_list,
241 failed_steps, signals, heuristic_result): 251 failed_steps, signals, heuristic_result):
242 output_nodes = None 252 output_nodes = None
243 failed_steps_and_tests = None 253 failed_steps_and_tests = None
244 254
245 if build_failure_type == failure_type.COMPILE: 255 if build_failure_type == failure_type.COMPILE:
246 output_nodes = _GetOutputNodes(signals) 256 output_nodes = _GetOutputNodes(signals)
(...skipping 64 matching lines...) Expand 10 before | Expand all | Expand 10 after
311 need_new_try_job, last_pass = _CheckFailureForTryJobKey( 321 need_new_try_job, last_pass = _CheckFailureForTryJobKey(
312 master_name, builder_name, build_number, 322 master_name, builder_name, build_number,
313 failure_result_map, TryJobType.COMPILE, failed_steps['compile']) 323 failure_result_map, TryJobType.COMPILE, failed_steps['compile'])
314 else: 324 else:
315 try_job_type = TryJobType.TEST 325 try_job_type = TryJobType.TEST
316 targeted_tests, need_new_try_job, last_pass = ( 326 targeted_tests, need_new_try_job, last_pass = (
317 _CheckIfNeedNewTryJobForTestFailure( 327 _CheckIfNeedNewTryJobForTestFailure(
318 'step', master_name, builder_name, build_number, failure_result_map, 328 'step', master_name, builder_name, build_number, failure_result_map,
319 failed_steps)) 329 failed_steps))
320 330
321
322 need_new_try_job = ( 331 need_new_try_job = (
323 need_new_try_job and ReviveOrCreateTryJobEntity( 332 need_new_try_job and ReviveOrCreateTryJobEntity(
324 master_name, builder_name, build_number, force_try_job)) 333 master_name, builder_name, build_number, force_try_job))
325 334
326 # TODO(josiahk): Integrate _IsBuildFailureUniqueAcrossPlatforms() into 335 # TODO(josiahk): Integrate _IsBuildFailureUniqueAcrossPlatforms() into
327 # need_new_try_job boolean 336 # need_new_try_job boolean
328 if need_new_try_job: 337 if need_new_try_job:
329 _IsBuildFailureUniqueAcrossPlatforms( 338 _IsBuildFailureUniqueAcrossPlatforms(
330 master_name, builder_name, build_number, build_failure_type, 339 master_name, builder_name, build_number, build_failure_type,
331 builds[str(build_number)]['blame_list'], failed_steps, signals, 340 builds[str(build_number)]['blame_list'], failed_steps, signals,
(...skipping 102 matching lines...) Expand 10 before | Expand all | Expand 10 after
434 pipeline.pipeline_status_path, try_job_type) 443 pipeline.pipeline_status_path, try_job_type)
435 else: # pragma: no cover 444 else: # pragma: no cover
436 logging_str = ( 445 logging_str = (
437 'Try job was scheduled for build %s, %s, %s: %s because of %s ' 446 'Try job was scheduled for build %s, %s, %s: %s because of %s '
438 'failure.') % ( 447 'failure.') % (
439 master_name, builder_name, build_number, 448 master_name, builder_name, build_number,
440 pipeline.pipeline_status_path, try_job_type) 449 pipeline.pipeline_status_path, try_job_type)
441 logging.info(logging_str) 450 logging.info(logging_str)
442 451
443 return failure_result_map 452 return failure_result_map
OLDNEW
« no previous file with comments | « appengine/findit/model/wf_failure_group.py ('k') | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698