Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 # Copyright 2015 The Chromium Authors. All rights reserved. | 1 # Copyright 2015 The Chromium Authors. All rights reserved. |
| 2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
| 3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
| 4 | 4 |
| 5 from datetime import datetime | 5 from datetime import datetime |
| 6 from datetime import timedelta | |
| 6 import logging | 7 import logging |
| 7 | 8 |
| 8 from google.appengine.ext import ndb | 9 from google.appengine.ext import ndb |
| 9 | 10 |
| 10 from common import appengine_util | 11 from common import appengine_util |
| 11 from common import constants | 12 from common import constants |
| 12 from common.waterfall import failure_type | 13 from common.waterfall import failure_type |
| 13 from model import analysis_status | 14 from model import analysis_status |
| 14 from model.wf_analysis import WfAnalysis | 15 from model.wf_analysis import WfAnalysis |
| 15 from model.wf_build import WfBuild | 16 from model.wf_build import WfBuild |
| 16 from model.wf_failure_group import WfFailureGroup | 17 from model.wf_failure_group import WfFailureGroup |
| 17 from model.wf_try_job import WfTryJob | 18 from model.wf_try_job import WfTryJob |
| 18 from waterfall import swarming_tasks_to_try_job_pipeline | 19 from waterfall import swarming_tasks_to_try_job_pipeline |
| 19 from waterfall import waterfall_config | 20 from waterfall import waterfall_config |
| 20 from waterfall.try_job_type import TryJobType | 21 from waterfall.try_job_type import TryJobType |
| 21 | 22 |
| 22 | 23 |
| 24 MATCHING_GROUPS_HOURS_AGO = 24 | |
|
lijeffrey
2016/08/09 23:00:00
make this seconds, but as
MATCHING_GROUPS_SECONDS
josiahk
2016/08/09 23:10:36
Done! Thanks!
| |
| 25 | |
| 26 | |
| 23 def _CheckFailureForTryJobKey( | 27 def _CheckFailureForTryJobKey( |
| 24 master_name, builder_name, build_number, | 28 master_name, builder_name, build_number, |
| 25 failure_result_map, failed_step_or_test, failure): | 29 failure_result_map, failed_step_or_test, failure): |
| 26 """Compares the current_failure and first_failure for each failed_step/test. | 30 """Compares the current_failure and first_failure for each failed_step/test. |
| 27 | 31 |
| 28 If equal, a new try_job needs to start; | 32 If equal, a new try_job needs to start; |
| 29 If not, apply the key of the first_failure's try_job to this failure. | 33 If not, apply the key of the first_failure's try_job to this failure. |
| 30 """ | 34 """ |
| 31 # TODO(chanli): Need to compare failures across builders | 35 # TODO(chanli): Need to compare failures across builders |
| 32 # after the grouping of failures is implemented. | 36 # after the grouping of failures is implemented. |
| (...skipping 157 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 190 analysis = WfAnalysis.Get(master_name, builder_name, build_number) | 194 analysis = WfAnalysis.Get(master_name, builder_name, build_number) |
| 191 analysis.failure_group_key = failure_group_key | 195 analysis.failure_group_key = failure_group_key |
| 192 analysis.put() | 196 analysis.put() |
| 193 | 197 |
| 194 | 198 |
def _CreateBuildFailureGroup(
    master_name, builder_name, build_number, build_failure_type, blame_list,
    suspected_tuples, output_nodes=None, failed_steps_and_tests=None):
  """Records a new WfFailureGroup entity for the given build failure.

  The group is stamped with the current UTC time when created, so that
  later group lookups can be limited to recently created groups.
  """
  group = WfFailureGroup.Create(master_name, builder_name, build_number)
  group.build_failure_type = build_failure_type
  group.created_time = datetime.utcnow()
  group.blame_list = blame_list
  group.suspected_tuples = suspected_tuples
  group.output_nodes = output_nodes
  group.failed_steps_and_tests = failed_steps_and_tests
  group.put()
| 205 | 210 |
| 206 | 211 |
def _GetMatchingGroup(wf_failure_groups, blame_list, suspected_tuples):
  """Returns the first group matching this failure, or None.

  A group matches when its blame list intersects the given blame list and
  its suspected tuples are exactly equal to the given suspected tuples.
  """
  matches = (
      group for group in wf_failure_groups
      if _BlameListsIntersection(group.blame_list, blame_list) and
      group.suspected_tuples == suspected_tuples)
  return next(matches, None)
| 214 | 219 |
| 215 | 220 |
| 216 def _GetOutputNodes(signals): | 221 def _GetOutputNodes(signals): |
| 217 if not signals or 'compile' not in signals: | 222 if not signals or 'compile' not in signals: |
| 218 return [] | 223 return [] |
| 219 | 224 |
| 220 # Compile failures with no output nodes will be considered unique. | 225 # Compile failures with no output nodes will be considered unique. |
| 221 return signals['compile'].get('failed_output_nodes', []) | 226 return signals['compile'].get('failed_output_nodes', []) |
| 222 | 227 |
| 223 | 228 |
def _GetMatchingFailureGroups(build_failure_type):
  """Fetches recently created WfFailureGroups of the given failure type.

  Only groups created within the last MATCHING_GROUPS_HOURS_AGO hours are
  returned, which bounds the candidate set for duplicate detection.
  """
  cutoff = datetime.utcnow() - timedelta(hours=MATCHING_GROUPS_HOURS_AGO)
  recent_with_type = ndb.AND(
      WfFailureGroup.created_time >= cutoff,
      WfFailureGroup.build_failure_type == build_failure_type)
  return WfFailureGroup.query(recent_with_type).fetch()
| 234 | |
| 235 | |
def _GetMatchingCompileFailureGroups(output_nodes):
  """Returns recent compile failure groups with identical output nodes.

  Output nodes should already be unique and sorted, so plain list
  equality suffices for the comparison.
  """
  candidates = _GetMatchingFailureGroups(failure_type.COMPILE)
  return [g for g in candidates if g.output_nodes == output_nodes]
| 231 | 241 |
def _GetMatchingTestFailureGroups(failed_steps_and_tests):
  """Returns recent test failure groups with identical failed steps/tests."""
  candidates = _GetMatchingFailureGroups(failure_type.TEST)
  return [g for g in candidates
          if g.failed_steps_and_tests == failed_steps_and_tests]
| 238 | 247 |
| 239 def _IsBuildFailureUniqueAcrossPlatforms( | 248 def _IsBuildFailureUniqueAcrossPlatforms( |
| 240 master_name, builder_name, build_number, build_failure_type, blame_list, | 249 master_name, builder_name, build_number, build_failure_type, blame_list, |
| 241 failed_steps, signals, heuristic_result): | 250 failed_steps, signals, heuristic_result): |
| 242 output_nodes = None | 251 output_nodes = None |
| 243 failed_steps_and_tests = None | 252 failed_steps_and_tests = None |
| 244 | 253 |
| 245 if build_failure_type == failure_type.COMPILE: | 254 if build_failure_type == failure_type.COMPILE: |
| 246 output_nodes = _GetOutputNodes(signals) | 255 output_nodes = _GetOutputNodes(signals) |
| (...skipping 64 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 311 need_new_try_job, last_pass = _CheckFailureForTryJobKey( | 320 need_new_try_job, last_pass = _CheckFailureForTryJobKey( |
| 312 master_name, builder_name, build_number, | 321 master_name, builder_name, build_number, |
| 313 failure_result_map, TryJobType.COMPILE, failed_steps['compile']) | 322 failure_result_map, TryJobType.COMPILE, failed_steps['compile']) |
| 314 else: | 323 else: |
| 315 try_job_type = TryJobType.TEST | 324 try_job_type = TryJobType.TEST |
| 316 targeted_tests, need_new_try_job, last_pass = ( | 325 targeted_tests, need_new_try_job, last_pass = ( |
| 317 _CheckIfNeedNewTryJobForTestFailure( | 326 _CheckIfNeedNewTryJobForTestFailure( |
| 318 'step', master_name, builder_name, build_number, failure_result_map, | 327 'step', master_name, builder_name, build_number, failure_result_map, |
| 319 failed_steps)) | 328 failed_steps)) |
| 320 | 329 |
| 321 | |
| 322 need_new_try_job = ( | 330 need_new_try_job = ( |
| 323 need_new_try_job and ReviveOrCreateTryJobEntity( | 331 need_new_try_job and ReviveOrCreateTryJobEntity( |
| 324 master_name, builder_name, build_number, force_try_job)) | 332 master_name, builder_name, build_number, force_try_job)) |
| 325 | 333 |
| 326 # TODO(josiahk): Integrate _IsBuildFailureUniqueAcrossPlatforms() into | 334 # TODO(josiahk): Integrate _IsBuildFailureUniqueAcrossPlatforms() into |
| 327 # need_new_try_job boolean | 335 # need_new_try_job boolean |
| 328 if need_new_try_job: | 336 if need_new_try_job: |
| 329 _IsBuildFailureUniqueAcrossPlatforms( | 337 _IsBuildFailureUniqueAcrossPlatforms( |
| 330 master_name, builder_name, build_number, build_failure_type, | 338 master_name, builder_name, build_number, build_failure_type, |
| 331 builds[str(build_number)]['blame_list'], failed_steps, signals, | 339 builds[str(build_number)]['blame_list'], failed_steps, signals, |
| (...skipping 102 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 434 pipeline.pipeline_status_path, try_job_type) | 442 pipeline.pipeline_status_path, try_job_type) |
| 435 else: # pragma: no cover | 443 else: # pragma: no cover |
| 436 logging_str = ( | 444 logging_str = ( |
| 437 'Try job was scheduled for build %s, %s, %s: %s because of %s ' | 445 'Try job was scheduled for build %s, %s, %s: %s because of %s ' |
| 438 'failure.') % ( | 446 'failure.') % ( |
| 439 master_name, builder_name, build_number, | 447 master_name, builder_name, build_number, |
| 440 pipeline.pipeline_status_path, try_job_type) | 448 pipeline.pipeline_status_path, try_job_type) |
| 441 logging.info(logging_str) | 449 logging.info(logging_str) |
| 442 | 450 |
| 443 return failure_result_map | 451 return failure_result_map |
| OLD | NEW |