| OLD | NEW |
| 1 # Copyright 2015 The Chromium Authors. All rights reserved. | 1 # Copyright 2015 The Chromium Authors. All rights reserved. |
| 2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
| 3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
| 4 | 4 |
| 5 from datetime import timedelta | 5 from datetime import timedelta |
| 6 import logging | 6 import logging |
| 7 | 7 |
| 8 from google.appengine.ext import ndb | 8 from google.appengine.ext import ndb |
| 9 | 9 |
| 10 from common import appengine_util | 10 from common import appengine_util |
| 11 from common import constants | 11 from common import constants |
| 12 from common import time_util | 12 from common import time_util |
| 13 from common.waterfall import failure_type | 13 from common.waterfall import failure_type |
| 14 from model import analysis_status | 14 from model import analysis_status |
| 15 from model.wf_analysis import WfAnalysis | 15 from model.wf_analysis import WfAnalysis |
| 16 from model.wf_build import WfBuild | 16 from model.wf_build import WfBuild |
| 17 from model.wf_failure_group import WfFailureGroup | 17 from model.wf_failure_group import WfFailureGroup |
| 18 from model.wf_try_job import WfTryJob | 18 from model.wf_try_job import WfTryJob |
| 19 from waterfall import waterfall_config | 19 from waterfall import waterfall_config |
| 20 | 20 |
| 21 # TODO(lijeffrey): Move this to config. | |
| 22 MATCHING_GROUPS_SECONDS_AGO = 24 * 60 * 60 # 24 hours. | |
| 23 | |
| 24 | 21 |
def _ShouldBailOutForOutdatedBuild(build):
  """Returns True if the build started more than a day ago."""
  # Equivalent to the days component of the age being positive: a
  # timedelta's ``days`` is 1 or more exactly when it is >= one full day.
  build_age = time_util.GetUTCNow() - build.start_time
  return build_age >= timedelta(days=1)
| 27 | 24 |
| 28 | 25 |
| 29 def _CurrentBuildKey(master_name, builder_name, build_number): | 26 def _CurrentBuildKey(master_name, builder_name, build_number): |
| 30 return '%s/%s/%d' % (master_name, builder_name, build_number) | 27 return '%s/%s/%d' % (master_name, builder_name, build_number) |
| 31 | 28 |
| 32 | 29 |
| 33 def _BlameListsIntersection(blame_list_1, blame_list_2): | 30 def _BlameListsIntersection(blame_list_1, blame_list_2): |
| (...skipping 145 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 179 def _GetOutputNodes(signals): | 176 def _GetOutputNodes(signals): |
| 180 if not signals or 'compile' not in signals: | 177 if not signals or 'compile' not in signals: |
| 181 return [] | 178 return [] |
| 182 | 179 |
| 183 # Compile failures with no output nodes will be considered unique. | 180 # Compile failures with no output nodes will be considered unique. |
| 184 return signals['compile'].get('failed_output_nodes', []) | 181 return signals['compile'].get('failed_output_nodes', []) |
| 185 | 182 |
| 186 | 183 |
def _GetMatchingFailureGroups(build_failure_type):
  """Queries recently created failure groups of the given failure type.

  Args:
    build_failure_type: A value from common.waterfall.failure_type
        (e.g. failure_type.COMPILE).

  Returns:
    A list of WfFailureGroup entities of ``build_failure_type`` created
    within the configured look-back window.
  """
  # Fall back to 24 hours (the previous hard-coded constant) if the setting
  # is missing, so an incomplete try-job config cannot make
  # timedelta(seconds=None) raise below.
  look_back_seconds = waterfall_config.GetTryJobSettings().get(
      'max_seconds_look_back_for_group', 24 * 60 * 60)
  earliest_time = time_util.GetUTCNow() - timedelta(seconds=look_back_seconds)
  return WfFailureGroup.query(ndb.AND(
      WfFailureGroup.created_time >= earliest_time,
      WfFailureGroup.build_failure_type == build_failure_type)).fetch()
| 193 | 191 |
| 194 | 192 |
def _GetMatchingCompileFailureGroups(output_nodes):
  """Returns recent compile failure groups with identical output nodes."""
  candidate_groups = _GetMatchingFailureGroups(failure_type.COMPILE)
  # Output nodes should already be unique and sorted.
  matching_groups = []
  for group in candidate_groups:
    if group.output_nodes == output_nodes:
      matching_groups.append(group)
  return matching_groups
| 199 | 197 |
| (...skipping 93 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 293 return False | 291 return False |
| 294 | 292 |
| 295 | 293 |
def _NeedANewTestTryJob(
    master_name, builder_name, build_number, failure_info, force_try_job):
  """Decides whether a new test try job is needed for this build failure."""
  # Only test failures are handled here.
  if failure_info['failure_type'] != failure_type.TEST:
    return False

  # Honor the per-builder opt-out unless the try job was forced; the config
  # check is short-circuited away entirely for forced try jobs.
  should_skip = (not force_try_job and
                 waterfall_config.ShouldSkipTestTryJobs(
                     master_name, builder_name))
  if should_skip:
    logging.info('Test try jobs on %s, %s are not supported yet.',
                 master_name, builder_name)
    return False

  return _CurrentBuildKeyInFailureResultMap(
      master_name, builder_name, build_number)
| 309 | 307 |
| 310 | 308 |
| 311 def NeedANewTryJob( | 309 def NeedANewTryJob( |
| 312 master_name, builder_name, build_number, failure_info, signals, | 310 master_name, builder_name, build_number, failure_info, signals, |
| 313 heuristic_result, force_try_job=False): | 311 heuristic_result, force_try_job=False): |
| 314 | 312 |
| 315 tryserver_mastername, tryserver_buildername = ( | 313 tryserver_mastername, tryserver_buildername = ( |
| 316 waterfall_config.GetTrybotForWaterfallBuilder(master_name, builder_name)) | 314 waterfall_config.GetTrybotForWaterfallBuilder(master_name, builder_name)) |
| 317 | 315 |
| 318 try_job_type = failure_info['failure_type'] | 316 try_job_type = failure_info['failure_type'] |
| 319 if not tryserver_mastername or not tryserver_buildername: | 317 if not tryserver_mastername or not tryserver_buildername: |
| 320 logging.info('%s, %s is not supported yet.', master_name, builder_name) | 318 logging.info('%s, %s is not supported yet.', master_name, builder_name) |
| 321 return False | 319 return False |
| 322 | 320 |
| 323 if not force_try_job: | 321 if not force_try_job: |
| 324 build = WfBuild.Get(master_name, builder_name, build_number) | 322 build = WfBuild.Get(master_name, builder_name, build_number) |
| 325 | 323 |
| 326 if _ShouldBailOutForOutdatedBuild(build): | 324 if _ShouldBailOutForOutdatedBuild(build): |
| 327 logging.error('Build time %s is more than 24 hours old. ' | 325 logging.error('Build time %s is more than 24 hours old. ' |
| 328 'Try job will not be triggered.' % build.start_time) | 326 'Try job will not be triggered.' % build.start_time) |
| 329 return False | 327 return False |
| 330 | 328 |
| 331 need_new_try_job = (_NeedANewCompileTryJob( | 329 if try_job_type == failure_type.COMPILE: |
| 332 master_name, builder_name, build_number, failure_info) | 330 need_new_try_job = _NeedANewCompileTryJob( |
| 333 if try_job_type == failure_type.COMPILE else | 331 master_name, builder_name, build_number, failure_info) |
| 334 _NeedANewTestTryJob( | 332 else: |
| 335 master_name, builder_name, build_number, failure_info, force_try_job)) | 333 need_new_try_job = _NeedANewTestTryJob( |
| 334 master_name, builder_name, build_number, failure_info, force_try_job) |
| 336 | 335 |
| 337 if need_new_try_job: | 336 if need_new_try_job: |
| 338 # TODO(josiahk): Integrate this into need_new_try_job boolean | 337 # TODO(josiahk): Integrate this into need_new_try_job boolean |
| 339 _IsBuildFailureUniqueAcrossPlatforms( | 338 _IsBuildFailureUniqueAcrossPlatforms( |
| 340 master_name, builder_name, build_number, try_job_type, | 339 master_name, builder_name, build_number, try_job_type, |
| 341 failure_info['builds'][str(build_number)]['blame_list'], | 340 failure_info['builds'][str(build_number)]['blame_list'], |
| 342 failure_info['failed_steps'], signals, heuristic_result) | 341 failure_info['failed_steps'], signals, heuristic_result) |
| 343 | 342 |
| 344 need_new_try_job = need_new_try_job and ReviveOrCreateTryJobEntity( | 343 need_new_try_job = need_new_try_job and ReviveOrCreateTryJobEntity( |
| 345 master_name, builder_name, build_number, force_try_job) | 344 master_name, builder_name, build_number, force_try_job) |
| (...skipping 13 matching lines...) Expand all Loading... |
| 359 master_name, builder_name) | 358 master_name, builder_name) |
| 360 for source_target in signals['compile'].get('failed_targets', []): | 359 for source_target in signals['compile'].get('failed_targets', []): |
| 361 # For link failures, we pass the executable targets directly to try-job, and | 360 # For link failures, we pass the executable targets directly to try-job, and |
| 362 # there is no 'source' for link failures. | 361 # there is no 'source' for link failures. |
| 363 # For compile failures, only pass the object files as the compile targets | 362 # For compile failures, only pass the object files as the compile targets |
| 364 # for the bots that we use strict regex to extract such information. | 363 # for the bots that we use strict regex to extract such information. |
| 365 if not source_target.get('source') or strict_regex: | 364 if not source_target.get('source') or strict_regex: |
| 366 compile_targets.append(source_target.get('target')) | 365 compile_targets.append(source_target.get('target')) |
| 367 | 366 |
| 368 return compile_targets | 367 return compile_targets |
| OLD | NEW |