Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(982)

Side by Side Diff: appengine/findit/waterfall/flake/recursive_flake_pipeline.py

Issue 2630433002: [Findit] Flake Checker: Pipeline to trigger try jobs to identify flake culprits (Closed)
Patch Set: Addressing comments Created 3 years, 11 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 # Copyright 2016 The Chromium Authors. All rights reserved. 1 # Copyright 2016 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be 2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file. 3 # found in the LICENSE file.
4 4
5 from datetime import timedelta 5 from datetime import timedelta
6 import logging 6 import logging
7 import random 7 import random
8 import textwrap 8 import textwrap
9 9
10 from common import appengine_util 10 from common import appengine_util
(...skipping 37 matching lines...) Expand 10 before | Expand all | Expand 10 after
48 labels = ['AnalyzedByFindit'] 48 labels = ['AnalyzedByFindit']
49 comment_pipeline = PostCommentToBugPipeline(analysis.bug_id, comment, labels) 49 comment_pipeline = PostCommentToBugPipeline(analysis.bug_id, comment, labels)
50 comment_pipeline.target = appengine_util.GetTargetNameForModule( 50 comment_pipeline.target = appengine_util.GetTargetNameForModule(
51 constants.WATERFALL_BACKEND) 51 constants.WATERFALL_BACKEND)
52 comment_pipeline.start(queue_name=queue_name) 52 comment_pipeline.start(queue_name=queue_name)
53 return True 53 return True
54 54
55 55
56 def _UpdateAnalysisStatusUponCompletion( 56 def _UpdateAnalysisStatusUponCompletion(
57 master_flake_analysis, suspected_build, status, error): 57 master_flake_analysis, suspected_build, status, error):
58 master_flake_analysis.end_time = time_util.GetUTCNow()
59 master_flake_analysis.status = status
60
61 if error: 58 if error:
62 master_flake_analysis.error = error 59 master_flake_analysis.error = error
63 60
64 if suspected_build != -1: 61 if suspected_build == _NO_BUILD_NUMBER:
62 master_flake_analysis.end_time = time_util.GetUTCNow()
63 master_flake_analysis.status = status
64 master_flake_analysis.result_status = result_status.NOT_FOUND_UNTRIAGED
65 else:
65 master_flake_analysis.suspected_flake_build_number = suspected_build 66 master_flake_analysis.suspected_flake_build_number = suspected_build
66 master_flake_analysis.result_status = result_status.FOUND_UNTRIAGED
67 else:
68 master_flake_analysis.result_status = result_status.NOT_FOUND_UNTRIAGED
69 67
70 master_flake_analysis.put() 68 master_flake_analysis.put()
71 69
72 70
73 def _GetETAToStartAnalysis(manually_triggered): 71 def _GetETAToStartAnalysis(manually_triggered):
74 """Returns an ETA as of a UTC datetime.datetime to start the analysis. 72 """Returns an ETA as of a UTC datetime.datetime to start the analysis.
75 73
76 If not urgent, Swarming tasks should be run off PST peak hours from 11am to 74 If not urgent, Swarming tasks should be run off PST peak hours from 11am to
77 6pm on workdays. 75 6pm on workdays.
78 76
(...skipping 231 matching lines...) Expand 10 before | Expand all | Expand 10 after
310 308
311 Args: 309 Args:
312 data_points (list): A list of data points of already-completed tasks 310 data_points (list): A list of data points of already-completed tasks
313 for this analysis. Data_points are sorted by build_numbers in descending 311 for this analysis. Data_points are sorted by build_numbers in descending
314 order. 312 order.
315 flake_settings (dict): A dict of parameters for algorithms. 313 flake_settings (dict): A dict of parameters for algorithms.
316 314
317 Returns: 315 Returns:
318 (next_build_number, suspected_build): The next build number to check 316 (next_build_number, suspected_build): The next build number to check
319 and suspected build number that the flakiness was introduced in. 317 and suspected build number that the flakiness was introduced in.
320 If needs to check next_build_number, suspected_build will be -1; 318 If needs to check next_build_number, suspected_build will be
321 If finds the suspected_build, next_build_number will be -1; 319 _NO_BUILD_NUMBER; If suspected_build is found, next_build_number will be
322 If no findings evertually, both will be -1. 320 _NO_BUILD_NUMBER; If no findings eventually, both will be
321 _NO_BUILD_NUMBER.
323 """ 322 """
324 # A description of this algorithm can be found at: 323 # A description of this algorithm can be found at:
325 # https://docs.google.com/document/d/1wPYFZ5OT998Yn7O8wGDOhgfcQ98mknoX13AesJaS 6ig/edit 324 # https://docs.google.com/document/d/1wPYFZ5OT998Yn7O8wGDOhgfcQ98mknoX13AesJaS 6ig/edit
326 # Get the last result. 325 # Get the last result.
327 lower_flake_threshold = flake_settings.get('lower_flake_threshold') 326 lower_flake_threshold = flake_settings.get('lower_flake_threshold')
328 upper_flake_threshold = flake_settings.get('upper_flake_threshold') 327 upper_flake_threshold = flake_settings.get('upper_flake_threshold')
329 max_stable_in_a_row = flake_settings.get('max_stable_in_a_row') 328 max_stable_in_a_row = flake_settings.get('max_stable_in_a_row')
330 max_flake_in_a_row = flake_settings.get('max_flake_in_a_row') 329 max_flake_in_a_row = flake_settings.get('max_flake_in_a_row')
331 max_dive_in_a_row = flake_settings.get('max_dive_in_a_row') 330 max_dive_in_a_row = flake_settings.get('max_dive_in_a_row')
332 dive_rate_threshold = flake_settings.get('dive_rate_threshold') 331 dive_rate_threshold = flake_settings.get('dive_rate_threshold')
(...skipping 155 matching lines...) Expand 10 before | Expand all | Expand 10 after
488 version_number, triggering_build_number, 487 version_number, triggering_build_number,
489 manually_triggered=manually_triggered, 488 manually_triggered=manually_triggered,
490 use_nearby_neighbor=use_nearby_neighbor, 489 use_nearby_neighbor=use_nearby_neighbor,
491 step_size=(current_build_number - next_build_number)) 490 step_size=(current_build_number - next_build_number))
492 # Disable attribute 'target' defined outside __init__ pylint warning, 491 # Disable attribute 'target' defined outside __init__ pylint warning,
493 # because pipeline generates its own __init__ based on run function. 492 # because pipeline generates its own __init__ based on run function.
494 pipeline_job.target = ( # pylint: disable=W0201 493 pipeline_job.target = ( # pylint: disable=W0201
495 appengine_util.GetTargetNameForModule(constants.WATERFALL_BACKEND)) 494 appengine_util.GetTargetNameForModule(constants.WATERFALL_BACKEND))
496 pipeline_job.StartOffPSTPeakHours( 495 pipeline_job.StartOffPSTPeakHours(
497 queue_name=self.queue_name or constants.DEFAULT_QUEUE) 496 queue_name=self.queue_name or constants.DEFAULT_QUEUE)
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698