Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(1318)

Unified Diff: appengine/findit/waterfall/flake/initialize_flake_pipeline.py

Issue 2369333002: [Findit] Capture versionized metadata for master_flake_analysis (Closed)
Patch Set: Created 4 years, 3 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View side-by-side diff with in-line comments
Download patch
Index: appengine/findit/waterfall/flake/initialize_flake_pipeline.py
diff --git a/appengine/findit/waterfall/flake/initialize_flake_pipeline.py b/appengine/findit/waterfall/flake/initialize_flake_pipeline.py
index 3c5d4351d8f56d4a82f394fe4e574a1e1949ebdb..dc001c8a27e71a636b90884bc25ae6530a6c07db 100644
--- a/appengine/findit/waterfall/flake/initialize_flake_pipeline.py
+++ b/appengine/findit/waterfall/flake/initialize_flake_pipeline.py
@@ -2,8 +2,6 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-from google.appengine.ext import ndb
-
from common import appengine_util
from common import constants
from model import analysis_status
@@ -12,7 +10,6 @@ from waterfall import waterfall_config
from waterfall.flake.recursive_flake_pipeline import RecursiveFlakePipeline
-@ndb.transactional
def NeedANewAnalysis(
chanli 2016/09/27 22:15:24 I think we still need to make it transactional?
lijeffrey 2016/09/28 03:12:31 The Save() call already happens in a transaction,
master_name, builder_name, build_number, step_name, test_name,
allow_new_analysis=False):
@@ -20,12 +17,13 @@ def NeedANewAnalysis(
A MasterFlakeAnalysis entity for the given parameters will be created if none
exists. When a new analysis is needed, this function will create and
- save a MasterFlakeAnalysis entity to the datastore.
+ save a MasterFlakeAnalysis entity to the datastore. TODO(lijeffrey): add
+ support for a force flag to rerun this analysis.
chanli 2016/09/27 22:15:24 Nit: Maybe move this TODO to a separate line?
lijeffrey 2016/09/28 03:12:31 Done.
Returns:
True if an analysis is needed, otherwise False.
"""
- master_flake_analysis = MasterFlakeAnalysis.Get(
+ master_flake_analysis = MasterFlakeAnalysis.GetVersion(
master_name, builder_name, build_number, step_name, test_name)
if not master_flake_analysis:
@@ -33,23 +31,30 @@ def NeedANewAnalysis(
return False
master_flake_analysis = MasterFlakeAnalysis.Create(
master_name, builder_name, build_number, step_name, test_name)
- master_flake_analysis.status = analysis_status.PENDING
- master_flake_analysis.put()
- return True
+ master_flake_analysis.Reset()
stgao 2016/09/28 00:03:24 Why do we need reset here?
lijeffrey 2016/09/28 03:12:31 We need to clear the fields, especially swarming_r
stgao 2016/09/30 21:07:31 But it is newly created, there is no leftover stat
lijeffrey 2016/10/01 01:28:04 Ah yes you are correct, I was looking at the wrong
+ _, saved = master_flake_analysis.Save()
+ if saved:
stgao 2016/09/28 00:03:24 Can we just return saved directly?
lijeffrey 2016/09/28 03:12:31 Done.
+ return True
+ else: # pragma: no cover.
+ # Another transaction has just triggered this analysis. TODO(lijeffrey):
+ # Indicate to the user to wait for the other analysis' results and remove
+ # the no cover pragma.
chanli 2016/09/27 22:15:24 Same here
lijeffrey 2016/09/28 03:12:31 Done.
+ return False
elif (master_flake_analysis.status == analysis_status.COMPLETED or
master_flake_analysis.status == analysis_status.PENDING or
master_flake_analysis.status == analysis_status.RUNNING):
return False
else:
- # TODO(caiw): Reset method.
- MasterFlakeAnalysis.Get(
- master_name, builder_name, build_number,
- step_name, test_name).key.delete()
- master_flake_analysis = MasterFlakeAnalysis.Create(
- master_name, builder_name, build_number, step_name, test_name)
- master_flake_analysis.status = analysis_status.PENDING
- master_flake_analysis.put()
- return True
+ # The previous analysis had some error, so reset it and create a new one.
+ master_flake_analysis.Reset()
+ _, saved = master_flake_analysis.Save()
+ if saved: # pragma: no branch
stgao 2016/09/28 00:03:24 same here.
lijeffrey 2016/10/01 01:28:04 Done.
+ return True
+ else: # pragma: no cover.
+ # Another transaction has just triggered this analysis. TODO(lijeffrey):
+ # Indicate to the user to wait for the other analysis' results and remove
+ # the no cover pragma.
+ return False
# Unused arguments - pylint: disable=W0612, W0613
@@ -78,7 +83,16 @@ def ScheduleAnalysisIfNeeded(master_name, builder_name, build_number, step_name,
if NeedANewAnalysis(
master_name, builder_name, build_number, step_name, test_name,
allow_new_analysis):
+ master_flake_analysis = MasterFlakeAnalysis.GetVersion(
+ master_name, builder_name, build_number, step_name, test_name)
check_flake_settings = waterfall_config.GetCheckFlakeSettings()
+
+ # TODO(lijeffrey): Allow for reruns with custom parameters if the user is
+ # not satisfied with the results generated by the default ones provided
stgao 2016/09/28 00:03:24 typo: with with
lijeffrey 2016/09/28 03:12:31 Done.
+ # by waterfall_config and record the custom ones here.
+ master_flake_analysis.algorithm_parameters = check_flake_settings
+ master_flake_analysis.put()
+
max_build_numbers_to_look_back = check_flake_settings.get(
'max_build_numbers_to_look_back')
flakiness_algorithm_results_dict = {
@@ -93,6 +107,7 @@ def ScheduleAnalysisIfNeeded(master_name, builder_name, build_number, step_name,
'lower_boundary_result': None,
'sequential_run_index': 0
}
+
pipeline_job = RecursiveFlakePipeline(
master_name, builder_name, build_number, step_name, test_name,
stgao 2016/09/28 00:03:24 Should we pass the version of the master_flake_ana
lijeffrey 2016/09/28 03:12:31 I think this is a good idea. Originally the design
stgao 2016/09/30 21:07:31 I'm against running two concurrent analysis for th
lijeffrey 2016/10/01 01:28:04 NeedANewAnalysis as is does not support running 2
master_build_number=build_number,
@@ -100,5 +115,5 @@ def ScheduleAnalysisIfNeeded(master_name, builder_name, build_number, step_name,
pipeline_job.target = appengine_util.GetTargetNameForModule(
constants.WATERFALL_BACKEND)
pipeline_job.start(queue_name=queue_name)
- return MasterFlakeAnalysis.Get(
+ return MasterFlakeAnalysis.GetVersion(
master_name, builder_name, build_number, step_name, test_name)

Powered by Google App Engine
This is Rietveld 408576698