Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(2)

Side by Side Diff: appengine/findit/handlers/flake/check_flake.py

Issue 2563383002: [Findit] Flake Checker: Extract commit position and git hash and display to UI for each analyzed bu… (Closed)
Patch Set: Fixing nit Created 4 years ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « no previous file | appengine/findit/handlers/flake/test/check_flake_test.py » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 # Copyright 2016 The Chromium Authors. All rights reserved. 1 # Copyright 2016 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be 2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file. 3 # found in the LICENSE file.
4 4
5 import logging 5 import logging
6 6
7 from google.appengine.api import users 7 from google.appengine.api import users
8 from google.appengine.ext import ndb 8 from google.appengine.ext import ndb
9 9
10 from common import auth_util 10 from common import auth_util
11 from common.base_handler import BaseHandler 11 from common.base_handler import BaseHandler
12 from common.base_handler import Permission 12 from common.base_handler import Permission
13 from libs import time_util 13 from libs import time_util
14 from model import analysis_status 14 from model import analysis_status
15 from model import triage_status 15 from model import triage_status
16 from model.flake.flake_analysis_request import FlakeAnalysisRequest 16 from model.flake.flake_analysis_request import FlakeAnalysisRequest
17 from model.flake.master_flake_analysis import MasterFlakeAnalysis 17 from model.flake.master_flake_analysis import MasterFlakeAnalysis
18 from waterfall.flake import flake_analysis_service 18 from waterfall.flake import flake_analysis_service
19 from waterfall.flake import triggering_sources 19 from waterfall.flake import triggering_sources
20 20
21 21
22 def _GetSuspectedFlakeAnalysisAndTriageResult(analysis): 22 def _FindSuspectedFlakeBuildDataPoint(analysis):
23 if analysis.suspected_flake_build_number is not None: 23 for data_point in analysis.data_points:
24 return { 24 if data_point.build_number == analysis.suspected_flake_build_number:
25 'build_number': analysis.suspected_flake_build_number, 25 return data_point
26 'triage_result': (
27 analysis.triage_history[-1].triage_result if analysis.triage_history
28 else triage_status.UNTRIAGED)
29 }
30 26
31 return {} 27 return None
28
29
def _GetSuspectedFlakeInfo(analysis):
  """Returns a dict with information about the suspected flake build.

  Args:
    analysis (MasterFlakeAnalysis): The master flake analysis the suspected
      flake build is associated with.

  Returns:
    A dict in the format:
    {
        'build_number': int,
        'commit_position': int,
        'git_hash': str,
        'previous_build_commit_position': int,
        'previous_build_git_hash': str,
        'triage_result': int (correct, incorrect, etc.)
    }
    or an empty dict if no suspected flake build has been identified.
  """
  if analysis.suspected_flake_build_number is None:
    return {}

  # A suspected build number implies a corresponding data point must exist.
  data_point = _FindSuspectedFlakeBuildDataPoint(analysis)
  assert data_point

  if analysis.triage_history:
    triage_result = analysis.triage_history[-1].triage_result
  else:
    triage_result = triage_status.UNTRIAGED

  return {
      'build_number': analysis.suspected_flake_build_number,
      'commit_position': data_point.commit_position,
      'git_hash': data_point.git_hash,
      'previous_build_commit_position': (
          data_point.previous_build_commit_position),
      'previous_build_git_hash': data_point.previous_build_git_hash,
      'triage_result': triage_result,
  }
65
66
67 def _GetCoordinatesData(analysis):
68 if not analysis or not analysis.data_points:
69 return []
70
71 coordinates = []
72
73 for data_point in analysis.data_points:
74 coordinates.append([
75 data_point.commit_position, data_point.pass_rate,
76 data_point.task_id, data_point.build_number, data_point.git_hash,
77 data_point.previous_build_commit_position,
78 data_point.previous_build_git_hash])
79
80 # Order by build number from earliest to latest.
81 coordinates.sort(key=lambda x: x[0])
82
83 return coordinates
32 84
33 85
34 class CheckFlake(BaseHandler): 86 class CheckFlake(BaseHandler):
35 PERMISSION_LEVEL = Permission.ANYONE 87 PERMISSION_LEVEL = Permission.ANYONE
36 88
37 def _ValidateInput(self, master_name, builder_name, build_number, step_name, 89 def _ValidateInput(self, master_name, builder_name, build_number, step_name,
38 test_name, bug_id): 90 test_name, bug_id):
39 """Ensures the input is valid and generates an error otherwise. 91 """Ensures the input is valid and generates an error otherwise.
40 92
41 Args: 93 Args:
(...skipping 100 matching lines...) Expand 10 before | Expand all | Expand 10 after
142 if not analysis: # pragma: no cover 194 if not analysis: # pragma: no cover
143 logging.error('Flake analysis was deleted unexpectedly!') 195 logging.error('Flake analysis was deleted unexpectedly!')
144 return { 196 return {
145 'template': 'error.html', 197 'template': 'error.html',
146 'data': { 198 'data': {
147 'error_message': 'Flake analysis was deleted unexpectedly!', 199 'error_message': 'Flake analysis was deleted unexpectedly!',
148 }, 200 },
149 'return_code': 400 201 'return_code': 400
150 } 202 }
151 203
152 suspected_flake = _GetSuspectedFlakeAnalysisAndTriageResult(analysis) 204 suspected_flake = _GetSuspectedFlakeInfo(analysis)
153 205
154 data = { 206 data = {
155 'key': analysis.key.urlsafe(), 207 'key': analysis.key.urlsafe(),
156 'master_name': analysis.master_name, 208 'master_name': analysis.master_name,
157 'builder_name': analysis.builder_name, 209 'builder_name': analysis.builder_name,
158 'build_number': analysis.build_number, 210 'build_number': analysis.build_number,
159 'step_name': analysis.step_name, 211 'step_name': analysis.step_name,
160 'test_name': analysis.test_name, 212 'test_name': analysis.test_name,
161 'pass_rates': [], 213 'pass_rates': [],
162 'analysis_status': analysis.status_description, 214 'analysis_status': analysis.status_description,
(...skipping 12 matching lines...) Expand all
175 data['triage_history'] = analysis.GetTriageHistory() 227 data['triage_history'] = analysis.GetTriageHistory()
176 228
177 data['pending_time'] = time_util.FormatDuration( 229 data['pending_time'] = time_util.FormatDuration(
178 analysis.request_time, 230 analysis.request_time,
179 analysis.start_time or time_util.GetUTCNow()) 231 analysis.start_time or time_util.GetUTCNow())
180 if analysis.status != analysis_status.PENDING: 232 if analysis.status != analysis_status.PENDING:
181 data['duration'] = time_util.FormatDuration( 233 data['duration'] = time_util.FormatDuration(
182 analysis.start_time, 234 analysis.start_time,
183 analysis.end_time or time_util.GetUTCNow()) 235 analysis.end_time or time_util.GetUTCNow())
184 236
185 coordinates = [] 237 data['pass_rates'] = _GetCoordinatesData(analysis)
186 for data_point in analysis.data_points:
187 coordinates.append([
188 data_point.build_number, data_point.pass_rate, data_point.task_id])
189 238
190 # Order by build number from earliest to latest.
191 coordinates.sort(key=lambda x: x[0])
192
193 data['pass_rates'] = coordinates
194 return { 239 return {
195 'template': 'flake/result.html', 240 'template': 'flake/result.html',
196 'data': data 241 'data': data
197 } 242 }
OLDNEW
« no previous file with comments | « no previous file | appengine/findit/handlers/flake/test/check_flake_test.py » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698