Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 # Copyright 2016 The Chromium Authors. All rights reserved. | 1 # Copyright 2016 The Chromium Authors. All rights reserved. |
| 2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
| 3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
| 4 | 4 |
| 5 import logging | 5 import logging |
| 6 | 6 |
| 7 from google.appengine.api import users | 7 from google.appengine.api import users |
| 8 from google.appengine.ext import ndb | 8 from google.appengine.ext import ndb |
| 9 | 9 |
| 10 from common import auth_util | 10 from common import auth_util |
| 11 from common.base_handler import BaseHandler | 11 from common.base_handler import BaseHandler |
| 12 from common.base_handler import Permission | 12 from common.base_handler import Permission |
| 13 from libs import time_util | 13 from libs import time_util |
| 14 from model import analysis_status | 14 from model import analysis_status |
| 15 from model import triage_status | 15 from model import triage_status |
| 16 from model.flake.flake_analysis_request import FlakeAnalysisRequest | 16 from model.flake.flake_analysis_request import FlakeAnalysisRequest |
| 17 from model.flake.master_flake_analysis import MasterFlakeAnalysis | 17 from model.flake.master_flake_analysis import MasterFlakeAnalysis |
| 18 from waterfall.flake import flake_analysis_service | 18 from waterfall.flake import flake_analysis_service |
| 19 from waterfall.flake import triggering_sources | 19 from waterfall.flake import triggering_sources |
| 20 | 20 |
| 21 | 21 |
| 22 def _GetSuspectedFlakeAnalysisAndTriageResult(analysis): | 22 SWARMING_TASK_BASE_URL = 'https://chromium-swarm.appspot.com' |
|
stgao
2016/12/14 07:58:28
You may want to pull in my change and rebase.
This
lijeffrey
2016/12/14 22:15:11
Done.
| |
| 23 if analysis.suspected_flake_build_number is not None: | |
| 24 return { | |
| 25 'build_number': analysis.suspected_flake_build_number, | |
| 26 'triage_result': ( | |
| 27 analysis.triage_history[-1].triage_result if analysis.triage_history | |
| 28 else triage_status.UNTRIAGED) | |
| 29 } | |
| 30 | 23 |
| 31 return {} | 24 |
| 25 def _FindSuspectedFlakeBuildDataPoint(analysis): | |
| 26 for data_point in analysis.data_points: | |
| 27 if data_point.build_number == analysis.suspected_flake_build_number: | |
| 28 return data_point | |
| 29 | |
| 30 return None # pragma: no cover. This should not happen. | |
|
chanli
2016/12/14 03:41:05
Would you mind adding a test for this? Should be very
lijeffrey
2016/12/14 22:15:11
Done.
| |
| 31 | |
| 32 | |
def _GetSuspectedFlakeInfo(analysis):
  """Returns a dict with information about the suspected flake build.

  Args:
    analysis (MasterFlakeAnalysis): The master flake analysis the suspected
        flake build is associated with.

  Returns:
    A dict in the format:
        {
            'build_number': int,
            'commit_position': int,
            'git_hash': str,
            'previous_commit_position': int,
            'previous_git_hash': str,
            'triage_result': int (correct, incorrect, etc.)
        }
    or an empty dict if no suspected flake build has been identified.
  """
  if analysis.suspected_flake_build_number is None:
    return {}

  # A matching data point must exist once a suspected build is set.
  data_point = _FindSuspectedFlakeBuildDataPoint(analysis)
  assert data_point

  history = analysis.triage_history
  triage_result = (
      history[-1].triage_result if history else triage_status.UNTRIAGED)

  return {
      'build_number': analysis.suspected_flake_build_number,
      'commit_position': data_point.commit_position,
      'git_hash': data_point.git_hash,
      'previous_commit_position': data_point.previous_commit_position,
      'previous_git_hash': data_point.previous_git_hash,
      'triage_result': triage_result,
  }
| 67 | |
| 68 | |
| 69 def _GetCoordinatesData(analysis): | |
| 70 if not analysis or not analysis.data_points: | |
| 71 return [] | |
| 72 | |
| 73 coordinates = [] | |
| 74 | |
| 75 for data_point in analysis.data_points: | |
| 76 coordinates.append([ | |
| 77 data_point.build_number, data_point.pass_rate, data_point.task_id, | |
| 78 data_point.previous_commit_position, data_point.commit_position, | |
| 79 data_point.previous_git_hash, data_point.git_hash]) | |
| 80 | |
| 81 # Order by build number from earliest to latest. | |
| 82 coordinates.sort(key=lambda x: x[0]) | |
| 83 | |
| 84 return coordinates | |
| 32 | 85 |
| 33 | 86 |
| 34 class CheckFlake(BaseHandler): | 87 class CheckFlake(BaseHandler): |
| 35 PERMISSION_LEVEL = Permission.ANYONE | 88 PERMISSION_LEVEL = Permission.ANYONE |
| 36 | 89 |
| 37 def _ValidateInput(self, master_name, builder_name, build_number, step_name, | 90 def _ValidateInput(self, master_name, builder_name, build_number, step_name, |
| 38 test_name, bug_id): | 91 test_name, bug_id): |
| 39 """Ensures the input is valid and generates an error otherwise. | 92 """Ensures the input is valid and generates an error otherwise. |
| 40 | 93 |
| 41 Args: | 94 Args: |
| (...skipping 100 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 142 if not analysis: # pragma: no cover | 195 if not analysis: # pragma: no cover |
| 143 logging.error('Flake analysis was deleted unexpectedly!') | 196 logging.error('Flake analysis was deleted unexpectedly!') |
| 144 return { | 197 return { |
| 145 'template': 'error.html', | 198 'template': 'error.html', |
| 146 'data': { | 199 'data': { |
| 147 'error_message': 'Flake analysis was deleted unexpectedly!', | 200 'error_message': 'Flake analysis was deleted unexpectedly!', |
| 148 }, | 201 }, |
| 149 'return_code': 400 | 202 'return_code': 400 |
| 150 } | 203 } |
| 151 | 204 |
| 152 suspected_flake = _GetSuspectedFlakeAnalysisAndTriageResult(analysis) | 205 suspected_flake = _GetSuspectedFlakeInfo(analysis) |
| 153 | 206 |
| 154 data = { | 207 data = { |
| 155 'key': analysis.key.urlsafe(), | 208 'key': analysis.key.urlsafe(), |
| 156 'master_name': analysis.master_name, | 209 'master_name': analysis.master_name, |
| 157 'builder_name': analysis.builder_name, | 210 'builder_name': analysis.builder_name, |
| 158 'build_number': analysis.build_number, | 211 'build_number': analysis.build_number, |
| 159 'step_name': analysis.step_name, | 212 'step_name': analysis.step_name, |
| 160 'test_name': analysis.test_name, | 213 'test_name': analysis.test_name, |
| 161 'pass_rates': [], | 214 'pass_rates': [], |
| 162 'analysis_status': analysis.status_description, | 215 'analysis_status': analysis.status_description, |
| (...skipping 12 matching lines...) Expand all Loading... | |
| 175 data['triage_history'] = analysis.GetTriageHistory() | 228 data['triage_history'] = analysis.GetTriageHistory() |
| 176 | 229 |
| 177 data['pending_time'] = time_util.FormatDuration( | 230 data['pending_time'] = time_util.FormatDuration( |
| 178 analysis.request_time, | 231 analysis.request_time, |
| 179 analysis.start_time or time_util.GetUTCNow()) | 232 analysis.start_time or time_util.GetUTCNow()) |
| 180 if analysis.status != analysis_status.PENDING: | 233 if analysis.status != analysis_status.PENDING: |
| 181 data['duration'] = time_util.FormatDuration( | 234 data['duration'] = time_util.FormatDuration( |
| 182 analysis.start_time, | 235 analysis.start_time, |
| 183 analysis.end_time or time_util.GetUTCNow()) | 236 analysis.end_time or time_util.GetUTCNow()) |
| 184 | 237 |
| 185 coordinates = [] | 238 data['pass_rates'] = _GetCoordinatesData(analysis) |
| 186 for data_point in analysis.data_points: | |
| 187 coordinates.append([ | |
| 188 data_point.build_number, data_point.pass_rate, data_point.task_id]) | |
| 189 | 239 |
| 190 # Order by build number from earliest to latest. | |
| 191 coordinates.sort(key=lambda x: x[0]) | |
| 192 | |
| 193 data['pass_rates'] = coordinates | |
| 194 return { | 240 return { |
| 195 'template': 'flake/result.html', | 241 'template': 'flake/result.html', |
| 196 'data': data | 242 'data': data |
| 197 } | 243 } |
| OLD | NEW |