Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 # Copyright 2016 The Chromium Authors. All rights reserved. | 1 # Copyright 2016 The Chromium Authors. All rights reserved. |
| 2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
| 3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
| 4 | 4 |
| 5 import json | |
| 5 import logging | 6 import logging |
| 6 | 7 |
| 7 from google.appengine.api import users | 8 from google.appengine.api import users |
| 8 from google.appengine.ext import ndb | 9 from google.appengine.ext import ndb |
| 9 | 10 |
| 10 from common import auth_util | 11 from common import auth_util |
| 11 from common.base_handler import BaseHandler | 12 from common.base_handler import BaseHandler |
| 12 from common.base_handler import Permission | 13 from common.base_handler import Permission |
| 13 from libs import time_util | 14 from libs import time_util |
| 15 from libs.gitiles import commit_util | |
| 14 from model import analysis_status | 16 from model import analysis_status |
| 15 from model import triage_status | 17 from model import triage_status |
| 16 from model.flake.flake_analysis_request import FlakeAnalysisRequest | 18 from model.flake.flake_analysis_request import FlakeAnalysisRequest |
| 17 from model.flake.master_flake_analysis import MasterFlakeAnalysis | 19 from model.flake.master_flake_analysis import MasterFlakeAnalysis |
| 20 from waterfall import buildbot | |
| 21 from waterfall import build_util | |
| 18 from waterfall.flake import flake_analysis_service | 22 from waterfall.flake import flake_analysis_service |
| 19 from waterfall.flake import triggering_sources | 23 from waterfall.flake import triggering_sources |
| 20 | 24 |
| 21 | 25 |
| 22 SWARMING_TASK_BASE_URL = 'https://chromium-swarm.appspot.com' | 26 SWARMING_TASK_BASE_URL = 'https://chromium-swarm.appspot.com' |
| 23 | 27 |
| 24 | 28 |
def _GetCommitPositionAndGitHash(master_name, builder_name, build_number):
  """Gets the commit position and git hash of a build.

  Args:
    master_name (str): The name of the master.
    builder_name (str): The name of the builder.
    build_number (int): The build number.

  Returns:
    commit_position (int), git_hash (str): The git commit position
        corresponding to the last commit in the build, and the git hash
        itself. Both are None if the build data could not be retrieved.
  """
  build = build_util.DownloadBuildData(master_name, builder_name, build_number)

  # DownloadBuildData may fail to return a build entity at all, or return one
  # whose serialized data is empty; in either case there is nothing to parse.
  if not build or not build.data:
    return None, None

  build_data = json.loads(build.data)
  git_hash = buildbot.GetBuildProperty(
      build_data.get('properties', []), 'got_revision')
  commit_position = None
  changes = build_data.get('sourceStamp', {}).get('changes', [])

  if changes:  # pragma: no branch
    # The build's commit position is taken from the last change in the build,
    # whose commit message embeds the Cr-Commit-Position footer.
    last_commit_message = changes[-1].get('comments')
    commit_position, _ = commit_util.ExtractCommitPositionAndCodeReviewUrl(
        last_commit_message)

  return commit_position, git_hash
| 58 | |
| 59 | |
def _GetSuspectedFlakeInfo(analysis):
  """Returns a dict with information about the suspected flake build.

  Args:
    analysis (MasterFlakeAnalysis): The master flake analysis the suspected
      flake build is associated with.

  Returns:
    A dict in the format:
      {
          'build_number': int,
          'triage_result': int (correct, incorrect, etc.)
          'commit_position': The human-readable commit position associated with
              the suspected flaky build number.
          'git_hash': The git hash associated with the suspected flaky build.
      }
    An empty dict if there is no suspected flake build yet.
  """
  if analysis.suspected_flake_build_number is None:
    return {}

  # Look up the commit info of the suspected flake build itself, not the
  # build that triggered the analysis (analysis.build_number).
  commit_position, git_hash = _GetCommitPositionAndGitHash(
      analysis.master_name, analysis.builder_name,
      analysis.suspected_flake_build_number)

  return {
      'build_number': analysis.suspected_flake_build_number,
      'commit_position': commit_position,
      'git_hash': git_hash,
      'triage_result': (
          analysis.triage_history[-1].triage_result if analysis.triage_history
          else triage_status.UNTRIAGED)
  }
| 35 | 92 |
| 36 | 93 |
def _GetCoordinatesData(analysis):
  """Assembles chart coordinates for each data point of a flake analysis.

  Args:
    analysis (MasterFlakeAnalysis): The analysis whose data points to convert.

  Returns:
    A list of [build_number, pass_rate, task_url, commit_position, git_hash]
    entries ordered by build number from earliest to latest, or an empty list
    if the analysis is missing or has no data points.
  """
  if not analysis or not analysis.data_points:
    return []

  coordinates = []

  for point in analysis.data_points:
    # Resolve the commit position and git hash for this data point's build.
    commit_position, git_hash = _GetCommitPositionAndGitHash(
        analysis.master_name, analysis.builder_name, point.build_number)

    if point.task_id:
      task_url = '%s/task?id=%s' % (SWARMING_TASK_BASE_URL, point.task_id)
    else:
      task_url = None

    coordinates.append(
        [point.build_number, point.pass_rate, task_url, commit_position,
         git_hash])

  # Order by build number from earliest to latest.
  coordinates.sort(key=lambda entry: entry[0])

  return coordinates
| 115 | |
| 116 | |
| 37 class CheckFlake(BaseHandler): | 117 class CheckFlake(BaseHandler): |
| 38 PERMISSION_LEVEL = Permission.ANYONE | 118 PERMISSION_LEVEL = Permission.ANYONE |
| 39 | 119 |
| 40 def _ValidateInput(self, master_name, builder_name, build_number, step_name, | 120 def _ValidateInput(self, master_name, builder_name, build_number, step_name, |
| 41 test_name, bug_id): | 121 test_name, bug_id): |
| 42 """Ensures the input is valid and generates an error otherwise. | 122 """Ensures the input is valid and generates an error otherwise. |
| 43 | 123 |
| 44 Args: | 124 Args: |
| 45 master_name (str): The name of the master the flaky test was found on. | 125 master_name (str): The name of the master the flaky test was found on. |
| 46 builder_name (str): The name of the builder the flaky test was found on. | 126 builder_name (str): The name of the builder the flaky test was found on. |
| (...skipping 98 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 145 if not analysis: # pragma: no cover | 225 if not analysis: # pragma: no cover |
| 146 logging.error('Flake analysis was deleted unexpectedly!') | 226 logging.error('Flake analysis was deleted unexpectedly!') |
| 147 return { | 227 return { |
| 148 'template': 'error.html', | 228 'template': 'error.html', |
| 149 'data': { | 229 'data': { |
| 150 'error_message': 'Flake analysis was deleted unexpectedly!', | 230 'error_message': 'Flake analysis was deleted unexpectedly!', |
| 151 }, | 231 }, |
| 152 'return_code': 400 | 232 'return_code': 400 |
| 153 } | 233 } |
| 154 | 234 |
| 155 suspected_flake = _GetSuspectedFlakeAnalysisAndTriageResult(analysis) | 235 suspected_flake = _GetSuspectedFlakeInfo(analysis) |
| 156 | 236 |
| 157 data = { | 237 data = { |
| 158 'key': analysis.key.urlsafe(), | 238 'key': analysis.key.urlsafe(), |
| 159 'master_name': analysis.master_name, | 239 'master_name': analysis.master_name, |
| 160 'builder_name': analysis.builder_name, | 240 'builder_name': analysis.builder_name, |
| 161 'build_number': analysis.build_number, | 241 'build_number': analysis.build_number, |
| 162 'step_name': analysis.step_name, | 242 'step_name': analysis.step_name, |
| 163 'test_name': analysis.test_name, | 243 'test_name': analysis.test_name, |
| 164 'pass_rates': [], | 244 'pass_rates': [], |
| 165 'analysis_status': analysis.status_description, | 245 'analysis_status': analysis.status_description, |
| (...skipping 12 matching lines...) Expand all Loading... | |
| 178 data['triage_history'] = analysis.GetTriageHistory() | 258 data['triage_history'] = analysis.GetTriageHistory() |
| 179 | 259 |
| 180 data['pending_time'] = time_util.FormatDuration( | 260 data['pending_time'] = time_util.FormatDuration( |
| 181 analysis.request_time, | 261 analysis.request_time, |
| 182 analysis.start_time or time_util.GetUTCNow()) | 262 analysis.start_time or time_util.GetUTCNow()) |
| 183 if analysis.status != analysis_status.PENDING: | 263 if analysis.status != analysis_status.PENDING: |
| 184 data['duration'] = time_util.FormatDuration( | 264 data['duration'] = time_util.FormatDuration( |
| 185 analysis.start_time, | 265 analysis.start_time, |
| 186 analysis.end_time or time_util.GetUTCNow()) | 266 analysis.end_time or time_util.GetUTCNow()) |
| 187 | 267 |
| 188 coordinates = [] | 268 data['pass_rates'] = _GetCoordinatesData(analysis) |
| 189 for data_point in analysis.data_points: | |
| 190 if data_point.task_id: | |
| 191 task_url = '%s/task?id=%s' % ( | |
| 192 SWARMING_TASK_BASE_URL, data_point.task_id) | |
| 193 coordinates.append([ | |
| 194 data_point.build_number, data_point.pass_rate, task_url]) | |
| 195 else: | |
| 196 coordinates.append([ | |
| 197 data_point.build_number, data_point.pass_rate, None]) | |
| 198 | 269 |
| 199 # Order by build number from earliest to latest. | |
| 200 coordinates.sort(key=lambda x: x[0]) | |
| 201 | |
| 202 data['pass_rates'] = coordinates | |
| 203 return { | 270 return { |
| 204 'template': 'flake/result.html', | 271 'template': 'flake/result.html', |
| 205 'data': data | 272 'data': data |
| 206 } | 273 } |
| OLD | NEW |