Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(291)

Side by Side Diff: appengine/findit/handlers/flake/test/check_flake_test.py

Issue 2563383002: [Findit] Flake Checker: Extract commit position and git hash and display to UI for each analyzed build (Closed)
Patch Set: Addressing comments + rebase Created 4 years ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 # Copyright 2016 The Chromium Authors. All rights reserved. 1 # Copyright 2016 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be 2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file. 3 # found in the LICENSE file.
4 4
5 import datetime 5 import datetime
6 import mock 6 import mock
7 import re 7 import re
8 8
9 import webapp2 9 import webapp2
10 import webtest 10 import webtest
(...skipping 53 matching lines...) Expand 10 before | Expand all | Expand 10 after
64 self.test_app.get, 64 self.test_app.get,
65 '/waterfall/flake', 65 '/waterfall/flake',
66 params={ 66 params={
67 'master_name': master_name, 67 'master_name': master_name,
68 'builder_name': builder_name, 68 'builder_name': builder_name,
69 'build_number': build_number, 69 'build_number': build_number,
70 'step_name': step_name, 70 'step_name': step_name,
71 'test_name': test_name 71 'test_name': test_name
72 }) 72 })
73 73
74 def testAnyoneCanViewScheduledAnalysis(self): 74 @mock.patch.object(check_flake, '_GetSuspectedFlakeInfo',
75 return_value={
76 'build_number': 100,
77 'commit_position': 12345,
78 'git_hash': 'a_git_hash',
79 'triage_result': 0})
80 @mock.patch.object(check_flake, '_GetCoordinatesData',
81 return_value=[[100, 0.9, '1', 12345, 'a_git_hash']])
82 def testAnyoneCanViewScheduledAnalysis(self, *_):
75 master_name = 'm' 83 master_name = 'm'
76 builder_name = 'b' 84 builder_name = 'b'
77 build_number = '123' 85 build_number = '123'
78 step_name = 's' 86 step_name = 's'
79 test_name = 't' 87 test_name = 't'
80 success_rate = .9 88 success_rate = .9
81 89
82 analysis = MasterFlakeAnalysis.Create( 90 analysis = MasterFlakeAnalysis.Create(
83 master_name, builder_name, build_number, step_name, test_name) 91 master_name, builder_name, build_number, step_name, test_name)
84 data_point = DataPoint() 92 data_point = DataPoint()
85 data_point.build_number = int(build_number) 93 data_point.build_number = int(build_number)
86 data_point.pass_rate = success_rate 94 data_point.pass_rate = success_rate
87 data_point.task_id = '1' 95 data_point.task_id = '1'
88 analysis.data_points.append(data_point) 96 analysis.data_points.append(data_point)
89 analysis.status = analysis_status.COMPLETED 97 analysis.status = analysis_status.COMPLETED
90 analysis.suspected_flake_build_number = 100 98 analysis.suspected_flake_build_number = 100
91 analysis.request_time = datetime.datetime(2016, 10, 01, 12, 10, 00) 99 analysis.request_time = datetime.datetime(2016, 10, 01, 12, 10, 00)
92 analysis.start_time = datetime.datetime(2016, 10, 01, 12, 10, 05) 100 analysis.start_time = datetime.datetime(2016, 10, 01, 12, 10, 05)
93 analysis.end_time = datetime.datetime(2016, 10, 01, 13, 10, 00) 101 analysis.end_time = datetime.datetime(2016, 10, 01, 13, 10, 00)
94 analysis.algorithm_parameters = {'iterations_to_rerun': 100} 102 analysis.algorithm_parameters = {'iterations_to_rerun': 100}
95 analysis.Save() 103 analysis.Save()
96 104
97 response = self.test_app.get('/waterfall/flake', params={ 105 response = self.test_app.get('/waterfall/flake', params={
98 'key': analysis.key.urlsafe(), 106 'key': analysis.key.urlsafe(),
99 'format': 'json'}) 107 'format': 'json'})
100 108
101 expected_check_flake_result = { 109 expected_check_flake_result = {
102 'key': analysis.key.urlsafe(), 110 'key': analysis.key.urlsafe(),
103 'pass_rates': [[int(build_number), success_rate, data_point.task_id]], 111 'pass_rates': [[100, 0.9, data_point.task_id, 12345, 'a_git_hash']],
104 'analysis_status': STATUS_TO_DESCRIPTION.get(analysis.status), 112 'analysis_status': STATUS_TO_DESCRIPTION.get(analysis.status),
105 'master_name': master_name, 113 'master_name': master_name,
106 'builder_name': builder_name, 114 'builder_name': builder_name,
107 'build_number': int(build_number), 115 'build_number': int(build_number),
108 'step_name': step_name, 116 'step_name': step_name,
109 'test_name': test_name, 117 'test_name': test_name,
110 'request_time': '2016-10-01 12:10:00 UTC', 118 'request_time': '2016-10-01 12:10:00 UTC',
111 'task_number': 1, 119 'task_number': 1,
112 'error': None, 120 'error': None,
113 'iterations_to_rerun': 100, 121 'iterations_to_rerun': 100,
114 'pending_time': '00:00:05', 122 'pending_time': '00:00:05',
115 'duration': '00:59:55', 123 'duration': '00:59:55',
116 'suspected_flake': { 124 'suspected_flake': {
117 'build_number': 100, 125 'build_number': 100,
126 'commit_position': 12345,
127 'git_hash': 'a_git_hash',
118 'triage_result': 0 128 'triage_result': 0
119 }, 129 },
120 'version_number': 1, 130 'version_number': 1,
121 'show_debug_info': False 131 'show_debug_info': False
122 } 132 }
123 133
124 self.assertEquals(200, response.status_int) 134 self.assertEquals(200, response.status_int)
125 self.assertEqual(expected_check_flake_result, response.json_body) 135 self.assertEqual(expected_check_flake_result, response.json_body)
126 136
127 def testUnauthorizedUserCannotScheduleNewAnalysis(self): 137 def testUnauthorizedUserCannotScheduleNewAnalysis(self):
(...skipping 11 matching lines...) Expand all
139 params={ 149 params={
140 'master_name': master_name, 150 'master_name': master_name,
141 'builder_name': builder_name, 151 'builder_name': builder_name,
142 'build_number': build_number, 152 'build_number': build_number,
143 'step_name': step_name, 153 'step_name': step_name,
144 'test_name': test_name, 154 'test_name': test_name,
145 'format': 'json'}) 155 'format': 'json'})
146 156
147 @mock.patch.object(flake_analysis_service, 'ScheduleAnalysisForFlake', 157 @mock.patch.object(flake_analysis_service, 'ScheduleAnalysisForFlake',
148 return_value=False) 158 return_value=False)
149 def testRequestExistingAnalysis(self, _): 159 @mock.patch.object(check_flake, '_GetSuspectedFlakeInfo',
160 return_value={
161 'build_number': 100,
162 'commit_position': 12345,
163 'git_hash': 'a_git_hash',
164 'triage_result': 0})
165 @mock.patch.object(check_flake, '_GetCoordinatesData',
166 return_value=[[100, 0.9, 'url', 12345, 'a_git_hash']])
167 def testRequestExistingAnalysis(self, *_):
150 master_name = 'm' 168 master_name = 'm'
151 builder_name = 'b' 169 builder_name = 'b'
152 build_number = 123 170 build_number = 123
153 step_name = 's' 171 step_name = 's'
154 test_name = 't' 172 test_name = 't'
155 success_rate = 0.9 173 success_rate = 0.9
156 174
157 previous_analysis = MasterFlakeAnalysis.Create( 175 previous_analysis = MasterFlakeAnalysis.Create(
158 master_name, builder_name, build_number - 1, step_name, test_name) 176 master_name, builder_name, build_number - 1, step_name, test_name)
159 data_point = DataPoint() 177 data_point = DataPoint()
(...skipping 22 matching lines...) Expand all
182 response = self.test_app.get('/waterfall/flake', params={ 200 response = self.test_app.get('/waterfall/flake', params={
183 'master_name': master_name, 201 'master_name': master_name,
184 'builder_name': builder_name, 202 'builder_name': builder_name,
185 'build_number': build_number, 203 'build_number': build_number,
186 'step_name': step_name, 204 'step_name': step_name,
187 'test_name': test_name, 205 'test_name': test_name,
188 'format': 'json'}) 206 'format': 'json'})
189 207
190 expected_check_flake_result = { 208 expected_check_flake_result = {
191 'key': previous_analysis.key.urlsafe(), 209 'key': previous_analysis.key.urlsafe(),
192 'pass_rates': [[build_number - 1, success_rate, None]], 210 'pass_rates': [[100, 0.9, 'url', 12345, 'a_git_hash']],
193 'analysis_status': STATUS_TO_DESCRIPTION.get(previous_analysis.status), 211 'analysis_status': STATUS_TO_DESCRIPTION.get(previous_analysis.status),
194 'master_name': master_name, 212 'master_name': master_name,
195 'builder_name': builder_name, 213 'builder_name': builder_name,
196 'build_number': build_number - 1, 214 'build_number': build_number - 1,
197 'step_name': step_name, 215 'step_name': step_name,
198 'test_name': test_name, 216 'test_name': test_name,
199 'request_time': '2016-10-01 12:10:00 UTC', 217 'request_time': '2016-10-01 12:10:00 UTC',
200 'task_number': 1, 218 'task_number': 1,
201 'error': None, 219 'error': None,
202 'iterations_to_rerun': 100, 220 'iterations_to_rerun': 100,
203 'pending_time': '00:00:05', 221 'pending_time': '00:00:05',
204 'duration': '00:59:55', 222 'duration': '00:59:55',
205 'suspected_flake': { 223 'suspected_flake': {
206 'build_number': 100, 224 'build_number': 100,
225 'commit_position': 12345,
226 'git_hash': 'a_git_hash',
207 'triage_result': 0 227 'triage_result': 0
208 }, 228 },
209 'version_number': 1, 229 'version_number': 1,
210 'show_debug_info': False 230 'show_debug_info': False
211 } 231 }
212 232
213 self.assertEqual(200, response.status_int) 233 self.assertEqual(200, response.status_int)
214 self.assertEqual(expected_check_flake_result, response.json_body) 234 self.assertEqual(expected_check_flake_result, response.json_body)
215 235
216 @mock.patch.object(flake_analysis_service, 'ScheduleAnalysisForFlake', 236 @mock.patch.object(flake_analysis_service, 'ScheduleAnalysisForFlake',
(...skipping 17 matching lines...) Expand all
234 self.test_app.get, 254 self.test_app.get,
235 '/waterfall/flake', 255 '/waterfall/flake',
236 params={ 256 params={
237 'master_name': master_name, 257 'master_name': master_name,
238 'builder_name': builder_name, 258 'builder_name': builder_name,
239 'build_number': build_number, 259 'build_number': build_number,
240 'step_name': step_name, 260 'step_name': step_name,
241 'test_name': test_name, 261 'test_name': test_name,
242 'format': 'json'}) 262 'format': 'json'})
243 263
264 @mock.patch.object(check_flake, '_GetSuspectedFlakeInfo',
265 return_value={
266 'build_number': 100,
267 'commit_position': 12345,
268 'git_hash': 'a_git_hash',
269 'triage_result': 0})
270 @mock.patch.object(check_flake, '_GetCoordinatesData',
271 return_value=[[100, 0.9, 'url', 12345, 'a_git_hash']])
244 @mock.patch.object(users, 'is_current_user_admin', return_value=True) 272 @mock.patch.object(users, 'is_current_user_admin', return_value=True)
245 def testGetTriageHistory(self, _): 273 def testGetTriageHistory(self, *_):
246 master_name = 'm' 274 master_name = 'm'
247 builder_name = 'b' 275 builder_name = 'b'
248 build_number = '123' 276 build_number = '123'
249 step_name = 's' 277 step_name = 's'
250 test_name = 't' 278 test_name = 't'
251 suspected_flake_build_number = 123 279 suspected_flake_build_number = 123
252 triage_result = 2 280 triage_result = 2
253 user_name = 'test' 281 user_name = 'test'
254 282
255 analysis = MasterFlakeAnalysis.Create( 283 analysis = MasterFlakeAnalysis.Create(
(...skipping 48 matching lines...) Expand 10 before | Expand all | Expand 10 after
304 'm', 'b', '1', None, 't', '').get('data', {}).get('error_message'), 332 'm', 'b', '1', None, 't', '').get('data', {}).get('error_message'),
305 'Step name must be specified') 333 'Step name must be specified')
306 self.assertEqual( 334 self.assertEqual(
307 CheckFlake()._ValidateInput( 335 CheckFlake()._ValidateInput(
308 'm', 'b', '1', 's', None, '').get('data', {}).get('error_message'), 336 'm', 'b', '1', 's', None, '').get('data', {}).get('error_message'),
309 'Test name must be specified') 337 'Test name must be specified')
310 self.assertEqual( 338 self.assertEqual(
311 CheckFlake()._ValidateInput( 339 CheckFlake()._ValidateInput(
312 'm', 'b', '1', 's', 't', 'a').get('data', {}).get('error_message'), 340 'm', 'b', '1', 's', 't', 'a').get('data', {}).get('error_message'),
313 'Bug id (optional) must be an int') 341 'Bug id (optional) must be an int')
342
343 def testGetSuspectedFlakeInfo(self):
344 analysis = MasterFlakeAnalysis.Create('m', 'b', 123, 's', 't')
345 analysis.suspected_flake_build_number = 123
346 data_point = DataPoint()
347 data_point.build_number = 123
348 data_point.pass_rate = 0.9
349 data_point.commit_position = 2
350 data_point.git_hash = 'git_hash_2'
351 data_point.previous_commit_position = 1
352 data_point.previous_git_hash = 'git_hash_1'
353 analysis.data_points.append(data_point)
354 analysis.Save()
355
356 expected_result = {
357 'build_number': analysis.suspected_flake_build_number,
358 'commit_position': 2,
359 'git_hash': 'git_hash_2',
360 'previous_commit_position': 1,
361 'previous_git_hash': 'git_hash_1',
362 'triage_result': 0
363 }
364 self.assertEqual(expected_result,
365 check_flake._GetSuspectedFlakeInfo(analysis))
366
367 def testGetCoordinatesData(self):
368 master_name = 'm'
369 builder_name = 'b'
370 build_number = 123
371 step_name = 's'
372 test_name = 't'
373 success_rate = .9
374 analysis = MasterFlakeAnalysis.Create(
375 master_name, builder_name, build_number, step_name, test_name)
376 data_point = DataPoint()
377 data_point.build_number = build_number
378 data_point.pass_rate = success_rate
379 data_point.commit_position = 2
380 data_point.git_hash = 'git_hash_2'
381 data_point.previous_commit_position = 1
382 data_point.previous_git_hash = 'git_hash_1'
383 analysis.data_points.append(data_point)
384 analysis.Save()
385
386 self.assertEqual([[build_number, success_rate, None, 1, 2, 'git_hash_1',
387 'git_hash_2']],
388 check_flake._GetCoordinatesData(analysis))
389
390 def testFindSuspectedFlakeBuildDataPoint(self):
391 master_name = 'm'
392 builder_name = 'b'
393 build_number = 123
394 step_name = 's'
395 test_name = 't'
396 analysis = MasterFlakeAnalysis.Create(
397 master_name, builder_name, build_number, step_name, test_name)
398 analysis.suspected_flake_build_number = build_number
399 data_point_1 = DataPoint()
400 data_point_1.build_number = build_number - 1
401 data_point_1.pass_rate = 1
402 data_point_1.commit_position = 2
403 data_point_1.git_hash = 'git_hash_2'
404 data_point_1.previous_commit_position = 1
405 data_point_1.previous_git_hash = 'git_hash_1'
406 analysis.data_points.append(data_point_1)
407 data_point_2 = DataPoint()
408 data_point_2.build_number = build_number
409 data_point_2.pass_rate = 0.9
410 data_point_2.commit_position = 4
411 data_point_2.git_hash = 'git_hash_4'
412 data_point_2.previous_commit_position = 3
413 data_point_2.previous_git_hash = 'git_hash_3'
414 analysis.data_points.append(data_point_2)
415 analysis.Save()
416
417 self.assertEqual(data_point_2,
418 check_flake._FindSuspectedFlakeBuildDataPoint(analysis))
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698