Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(210)

Side by Side Diff: appengine/findit/handlers/flake/test/check_flake_test.py

Issue 2563383002: [Findit] Flake Checker: Extract commit position and git hash and display to UI for each analyzed build (Closed)
Patch Set: Fixing nit Created 4 years ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 # Copyright 2016 The Chromium Authors. All rights reserved. 1 # Copyright 2016 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be 2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file. 3 # found in the LICENSE file.
4 4
5 import datetime 5 import datetime
6 import mock 6 import mock
7 import re 7 import re
8 8
9 import webapp2 9 import webapp2
10 import webtest 10 import webtest
(...skipping 53 matching lines...) Expand 10 before | Expand all | Expand 10 after
64 self.test_app.get, 64 self.test_app.get,
65 '/waterfall/flake', 65 '/waterfall/flake',
66 params={ 66 params={
67 'master_name': master_name, 67 'master_name': master_name,
68 'builder_name': builder_name, 68 'builder_name': builder_name,
69 'build_number': build_number, 69 'build_number': build_number,
70 'step_name': step_name, 70 'step_name': step_name,
71 'test_name': test_name 71 'test_name': test_name
72 }) 72 })
73 73
74 def testAnyoneCanViewScheduledAnalysis(self): 74 @mock.patch.object(check_flake, '_GetSuspectedFlakeInfo',
75 return_value={
76 'build_number': 100,
77 'commit_position': 12345,
78 'git_hash': 'git_hash_1',
79 'triage_result': 0})
80 @mock.patch.object(check_flake, '_GetCoordinatesData',
81 return_value=[[12345, 0.9, '1', 100, 'git_hash_2',
82 12344, 'git_hash_1']])
83 def testAnyoneCanViewScheduledAnalysis(self, *_):
75 master_name = 'm' 84 master_name = 'm'
76 builder_name = 'b' 85 builder_name = 'b'
77 build_number = '123' 86 build_number = '123'
78 step_name = 's' 87 step_name = 's'
79 test_name = 't' 88 test_name = 't'
80 success_rate = .9 89 success_rate = .9
81 90
82 analysis = MasterFlakeAnalysis.Create( 91 analysis = MasterFlakeAnalysis.Create(
83 master_name, builder_name, build_number, step_name, test_name) 92 master_name, builder_name, build_number, step_name, test_name)
84 data_point = DataPoint() 93 data_point = DataPoint()
85 data_point.build_number = int(build_number) 94 data_point.build_number = int(build_number)
86 data_point.pass_rate = success_rate 95 data_point.pass_rate = success_rate
87 data_point.task_id = '1' 96 data_point.task_id = '1'
88 analysis.data_points.append(data_point) 97 analysis.data_points.append(data_point)
89 analysis.status = analysis_status.COMPLETED 98 analysis.status = analysis_status.COMPLETED
90 analysis.suspected_flake_build_number = 100 99 analysis.suspected_flake_build_number = 100
91 analysis.request_time = datetime.datetime(2016, 10, 01, 12, 10, 00) 100 analysis.request_time = datetime.datetime(2016, 10, 01, 12, 10, 00)
92 analysis.start_time = datetime.datetime(2016, 10, 01, 12, 10, 05) 101 analysis.start_time = datetime.datetime(2016, 10, 01, 12, 10, 05)
93 analysis.end_time = datetime.datetime(2016, 10, 01, 13, 10, 00) 102 analysis.end_time = datetime.datetime(2016, 10, 01, 13, 10, 00)
94 analysis.algorithm_parameters = {'iterations_to_rerun': 100} 103 analysis.algorithm_parameters = {'iterations_to_rerun': 100}
95 analysis.Save() 104 analysis.Save()
96 105
97 response = self.test_app.get('/waterfall/flake', params={ 106 response = self.test_app.get('/waterfall/flake', params={
98 'key': analysis.key.urlsafe(), 107 'key': analysis.key.urlsafe(),
99 'format': 'json'}) 108 'format': 'json'})
100 109
101 expected_check_flake_result = { 110 expected_check_flake_result = {
102 'key': analysis.key.urlsafe(), 111 'key': analysis.key.urlsafe(),
103 'pass_rates': [[int(build_number), success_rate, data_point.task_id]], 112 'pass_rates': [[12345, 0.9, '1', 100, 'git_hash_2', 12344,
113 'git_hash_1']],
104 'analysis_status': STATUS_TO_DESCRIPTION.get(analysis.status), 114 'analysis_status': STATUS_TO_DESCRIPTION.get(analysis.status),
105 'master_name': master_name, 115 'master_name': master_name,
106 'builder_name': builder_name, 116 'builder_name': builder_name,
107 'build_number': int(build_number), 117 'build_number': int(build_number),
108 'step_name': step_name, 118 'step_name': step_name,
109 'test_name': test_name, 119 'test_name': test_name,
110 'request_time': '2016-10-01 12:10:00 UTC', 120 'request_time': '2016-10-01 12:10:00 UTC',
111 'task_number': 1, 121 'task_number': 1,
112 'error': None, 122 'error': None,
113 'iterations_to_rerun': 100, 123 'iterations_to_rerun': 100,
114 'pending_time': '00:00:05', 124 'pending_time': '00:00:05',
115 'duration': '00:59:55', 125 'duration': '00:59:55',
116 'suspected_flake': { 126 'suspected_flake': {
117 'build_number': 100, 127 'build_number': 100,
128 'commit_position': 12345,
129 'git_hash': 'git_hash_1',
118 'triage_result': 0 130 'triage_result': 0
119 }, 131 },
120 'version_number': 1, 132 'version_number': 1,
121 'show_debug_info': False 133 'show_debug_info': False
122 } 134 }
123 135
124 self.assertEquals(200, response.status_int) 136 self.assertEquals(200, response.status_int)
125 self.assertEqual(expected_check_flake_result, response.json_body) 137 self.assertEqual(expected_check_flake_result, response.json_body)
126 138
127 def testUnauthorizedUserCannotScheduleNewAnalysis(self): 139 def testUnauthorizedUserCannotScheduleNewAnalysis(self):
(...skipping 11 matching lines...) Expand all
139 params={ 151 params={
140 'master_name': master_name, 152 'master_name': master_name,
141 'builder_name': builder_name, 153 'builder_name': builder_name,
142 'build_number': build_number, 154 'build_number': build_number,
143 'step_name': step_name, 155 'step_name': step_name,
144 'test_name': test_name, 156 'test_name': test_name,
145 'format': 'json'}) 157 'format': 'json'})
146 158
147 @mock.patch.object(flake_analysis_service, 'ScheduleAnalysisForFlake', 159 @mock.patch.object(flake_analysis_service, 'ScheduleAnalysisForFlake',
148 return_value=False) 160 return_value=False)
149 def testRequestExistingAnalysis(self, _): 161 @mock.patch.object(check_flake, '_GetSuspectedFlakeInfo',
162 return_value={
163 'build_number': 100,
164 'commit_position': 12345,
165 'git_hash': 'a_git_hash',
166 'triage_result': 0})
167 @mock.patch.object(check_flake, '_GetCoordinatesData',
168 return_value=[[12345, 0.9, '1', 100, 'git_hash_2',
169 12344, 'git_hash_1']])
170 def testRequestExistingAnalysis(self, *_):
150 master_name = 'm' 171 master_name = 'm'
151 builder_name = 'b' 172 builder_name = 'b'
152 build_number = 123 173 build_number = 123
153 step_name = 's' 174 step_name = 's'
154 test_name = 't' 175 test_name = 't'
155 success_rate = 0.9 176 success_rate = 0.9
156 177
157 previous_analysis = MasterFlakeAnalysis.Create( 178 previous_analysis = MasterFlakeAnalysis.Create(
158 master_name, builder_name, build_number - 1, step_name, test_name) 179 master_name, builder_name, build_number - 1, step_name, test_name)
159 data_point = DataPoint() 180 data_point = DataPoint()
(...skipping 22 matching lines...) Expand all
182 response = self.test_app.get('/waterfall/flake', params={ 203 response = self.test_app.get('/waterfall/flake', params={
183 'master_name': master_name, 204 'master_name': master_name,
184 'builder_name': builder_name, 205 'builder_name': builder_name,
185 'build_number': build_number, 206 'build_number': build_number,
186 'step_name': step_name, 207 'step_name': step_name,
187 'test_name': test_name, 208 'test_name': test_name,
188 'format': 'json'}) 209 'format': 'json'})
189 210
190 expected_check_flake_result = { 211 expected_check_flake_result = {
191 'key': previous_analysis.key.urlsafe(), 212 'key': previous_analysis.key.urlsafe(),
192 'pass_rates': [[build_number - 1, success_rate, None]], 213 'pass_rates': [[12345, 0.9, '1', 100, 'git_hash_2', 12344,
214 'git_hash_1']],
193 'analysis_status': STATUS_TO_DESCRIPTION.get(previous_analysis.status), 215 'analysis_status': STATUS_TO_DESCRIPTION.get(previous_analysis.status),
194 'master_name': master_name, 216 'master_name': master_name,
195 'builder_name': builder_name, 217 'builder_name': builder_name,
196 'build_number': build_number - 1, 218 'build_number': build_number - 1,
197 'step_name': step_name, 219 'step_name': step_name,
198 'test_name': test_name, 220 'test_name': test_name,
199 'request_time': '2016-10-01 12:10:00 UTC', 221 'request_time': '2016-10-01 12:10:00 UTC',
200 'task_number': 1, 222 'task_number': 1,
201 'error': None, 223 'error': None,
202 'iterations_to_rerun': 100, 224 'iterations_to_rerun': 100,
203 'pending_time': '00:00:05', 225 'pending_time': '00:00:05',
204 'duration': '00:59:55', 226 'duration': '00:59:55',
205 'suspected_flake': { 227 'suspected_flake': {
206 'build_number': 100, 228 'build_number': 100,
229 'commit_position': 12345,
230 'git_hash': 'a_git_hash',
207 'triage_result': 0 231 'triage_result': 0
208 }, 232 },
209 'version_number': 1, 233 'version_number': 1,
210 'show_debug_info': False 234 'show_debug_info': False
211 } 235 }
212 236
213 self.assertEqual(200, response.status_int) 237 self.assertEqual(200, response.status_int)
214 self.assertEqual(expected_check_flake_result, response.json_body) 238 self.assertEqual(expected_check_flake_result, response.json_body)
215 239
216 @mock.patch.object(flake_analysis_service, 'ScheduleAnalysisForFlake', 240 @mock.patch.object(flake_analysis_service, 'ScheduleAnalysisForFlake',
(...skipping 17 matching lines...) Expand all
234 self.test_app.get, 258 self.test_app.get,
235 '/waterfall/flake', 259 '/waterfall/flake',
236 params={ 260 params={
237 'master_name': master_name, 261 'master_name': master_name,
238 'builder_name': builder_name, 262 'builder_name': builder_name,
239 'build_number': build_number, 263 'build_number': build_number,
240 'step_name': step_name, 264 'step_name': step_name,
241 'test_name': test_name, 265 'test_name': test_name,
242 'format': 'json'}) 266 'format': 'json'})
243 267
268 @mock.patch.object(check_flake, '_GetSuspectedFlakeInfo',
269 return_value={
270 'build_number': 100,
271 'commit_position': 12345,
272 'git_hash': 'a_git_hash',
273 'triage_result': 0})
274 @mock.patch.object(check_flake, '_GetCoordinatesData',
275 return_value=[[12345, 0.9, '1', 100, 'git_hash_2',
276 12344, 'git_hash_1']])
244 @mock.patch.object(users, 'is_current_user_admin', return_value=True) 277 @mock.patch.object(users, 'is_current_user_admin', return_value=True)
245 def testGetTriageHistory(self, _): 278 def testGetTriageHistory(self, *_):
246 master_name = 'm' 279 master_name = 'm'
247 builder_name = 'b' 280 builder_name = 'b'
248 build_number = '123' 281 build_number = '123'
249 step_name = 's' 282 step_name = 's'
250 test_name = 't' 283 test_name = 't'
251 suspected_flake_build_number = 123 284 suspected_flake_build_number = 123
252 triage_result = 2 285 triage_result = 2
253 user_name = 'test' 286 user_name = 'test'
254 287
255 analysis = MasterFlakeAnalysis.Create( 288 analysis = MasterFlakeAnalysis.Create(
(...skipping 48 matching lines...) Expand 10 before | Expand all | Expand 10 after
304 'm', 'b', '1', None, 't', '').get('data', {}).get('error_message'), 337 'm', 'b', '1', None, 't', '').get('data', {}).get('error_message'),
305 'Step name must be specified') 338 'Step name must be specified')
306 self.assertEqual( 339 self.assertEqual(
307 CheckFlake()._ValidateInput( 340 CheckFlake()._ValidateInput(
308 'm', 'b', '1', 's', None, '').get('data', {}).get('error_message'), 341 'm', 'b', '1', 's', None, '').get('data', {}).get('error_message'),
309 'Test name must be specified') 342 'Test name must be specified')
310 self.assertEqual( 343 self.assertEqual(
311 CheckFlake()._ValidateInput( 344 CheckFlake()._ValidateInput(
312 'm', 'b', '1', 's', 't', 'a').get('data', {}).get('error_message'), 345 'm', 'b', '1', 's', 't', 'a').get('data', {}).get('error_message'),
313 'Bug id (optional) must be an int') 346 'Bug id (optional) must be an int')
347
348 def testGetSuspectedFlakeInfo(self):
349 analysis = MasterFlakeAnalysis.Create('m', 'b', 123, 's', 't')
350 analysis.suspected_flake_build_number = 123
351 data_point = DataPoint()
352 data_point.build_number = 123
353 data_point.pass_rate = 0.9
354 data_point.commit_position = 2
355 data_point.git_hash = 'git_hash_2'
356 data_point.previous_build_commit_position = 1
357 data_point.previous_build_git_hash = 'git_hash_1'
358 analysis.data_points.append(data_point)
359 analysis.Save()
360
361 expected_result = {
362 'build_number': analysis.suspected_flake_build_number,
363 'commit_position': 2,
364 'git_hash': 'git_hash_2',
365 'previous_build_commit_position': 1,
366 'previous_build_git_hash': 'git_hash_1',
367 'triage_result': 0
368 }
369 self.assertEqual(expected_result,
370 check_flake._GetSuspectedFlakeInfo(analysis))
371
372 def testGetCoordinatesData(self):
373 master_name = 'm'
374 builder_name = 'b'
375 build_number = 123
376 step_name = 's'
377 test_name = 't'
378 success_rate = .9
379 analysis = MasterFlakeAnalysis.Create(
380 master_name, builder_name, build_number, step_name, test_name)
381 data_point = DataPoint()
382 data_point.build_number = build_number
383 data_point.pass_rate = success_rate
384 data_point.commit_position = 2
385 data_point.git_hash = 'git_hash_2'
386 data_point.previous_build_commit_position = 1
387 data_point.previous_build_git_hash = 'git_hash_1'
388 analysis.data_points.append(data_point)
389 analysis.Save()
390
391 self.assertEqual([[2, success_rate, None, build_number, 'git_hash_2', 1,
392 'git_hash_1']],
393 check_flake._GetCoordinatesData(analysis))
394
395 def testFindSuspectedFlakeBuildDataPoint(self):
396 master_name = 'm'
397 builder_name = 'b'
398 build_number = 123
399 step_name = 's'
400 test_name = 't'
401 analysis = MasterFlakeAnalysis.Create(
402 master_name, builder_name, build_number, step_name, test_name)
403 analysis.suspected_flake_build_number = build_number
404 data_point_1 = DataPoint()
405 data_point_1.build_number = build_number - 1
406 data_point_1.pass_rate = 1
407 data_point_1.commit_position = 2
408 data_point_1.git_hash = 'git_hash_2'
409 data_point_1.previous_build_commit_position = 1
410 data_point_1.previous_build_git_hash = 'git_hash_1'
411 analysis.data_points.append(data_point_1)
412 data_point_2 = DataPoint()
413 data_point_2.build_number = build_number
414 data_point_2.pass_rate = 0.9
415 data_point_2.commit_position = 4
416 data_point_2.git_hash = 'git_hash_4'
417 data_point_2.previous_build_commit_position = 3
418 data_point_2.previous_build_git_hash = 'git_hash_3'
419 analysis.data_points.append(data_point_2)
420 analysis.Save()
421
422 self.assertEqual(data_point_2,
423 check_flake._FindSuspectedFlakeBuildDataPoint(analysis))
424
425 def testFindSuspectedFlakeBuildDataPointNotFound(self):
426 master_name = 'm'
427 builder_name = 'b'
428 build_number = 123
429 step_name = 's'
430 test_name = 't'
431 analysis = MasterFlakeAnalysis.Create(
432 master_name, builder_name, build_number, step_name, test_name)
433 analysis.suspected_flake_build_number = build_number
434 analysis.data_points = []
435 analysis.Save()
436
437 self.assertIsNone(check_flake._FindSuspectedFlakeBuildDataPoint(analysis))
OLDNEW
« no previous file with comments | « appengine/findit/handlers/flake/check_flake.py ('k') | appengine/findit/model/flake/master_flake_analysis.py » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698