Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(350)

Side by Side Diff: appengine/findit/waterfall/flake/test/recursive_flake_try_job_pipeline_test.py

Issue 2630433002: [Findit] Flake Checker: Pipeline to trigger try jobs to identify flake culprits (Closed)
Patch Set: Addressing comments Created 3 years, 11 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
(Empty)
1 # Copyright 2016 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
4
5 import mock
6
7 from gae_libs.gitiles.cached_gitiles_repository import CachedGitilesRepository
8 from libs.gitiles.change_log import ChangeLog
9
10 from common import constants
11 from common.pipeline_wrapper import pipeline_handlers
12 from common.waterfall import failure_type
13 from model import analysis_status
14 from model import result_status
15 from model.flake.flake_culprit import FlakeCulprit
16 from model.flake.flake_try_job import FlakeTryJob
17 from model.flake.master_flake_analysis import DataPoint
18 from model.flake.master_flake_analysis import MasterFlakeAnalysis
19 from waterfall.flake import recursive_flake_try_job_pipeline
20 from waterfall.flake.recursive_flake_try_job_pipeline import _CreateCulprit
21 from waterfall.flake.recursive_flake_try_job_pipeline import (
22 _GetNextCommitPosition)
23 from waterfall.flake.recursive_flake_try_job_pipeline import (
24 _GetTryJobDataPoints)
25 from waterfall.flake.recursive_flake_try_job_pipeline import (
26 _UpdateAnalysisTryJobStatusUponCompletion)
27 from waterfall.flake.recursive_flake_try_job_pipeline import (
28 NextCommitPositionPipeline)
29 from waterfall.flake.recursive_flake_try_job_pipeline import (
30 RecursiveFlakeTryJobPipeline)
31 from waterfall.test import wf_testcase
32 from waterfall.test.wf_testcase import DEFAULT_CONFIG_DATA
33
34
def _GenerateDataPoint(
    pass_rate=None, build_number=None, task_id=None, try_job_url=None,
    commit_position=None, git_hash=None, previous_build_commit_position=None,
    previous_build_git_hash=None, blame_list=None):
  """Builds a DataPoint test fixture with the given field values.

  Every field defaults to None except blame_list, which defaults to an
  empty list so tests can iterate over it without a None check.
  """
  point = DataPoint()
  point.pass_rate = pass_rate
  point.build_number = build_number
  point.task_id = task_id
  point.try_job_url = try_job_url
  point.commit_position = commit_position
  point.git_hash = git_hash
  point.previous_build_commit_position = previous_build_commit_position
  point.previous_build_git_hash = previous_build_git_hash
  point.blame_list = blame_list or []
  return point
50
51
class RecursiveFlakeTryJobPipelineTest(wf_testcase.WaterfallTestCase):
  """Tests for RecursiveFlakeTryJobPipeline and its helper functions."""
  app_module = pipeline_handlers._APP

  def testRecursiveFlakeTryJobPipeline(self):
    """A full run should create a try job and invoke the sub-pipelines."""
    master_name = 'm'
    builder_name = 'b'
    build_number = 100
    step_name = 's'
    test_name = 't'
    commit_position = 1000
    revision = 'r1000'
    try_job_id = 'try_job_id'

    analysis = MasterFlakeAnalysis.Create(
        master_name, builder_name, build_number, step_name, test_name)
    analysis.status = analysis_status.COMPLETED
    analysis.Save()

    try_job = FlakeTryJob.Create(
        master_name, builder_name, step_name, test_name, revision)

    try_job_result = {
        revision: {
            step_name: {
                'status': 'failed',
                'failures': [test_name],
                'valid': True,
                'pass_fail_counts': {
                    # Key by the test_name variable, consistent with
                    # 'failures' above (was the literal string 'test_name').
                    test_name: {
                        'pass_count': 28,
                        'fail_count': 72
                    }
                }
            }
        }
    }

    self.MockPipeline(
        recursive_flake_try_job_pipeline.ScheduleFlakeTryJobPipeline,
        try_job_id,
        expected_args=[master_name, builder_name, step_name, test_name,
                       revision])
    self.MockPipeline(
        recursive_flake_try_job_pipeline.MonitorTryJobPipeline,
        try_job_result,
        expected_args=[try_job.key.urlsafe(), failure_type.FLAKY_TEST,
                       try_job_id])
    self.MockPipeline(
        recursive_flake_try_job_pipeline.ProcessFlakeTryJobResultPipeline,
        None,
        expected_args=[revision, commit_position, try_job_result,
                       try_job.key.urlsafe(), analysis.key.urlsafe()])
    self.MockPipeline(
        recursive_flake_try_job_pipeline.NextCommitPositionPipeline,
        '',
        expected_args=[analysis.key.urlsafe(), try_job.key.urlsafe()])

    pipeline = RecursiveFlakeTryJobPipeline(
        analysis.key.urlsafe(), commit_position, revision)

    pipeline.start(queue_name=constants.DEFAULT_QUEUE)
    self.execute_queued_tasks()
    self.assertIsNotNone(
        FlakeTryJob.Get(master_name, builder_name, step_name, test_name,
                        revision))

  def testRecursiveFlakeTryJobPipelineDoNotStartIfError(self):
    """No try job should be started if the analysis already errored out."""
    master_name = 'm'
    builder_name = 'b'
    build_number = 100
    step_name = 's'
    test_name = 't'
    commit_position = 1000
    revision = 'r1000'

    analysis = MasterFlakeAnalysis.Create(
        master_name, builder_name, build_number, step_name, test_name)
    analysis.status = analysis_status.ERROR
    analysis.Save()

    pipeline = RecursiveFlakeTryJobPipeline(
        analysis.key.urlsafe(), commit_position, revision)

    pipeline.start(queue_name=constants.DEFAULT_QUEUE)
    self.execute_queued_tasks()
    # try_job_status untouched proves the pipeline bailed out early.
    self.assertIsNone(analysis.try_job_status)

  # NOTE: dropped a leftover `*_` parameter — this method has no
  # @mock.patch decorators, so no extra arguments are ever passed.
  def testNextCommitPositionPipeline(self):
    """With bisection still in progress, the next try job should start."""
    master_name = 'm'
    builder_name = 'b'
    build_number = 100
    step_name = 's'
    test_name = 't'
    git_hash = 'r99'

    try_job = FlakeTryJob.Create(
        master_name, builder_name, step_name, test_name, git_hash)
    try_job.status = analysis_status.COMPLETED
    try_job.put()

    analysis = MasterFlakeAnalysis.Create(
        master_name, builder_name, build_number, step_name, test_name)
    analysis.status = analysis_status.COMPLETED
    analysis.try_job_status = analysis_status.RUNNING
    analysis.data_points = [
        _GenerateDataPoint(
            pass_rate=0.9, commit_position=100, build_number=12345,
            previous_build_commit_position=90, blame_list=[
                'r91', 'r92', 'r93', 'r94', 'r95', 'r96', 'r97', 'r98', 'r99',
                'r100']),
        _GenerateDataPoint(pass_rate=0.9, commit_position=99, try_job_url='u')]
    analysis.suspected_flake_build_number = 12345
    analysis.Save()

    # Mutable cell so the mocked `start` can record that it was invoked.
    started = {'x': False}
    def MockedStart(*_):
      # Removed an incorrect `# pragma: no cover`: the assertion below
      # requires this line to execute, so it is always covered.
      started['x'] = True

    self.mock(
        recursive_flake_try_job_pipeline.RecursiveFlakeTryJobPipeline, 'start',
        MockedStart)

    NextCommitPositionPipeline().run(
        analysis.key.urlsafe(), try_job.key.urlsafe())
    self.assertTrue(started['x'])

  # NOTE: dropped a leftover `*_` parameter (no decorators on this method).
  def testNextCommitPositionPipelineCompleted(self):
    """The first stable->flaky transition should be flagged as the culprit."""
    master_name = 'm'
    builder_name = 'b'
    build_number = 100
    step_name = 's'
    test_name = 't'
    git_hash = 'r95'

    try_job = FlakeTryJob.Create(
        master_name, builder_name, step_name, test_name, git_hash)
    try_job.status = analysis_status.COMPLETED
    try_job.put()

    analysis = MasterFlakeAnalysis.Create(
        master_name, builder_name, build_number, step_name, test_name)
    analysis.status = analysis_status.COMPLETED
    analysis.try_job_status = analysis_status.RUNNING
    analysis.data_points = [
        _GenerateDataPoint(
            pass_rate=0.9, commit_position=100, build_number=12345,
            previous_build_commit_position=90, blame_list=[
                'r91', 'r92', 'r93', 'r94', 'r95', 'r96', 'r97', 'r98', 'r99',
                'r100']),
        _GenerateDataPoint(pass_rate=0.9, commit_position=99, try_job_url='u1'),
        _GenerateDataPoint(pass_rate=0.9, commit_position=97, try_job_url='u2'),
        _GenerateDataPoint(pass_rate=0.9, commit_position=95, try_job_url='u4'),
        _GenerateDataPoint(pass_rate=1.0, commit_position=94, try_job_url='u3')]
    analysis.suspected_flake_build_number = 12345
    analysis.Save()

    NextCommitPositionPipeline().run(
        analysis.key.urlsafe(), try_job.key.urlsafe())

    # Position 95 is flaky (0.9) while 94 is stable (1.0), so r95 is culprit.
    culprit = analysis.culprit
    self.assertEqual(git_hash, culprit.revision)
    self.assertEqual(95, culprit.commit_position)

  # NOTE: dropped a leftover `*_` parameter (no decorators on this method).
  def testNextCommitPositionNewlyAddedFlakyTest(self):
    """A test that did not exist before (pass_rate=-1) blames its own CL."""
    master_name = 'm'
    builder_name = 'b'
    build_number = 100
    step_name = 's'
    test_name = 't'
    git_hash = 'r100'

    try_job = FlakeTryJob.Create(
        master_name, builder_name, step_name, test_name, git_hash)
    try_job.status = analysis_status.COMPLETED
    try_job.put()

    analysis = MasterFlakeAnalysis.Create(
        master_name, builder_name, build_number, step_name, test_name)
    analysis.status = analysis_status.COMPLETED
    analysis.try_job_status = analysis_status.RUNNING
    analysis.data_points = [
        _GenerateDataPoint(
            pass_rate=0.9, commit_position=100, build_number=12345,
            previous_build_commit_position=98, blame_list=['r99', 'r100']),
        _GenerateDataPoint(pass_rate=-1, commit_position=99, try_job_url='id1')]
    analysis.suspected_flake_build_number = 12345
    analysis.Save()

    NextCommitPositionPipeline().run(
        analysis.key.urlsafe(), try_job.key.urlsafe())

    culprit = analysis.culprit
    self.assertEqual(git_hash, culprit.revision)
    self.assertEqual(100, culprit.commit_position)

  @mock.patch(
      ('waterfall.flake.recursive_flake_try_job_pipeline.'
       'RecursiveFlakeTryJobPipeline'))
  def testNextCommitPositionPipelineForFailedTryJob(self, mocked_pipeline):
    """A failed try job should abort recursion and record the error."""
    master_name = 'm'
    builder_name = 'b'
    build_number = 100
    step_name = 's'
    test_name = 't'
    revision = 'r97'
    error = {
        'code': 1,
        'message': 'some failure message',
    }

    try_job = FlakeTryJob.Create(
        master_name, builder_name, step_name, test_name, revision)
    try_job.status = analysis_status.ERROR
    try_job.error = error
    try_job.put()

    analysis = MasterFlakeAnalysis.Create(
        master_name, builder_name, build_number, step_name, test_name)
    analysis.put()

    NextCommitPositionPipeline().run(
        analysis.key.urlsafe(), try_job.key.urlsafe())
    mocked_pipeline.assert_not_called()
    self.assertEqual(error, analysis.error)

  @mock.patch.object(CachedGitilesRepository, 'GetChangeLog')
  def testCreateCulprit(self, mocked_module):
    """_CreateCulprit should copy revision/position/url from the change log."""
    revision = 'a1b2c3d4'
    commit_position = 12345
    url = 'url'
    repo_name = 'repo_name'
    change_log = ChangeLog(None, None, None, None, None, None, revision,
                           commit_position, None, None, url, None)
    mocked_module.return_value = change_log
    culprit = _CreateCulprit(revision, commit_position, repo_name)

    self.assertEqual(commit_position, culprit.commit_position)
    self.assertEqual(revision, culprit.revision)
    self.assertEqual(url, culprit.url)
    self.assertEqual(repo_name, culprit.repo_name)

  @mock.patch.object(CachedGitilesRepository, 'GetChangeLog', return_value=None)
  def testCreateCulpritNoLogs(self, _):
    """With no change log available, the culprit should have no url."""
    revision = 'a1b2c3d4'
    commit_position = 12345
    repo_name = 'repo_name'
    culprit = _CreateCulprit(revision, commit_position, repo_name)

    self.assertEqual(commit_position, culprit.commit_position)
    self.assertEqual(revision, culprit.revision)
    self.assertIsNone(culprit.url)
    self.assertEqual(repo_name, culprit.repo_name)

  def testUpdateAnalysisTryJobStatusUponCompletionFound(self):
    """Completion with a culprit marks the result FOUND_UNTRIAGED."""
    analysis = MasterFlakeAnalysis.Create('m', 'b', 123, 's', 't')
    culprit = FlakeCulprit.Create('repo_name', 'a1b2c3d4', 12345, 'url')
    _UpdateAnalysisTryJobStatusUponCompletion(
        analysis, culprit, analysis_status.COMPLETED, None)
    self.assertIsNone(analysis.error)
    self.assertEqual(culprit.revision, analysis.culprit.revision)
    self.assertEqual(analysis_status.COMPLETED, analysis.try_job_status)
    self.assertEqual(result_status.FOUND_UNTRIAGED, analysis.result_status)

  def testUpdateAnalysisTryJobStatusUponCompletionNotFound(self):
    """Completion without a culprit marks the result NOT_FOUND_UNTRIAGED."""
    analysis = MasterFlakeAnalysis.Create('m', 'b', 123, 's', 't')
    _UpdateAnalysisTryJobStatusUponCompletion(
        analysis, None, analysis_status.COMPLETED, None)
    self.assertIsNone(analysis.error)
    self.assertIsNone(analysis.culprit)
    self.assertEqual(analysis_status.COMPLETED, analysis.try_job_status)
    self.assertEqual(result_status.NOT_FOUND_UNTRIAGED, analysis.result_status)

  def testUpdateAnalysisTryJobStatusError(self):
    """An error status records the error and leaves result_status unset."""
    analysis = MasterFlakeAnalysis.Create('m', 'b', 123, 's', 't')
    _UpdateAnalysisTryJobStatusUponCompletion(
        analysis, None, analysis_status.ERROR, {'error': 'errror'})
    self.assertIsNotNone(analysis.error)
    self.assertIsNone(analysis.culprit)
    self.assertEqual(analysis_status.ERROR, analysis.try_job_status)
    self.assertIsNone(analysis.result_status)

  def testGetTryJobDataPointsNoTryJobsYet(self):
    """Only the suspected build's data point exists before any try jobs."""
    suspected_flake_build_number = 12345
    data_points = [
        _GenerateDataPoint(pass_rate=0.8, commit_position=100,
                           build_number=suspected_flake_build_number)]
    analysis = MasterFlakeAnalysis.Create('m', 'b', 123, 's', 't')
    analysis.suspected_flake_build_number = suspected_flake_build_number
    analysis.data_points = data_points

    self.assertEqual(data_points, _GetTryJobDataPoints(analysis))

  def testGetTryJobDataPointsWithTryJobs(self):
    """Data points from other builds should be filtered out."""
    suspected_flake_build_number = 12345
    all_data_points = [
        _GenerateDataPoint(pass_rate=0.8, commit_position=100,
                           build_number=suspected_flake_build_number),
        _GenerateDataPoint(pass_rate=1.0, commit_position=90,
                           build_number=suspected_flake_build_number - 1),
        _GenerateDataPoint(pass_rate=0.8, commit_position=99,
                           try_job_url='url')]
    # Keep the suspected build's point and the try-job point; drop the rest.
    expected_data_points = [all_data_points[0], all_data_points[2]]

    analysis = MasterFlakeAnalysis.Create('m', 'b', 123, 's', 't')
    analysis.suspected_flake_build_number = suspected_flake_build_number
    analysis.data_points = all_data_points

    self.assertEqual(expected_data_points, _GetTryJobDataPoints(analysis))

  def testGetNextFlakySingleFlakyDataPoint(self):
    """A single flaky point should bisect to the adjacent earlier commit."""
    data_points = [_GenerateDataPoint(pass_rate=0.8, commit_position=100)]
    next_commit_position, suspected_commit_position = _GetNextCommitPosition(
        data_points, DEFAULT_CONFIG_DATA['check_flake_try_job_settings'], 0)
    self.assertEqual(99, next_commit_position)
    self.assertIsNone(suspected_commit_position)

  def testGetNextMultipleFlakyDataPoints(self):
    """Consecutive flaky points should keep extending the search backwards."""
    data_points = [_GenerateDataPoint(pass_rate=0.8, commit_position=100),
                   _GenerateDataPoint(pass_rate=0.8, commit_position=99),
                   _GenerateDataPoint(pass_rate=0.8, commit_position=97),
                   _GenerateDataPoint(pass_rate=0.8, commit_position=94)]
    next_commit_position, suspected_commit_position = _GetNextCommitPosition(
        data_points, DEFAULT_CONFIG_DATA['check_flake_try_job_settings'], 0)
    self.assertEqual(90, next_commit_position)
    self.assertIsNone(suspected_commit_position)

  def testGetNextLowerBoundary(self):
    """The search must not go below the lower boundary commit position."""
    data_points = [_GenerateDataPoint(pass_rate=0.8, commit_position=2),
                   _GenerateDataPoint(pass_rate=0.8, commit_position=1)]
    next_commit_position, suspected_commit_position = _GetNextCommitPosition(
        data_points, DEFAULT_CONFIG_DATA['check_flake_try_job_settings'], 0)

    self.assertEqual(0, next_commit_position)
    self.assertIsNone(suspected_commit_position)

  def testSequentialSearchAtLowerBoundaryStable(self):
    """A stable boundary point triggers sequential search just above it."""
    data_points = [_GenerateDataPoint(pass_rate=0.8, commit_position=8),
                   _GenerateDataPoint(pass_rate=0.8, commit_position=3),
                   _GenerateDataPoint(pass_rate=1.0, commit_position=0)]
    next_commit_position, suspected_commit_position = _GetNextCommitPosition(
        data_points, DEFAULT_CONFIG_DATA['check_flake_try_job_settings'], 0)
    self.assertEqual(1, next_commit_position)
    self.assertIsNone(suspected_commit_position)

  def testSequentialSearchAtLowerBoundaryFlaky(self):
    """A flaky boundary point itself becomes the suspect."""
    data_points = [_GenerateDataPoint(pass_rate=0.8, commit_position=8),
                   _GenerateDataPoint(pass_rate=0.8, commit_position=3),
                   _GenerateDataPoint(pass_rate=0.8, commit_position=0)]
    next_commit_position, suspected_commit_position = _GetNextCommitPosition(
        data_points, DEFAULT_CONFIG_DATA['check_flake_try_job_settings'], 0)

    self.assertIsNone(next_commit_position)
    self.assertEqual(0, suspected_commit_position)

  def testReadyForSequential(self):
    """A flaky->stable gap narrows the search to sequential stepping."""
    data_points = [_GenerateDataPoint(pass_rate=0.8, commit_position=100),
                   _GenerateDataPoint(pass_rate=0.8, commit_position=99),
                   _GenerateDataPoint(pass_rate=0.8, commit_position=97),
                   _GenerateDataPoint(pass_rate=0.8, commit_position=94),
                   _GenerateDataPoint(pass_rate=1.0, commit_position=90)]

    next_commit_position, suspected_commit_position = _GetNextCommitPosition(
        data_points, DEFAULT_CONFIG_DATA['check_flake_try_job_settings'], 0)

    self.assertIsNone(suspected_commit_position)
    self.assertEqual(next_commit_position, 91)

  def testSequentialSearch(self):
    """Sequential search keeps advancing while points remain stable."""
    data_points = [_GenerateDataPoint(pass_rate=0.8, commit_position=100),
                   _GenerateDataPoint(pass_rate=0.8, commit_position=99),
                   _GenerateDataPoint(pass_rate=0.8, commit_position=97),
                   _GenerateDataPoint(pass_rate=0.8, commit_position=94),
                   _GenerateDataPoint(pass_rate=1.0, commit_position=92),
                   _GenerateDataPoint(pass_rate=1.0, commit_position=91),
                   _GenerateDataPoint(pass_rate=1.0, commit_position=90)]
    next_commit_position, suspected_commit_position = _GetNextCommitPosition(
        data_points, DEFAULT_CONFIG_DATA['check_flake_try_job_settings'], 0)

    self.assertIsNone(suspected_commit_position)
    self.assertEqual(next_commit_position, 93)

  def testSuspectedCommitPosition(self):
    """An adjacent flaky/stable pair pinpoints the flaky commit."""
    data_points = [_GenerateDataPoint(pass_rate=0.8, commit_position=100),
                   _GenerateDataPoint(pass_rate=1.0, commit_position=99)]
    next_commit_position, suspected_commit_position = _GetNextCommitPosition(
        data_points, DEFAULT_CONFIG_DATA['check_flake_try_job_settings'], 0)

    self.assertIsNone(next_commit_position)
    self.assertEqual(suspected_commit_position, 100)

  def testSuspectedCommitPositionAfterSequentialSearch(self):
    """After a full sequential sweep, the earliest flaky commit is suspect."""
    data_points = [_GenerateDataPoint(pass_rate=0.8, commit_position=100),
                   _GenerateDataPoint(pass_rate=0.8, commit_position=99),
                   _GenerateDataPoint(pass_rate=0.8, commit_position=97),
                   _GenerateDataPoint(pass_rate=0.8, commit_position=94),
                   _GenerateDataPoint(pass_rate=1.0, commit_position=93),
                   _GenerateDataPoint(pass_rate=1.0, commit_position=92),
                   _GenerateDataPoint(pass_rate=1.0, commit_position=91),
                   _GenerateDataPoint(pass_rate=1.0, commit_position=90)]
    next_commit_position, suspected_commit_position = _GetNextCommitPosition(
        data_points, DEFAULT_CONFIG_DATA['check_flake_try_job_settings'], 0)

    self.assertEqual(94, suspected_commit_position)
    self.assertIsNone(next_commit_position)

  def testCommitIntroducedFlakiness(self):
    """A flaky point preceded by a nonexistent test points at that commit."""
    data_points = [_GenerateDataPoint(pass_rate=0.8, commit_position=100),
                   _GenerateDataPoint(pass_rate=-1, commit_position=99)]
    next_commit_position, suspected_commit_position = _GetNextCommitPosition(
        data_points, DEFAULT_CONFIG_DATA['check_flake_try_job_settings'], 0)

    # This case should be handled by the caller of _GetNextCommitPosition.
    self.assertIsNone(suspected_commit_position)
    self.assertEqual(100, next_commit_position)

  def testTestDoesNotExist(self):
    """A lone nonexistent-test point yields neither a next step nor a suspect.

    This case should not be valid, since suspected flake build number would
    not have been None and not triggered try jobs to begin with.
    """
    data_points = [_GenerateDataPoint(pass_rate=-1, commit_position=100)]
    next_commit_position, suspected_commit_position = _GetNextCommitPosition(
        data_points, DEFAULT_CONFIG_DATA['check_flake_try_job_settings'], 0)

    self.assertIsNone(suspected_commit_position)
    self.assertIsNone(next_commit_position)
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698