Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(386)

Side by Side Diff: appengine/chromium_try_flakes/handlers/flake_issues.py

Issue 2387153002: Report all flakes reported to issue tracker also to FindIt (Closed)
Patch Set: Addressed comments Created 4 years, 2 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 # Copyright 2015 The Chromium Authors. All rights reserved. 1 # Copyright 2015 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be 2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file. 3 # found in the LICENSE file.
4 4
5 """Task queue endpoints for creating and updating issues on issue tracker.""" 5 """Task queue endpoints for creating and updating issues on issue tracker."""
6 6
7 import datetime 7 import datetime
8 import json 8 import json
9 import logging 9 import logging
10 import urllib2 10 import urllib2
11 import webapp2 11 import webapp2
12 12
13 from google.appengine.api import app_identity 13 from google.appengine.api import app_identity
14 from google.appengine.api import taskqueue 14 from google.appengine.api import taskqueue
15 from google.appengine.api import urlfetch 15 from google.appengine.api import urlfetch
16 from google.appengine.api import users 16 from google.appengine.api import users
17 from google.appengine.ext import ndb 17 from google.appengine.ext import ndb
18 18
19 from apiclient.errors import HttpError 19 from apiclient.errors import HttpError
20 from findit import findit
20 import gae_ts_mon 21 import gae_ts_mon
21 from issue_tracker import issue_tracker_api, issue 22 from issue_tracker import issue_tracker_api, issue
22 from model.flake import ( 23 from model.flake import (
23 Flake, FlakeOccurrence, FlakeUpdate, FlakeUpdateSingleton, FlakyRun) 24 Flake, FlakeOccurrence, FlakeUpdate, FlakeUpdateSingleton, FlakyRun)
24 from model.build_run import BuildRun 25 from model.build_run import BuildRun
25 from status import build_result, util 26 from status import build_result, util
26 from test_results.util import normalize_test_type, flatten_tests_trie 27 from test_results.util import normalize_test_type, flatten_tests_trie
27 28
28 29
29 MAX_UPDATED_ISSUES_PER_DAY = 10 30 MAX_UPDATED_ISSUES_PER_DAY = 10
(...skipping 178 matching lines...) Expand 10 before | Expand all | Expand 10 after
208 # TODO(sergiyb): Find a way to do this asynchronously to avoid block 209 # TODO(sergiyb): Find a way to do this asynchronously to avoid block
209 # transaction-bound method calling this. Possible solutions are to use 210 # transaction-bound method calling this. Possible solutions are to use
210 # put_multi_sync (need to find a way to test this) or to use deferred 211 # put_multi_sync (need to find a way to test this) or to use deferred
211 # execution. 212 # execution.
212 for fr in new_flaky_runs: 213 for fr in new_flaky_runs:
213 for occ in fr.flakes: 214 for occ in fr.flakes:
214 if occ.failure == name: 215 if occ.failure == name:
215 occ.issue_id = issue_id 216 occ.issue_id = issue_id
216 ndb.put_multi(new_flaky_runs) 217 ndb.put_multi(new_flaky_runs)
217 218
@staticmethod
@ndb.non_transactional
def _report_flakes_to_findit(flake, flaky_runs):
  """Reports the given flake occurrences to FindIt.

  Runs outside any enclosing NDB transaction (best-effort side channel to
  the FindIt service), so a failure here does not roll back issue updates.

  Args:
    flake: Flake entity whose occurrences are being reported.
    flaky_runs: FlakyRun entities to report alongside the flake.
  """
  findit_client = findit.FindItAPI()
  findit_client.flake(flake, flaky_runs)
218 @ndb.transactional 224 @ndb.transactional
219 def _update_issue(self, api, flake, new_flakes, now): 225 def _update_issue(self, api, flake, new_flakes, now):
220 """Updates an issue on the issue tracker.""" 226 """Updates an issue on the issue tracker."""
221 flake_issue = api.getIssue(flake.issue_id) 227 flake_issue = api.getIssue(flake.issue_id)
222 228
223 # Handle cases when an issue has been closed. We need to do this in a loop 229 # Handle cases when an issue has been closed. We need to do this in a loop
224 # because we might move onto another issue. 230 # because we might move onto another issue.
225 seen_issues = set() 231 seen_issues = set()
226 while not flake_issue.open: 232 while not flake_issue.open:
227 if flake_issue.status == 'Duplicate': 233 if flake_issue.status == 'Duplicate':
(...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after
264 'suffix': ' %s' % suffix if suffix else ''} 270 'suffix': ' %s' % suffix if suffix else ''}
265 api.update(flake_issue, comment=new_flaky_runs_msg) 271 api.update(flake_issue, comment=new_flaky_runs_msg)
266 self.issue_updates.increment_by(1, {'operation': 'update'}) 272 self.issue_updates.increment_by(1, {'operation': 'update'})
267 logging.info('Updated issue %d for flake %s with %d flake runs', 273 logging.info('Updated issue %d for flake %s with %d flake runs',
268 flake.issue_id, flake.name, len(new_flakes)) 274 flake.issue_id, flake.name, len(new_flakes))
269 self._update_new_occurrences_with_issue_id( 275 self._update_new_occurrences_with_issue_id(
270 flake.name, new_flakes, flake_issue.id) 276 flake.name, new_flakes, flake_issue.id)
271 flake.num_reported_flaky_runs = len(flake.occurrences) 277 flake.num_reported_flaky_runs = len(flake.occurrences)
272 flake.issue_last_updated = now 278 flake.issue_last_updated = now
273 279
280 self._report_flakes_to_findit(flake, new_flakes)
281
274 @ndb.transactional 282 @ndb.transactional
275 def _create_issue(self, api, flake, new_flakes, now): 283 def _create_issue(self, api, flake, new_flakes, now):
276 _, qlabel = get_queue_details(flake.name) 284 _, qlabel = get_queue_details(flake.name)
277 labels = ['Type-Bug', 'Pri-1', 'Via-TryFlakes', qlabel] 285 labels = ['Type-Bug', 'Pri-1', 'Via-TryFlakes', qlabel]
278 if is_trooper_flake(flake.name): 286 if is_trooper_flake(flake.name):
279 other_queue_msg = TROOPER_QUEUE_MSG 287 other_queue_msg = TROOPER_QUEUE_MSG
280 else: 288 else:
281 other_queue_msg = SHERIFF_QUEUE_MSG 289 other_queue_msg = SHERIFF_QUEUE_MSG
282 290
283 summary = SUMMARY_TEMPLATE % {'name': flake.name} 291 summary = SUMMARY_TEMPLATE % {'name': flake.name}
(...skipping 15 matching lines...) Expand all
299 flake_issue = api.create(new_issue) 307 flake_issue = api.create(new_issue)
300 flake.issue_id = flake_issue.id 308 flake.issue_id = flake_issue.id
301 self._update_new_occurrences_with_issue_id( 309 self._update_new_occurrences_with_issue_id(
302 flake.name, new_flakes, flake_issue.id) 310 flake.name, new_flakes, flake_issue.id)
303 flake.num_reported_flaky_runs = len(flake.occurrences) 311 flake.num_reported_flaky_runs = len(flake.occurrences)
304 flake.issue_last_updated = now 312 flake.issue_last_updated = now
305 self.issue_updates.increment_by(1, {'operation': 'create'}) 313 self.issue_updates.increment_by(1, {'operation': 'create'})
306 logging.info('Created a new issue %d for flake %s', flake.issue_id, 314 logging.info('Created a new issue %d for flake %s', flake.issue_id,
307 flake.name) 315 flake.name)
308 316
317 self._report_flakes_to_findit(flake, new_flakes)
318
309 # Find all flakes in the current flakiness period to compute metrics. The 319 # Find all flakes in the current flakiness period to compute metrics. The
310 # flakiness period is a series of flakes with a gap no larger than 320 # flakiness period is a series of flakes with a gap no larger than
311 # MAX_GAP_FOR_FLAKINESS_PERIOD seconds. 321 # MAX_GAP_FOR_FLAKINESS_PERIOD seconds.
312 period_flakes = self._find_flakiness_period_occurrences(flake) 322 period_flakes = self._find_flakiness_period_occurrences(flake)
313 323
314 # Compute the delay since the first flake in the current flakiness period. 324 # Compute the delay since the first flake in the current flakiness period.
315 time_since_first_flake = ( 325 time_since_first_flake = (
316 now - period_flakes[0].failure_run_time_finished).total_seconds() 326 now - period_flakes[0].failure_run_time_finished).total_seconds()
317 self.time_since_first_flake.set(time_since_first_flake) 327 self.time_since_first_flake.set(time_since_first_flake)
318 logging.info('Reported time_since_first_flake %d for flake %s', 328 logging.info('Reported time_since_first_flake %d for flake %s',
(...skipping 393 matching lines...) Expand 10 before | Expand all | Expand 10 after
712 return 722 return
713 723
714 key = self.request.get('key') 724 key = self.request.get('key')
715 flake = ndb.Key(urlsafe=key).get() 725 flake = ndb.Key(urlsafe=key).get()
716 flake.issue_id = issue_id 726 flake.issue_id = issue_id
717 flake.put() 727 flake.put()
718 728
719 logging.info('%s updated issue_id for flake %s to %d.', user_email, 729 logging.info('%s updated issue_id for flake %s to %d.', user_email,
720 flake.name, issue_id) 730 flake.name, issue_id)
721 self.redirect('/all_flake_occurrences?key=%s' % key) 731 self.redirect('/all_flake_occurrences?key=%s' % key)
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698