Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(211)

Side by Side Diff: appengine/findit/findit_api.py

Issue 2435983003: [Findit] Asynchronously process flake reports from chromium-try-flakes. (Closed)
Patch Set: fix nit. Created 4 years, 2 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 # Copyright 2015 The Chromium Authors. All rights reserved. 1 # Copyright 2015 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be 2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file. 3 # found in the LICENSE file.
4 4
5 """This module is to provide Findit service APIs through Cloud Endpoints: 5 """This module is to provide Findit service APIs through Cloud Endpoints:
6 6
7 Current APIs include: 7 Current APIs include:
8 1. Analysis of compile/test failures in Chromium waterfalls. 8 1. Analysis of compile/test failures in Chromium waterfalls.
9 Analyzes failures and detects suspected CLs. 9 Analyzes failures and detects suspected CLs.
10 2. Analysis of flakes on Commit Queue. 10 2. Analysis of flakes on Commit Queue.
11 """ 11 """
12 12
13 import json 13 import json
14 import logging 14 import logging
15 import pickle
15 16
16 import endpoints 17 import endpoints
17 from google.appengine.api import taskqueue 18 from google.appengine.api import taskqueue
18 from protorpc import messages 19 from protorpc import messages
19 from protorpc import remote 20 from protorpc import remote
20 21
21 from common import appengine_util 22 from common import appengine_util
22 from common import auth_util 23 from common import auth_util
23 from common import constants 24 from common import constants
24 from common import time_util 25 from common import time_util
(...skipping 77 matching lines...) Expand 10 before | Expand all | Expand 10 after
102 103
103 104
104 class _Build(messages.Message): 105 class _Build(messages.Message):
105 master_name = messages.StringField(1, required=True) 106 master_name = messages.StringField(1, required=True)
106 builder_name = messages.StringField(2, required=True) 107 builder_name = messages.StringField(2, required=True)
107 build_number = messages.IntegerField( 108 build_number = messages.IntegerField(
108 3, variant=messages.Variant.INT32, required=True) 109 3, variant=messages.Variant.INT32, required=True)
109 110
110 111
111 class _FlakeAnalysis(messages.Message): 112 class _FlakeAnalysis(messages.Message):
112 analysis_triggered = messages.BooleanField(1, required=True) 113 queued = messages.BooleanField(1, required=True)
113 114
114 115
115 def _TriggerNewAnalysesOnDemand(builds): 116 def _AsyncProcessFailureAnalysisRequests(builds):
116 """Pushes a task to run on the backend to trigger new analyses on demand.""" 117 """Pushes a task on the backend to process requests of failure analysis."""
117 target = appengine_util.GetTargetNameForModule(constants.WATERFALL_BACKEND) 118 target = appengine_util.GetTargetNameForModule(constants.WATERFALL_BACKEND)
118 payload = json.dumps({'builds': builds}) 119 payload = json.dumps({'builds': builds})
119 taskqueue.add( 120 taskqueue.add(
120 url=constants.WATERFALL_TRIGGER_ANALYSIS_URL, 121 url=constants.WATERFALL_PROCESS_FAILURE_ANALYSIS_REQUESTS_URL,
121 payload=payload, target=target, 122 payload=payload, target=target,
122 queue_name=constants.WATERFALL_FAILURE_ANALYSIS_REQUEST_QUEUE) 123 queue_name=constants.WATERFALL_FAILURE_ANALYSIS_REQUEST_QUEUE)
123 124
124 125
126 def _AsyncProcessFlakeReport(flake_analysis_request, user_email, is_admin):
127 """Pushes a task on the backend to process the flake report."""
128 target = appengine_util.GetTargetNameForModule(constants.WATERFALL_BACKEND)
129 payload = pickle.dumps((flake_analysis_request, user_email, is_admin))
130 taskqueue.add(
131 url=constants.WATERFALL_PROCESS_FLAKE_ANALYSIS_REQUEST_URL,
132 payload=payload, target=target,
133 queue_name=constants.WATERFALL_FLAKE_ANALYSIS_REQUEST_QUEUE)
134
135
125 # Create a Cloud Endpoints API. 136 # Create a Cloud Endpoints API.
126 # https://cloud.google.com/appengine/docs/python/endpoints/create_api 137 # https://cloud.google.com/appengine/docs/python/endpoints/create_api
127 @endpoints.api(name='findit', version='v1', description='FindIt API') 138 @endpoints.api(name='findit', version='v1', description='FindIt API')
128 class FindItApi(remote.Service): 139 class FindItApi(remote.Service):
129 """FindIt API v1.""" 140 """FindIt API v1."""
130 141
131 def _GenerateBuildFailureAnalysisResult( 142 def _GenerateBuildFailureAnalysisResult(
132 self, build, suspected_cls_in_result, step_name, 143 self, build, suspected_cls_in_result, step_name,
133 first_failure, test_name=None, 144 first_failure, test_name=None,
134 analysis_approach=_AnalysisApproach.HEURISTIC): 145 analysis_approach=_AnalysisApproach.HEURISTIC):
(...skipping 136 matching lines...) Expand 10 before | Expand all | Expand 10 after
271 282
272 if heuristic_analysis.failed or not heuristic_analysis.result: 283 if heuristic_analysis.failed or not heuristic_analysis.result:
273 # Bail out if the analysis failed or there is no result yet. 284 # Bail out if the analysis failed or there is no result yet.
274 continue 285 continue
275 286
276 self._GenerateResultsForBuild(build, heuristic_analysis, results) 287 self._GenerateResultsForBuild(build, heuristic_analysis, results)
277 288
278 logging.info('%d build failure(s), while %d are supported', 289 logging.info('%d build failure(s), while %d are supported',
279 len(request.builds), len(supported_builds)) 290 len(request.builds), len(supported_builds))
280 try: 291 try:
281 _TriggerNewAnalysesOnDemand(supported_builds) 292 _AsyncProcessFailureAnalysisRequests(supported_builds)
282 except Exception: # pragma: no cover. 293 except Exception: # pragma: no cover.
283 # If we fail to post a task to the task queue, we ignore and wait for next 294 # If we fail to post a task to the task queue, we ignore and wait for next
284 # request. 295 # request.
285 logging.exception('Failed to add analysis request to task queue: %s', 296 logging.exception('Failed to add analysis request to task queue: %s',
286 repr(supported_builds)) 297 repr(supported_builds))
287 298
288 return _BuildFailureAnalysisResultCollection(results=results) 299 return _BuildFailureAnalysisResultCollection(results=results)
289 300
290 @endpoints.method(_Flake, _FlakeAnalysis, path='flake', name='flake') 301 @endpoints.method(_Flake, _FlakeAnalysis, path='flake', name='flake')
291 def AnalyzeFlake(self, request): 302 def AnalyzeFlake(self, request):
292 """Analyze a flake on Commit Queue. Currently only supports flaky tests.""" 303 """Analyze a flake on Commit Queue. Currently only supports flaky tests."""
293 user_email = auth_util.GetUserEmail() 304 user_email = auth_util.GetUserEmail()
294 is_admin = auth_util.IsCurrentUserAdmin() 305 is_admin = auth_util.IsCurrentUserAdmin()
295 306
307 if not flake_analysis_service.IsAuthorizedUser(user_email, is_admin):
308 raise endpoints.UnauthorizedException(
309 'No permission to run a new analysis! User is %s' % user_email)
310
296 def CreateFlakeAnalysisRequest(flake): 311 def CreateFlakeAnalysisRequest(flake):
297 analysis_request = FlakeAnalysisRequest.Create( 312 analysis_request = FlakeAnalysisRequest.Create(
298 flake.name, flake.is_step, flake.bug_id) 313 flake.name, flake.is_step, flake.bug_id)
299 for step in flake.build_steps: 314 for step in flake.build_steps:
300 analysis_request.AddBuildStep(step.master_name, step.builder_name, 315 analysis_request.AddBuildStep(step.master_name, step.builder_name,
301 step.build_number, step.step_name, 316 step.build_number, step.step_name,
302 time_util.GetUTCNow()) 317 time_util.GetUTCNow())
303 return analysis_request 318 return analysis_request
304 319
305 logging.info('Flake: %s', CreateFlakeAnalysisRequest(request)) 320 flake_analysis_request = CreateFlakeAnalysisRequest(request)
306 analysis_triggered = flake_analysis_service.ScheduleAnalysisForFlake( 321 logging.info('Flake: %s', flake_analysis_request)
307 CreateFlakeAnalysisRequest(request), user_email, is_admin)
308 322
309 if analysis_triggered is None: 323 try:
310 raise endpoints.UnauthorizedException( 324 _AsyncProcessFlakeReport(flake_analysis_request, user_email, is_admin)
311 'No permission for a new analysis! User is %s' % user_email) 325 queued = True
326 except Exception:
327 # Ignore the report when we fail to queue it for async processing.
328 queued = False
329 logging.exception('Failed to queue flake report for async processing')
312 330
313 return _FlakeAnalysis(analysis_triggered=analysis_triggered) 331 return _FlakeAnalysis(queued=queued)
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698