Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(224)

Side by Side Diff: appengine/findit/handlers/crash/crash_handler.py

Issue 2455053004: Moving ScheduleNewAnalysis to break the cycle (Closed)
Patch Set: rebase Created 4 years, 1 month ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 # Copyright 2016 The Chromium Authors. All rights reserved. 1 # Copyright 2016 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be 2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file. 3 # found in the LICENSE file.
4 4
5 import base64 5 import base64
6 import json 6 import json
7 import logging 7 import logging
8 8
9 from common import constants 9 from common import constants
10 from common import appengine_util
10 from common.base_handler import BaseHandler 11 from common.base_handler import BaseHandler
11 from common.base_handler import Permission 12 from common.base_handler import Permission
12 from crash.crash_pipeline import FinditForClientID 13 from crash import crash_pipeline
14 from crash.crash_pipeline import CrashWrapperPipeline
13 from crash.crash_report import CrashReport 15 from crash.crash_report import CrashReport
14 16
15 17
16 class CrashHandler(BaseHandler): 18 class CrashHandler(BaseHandler):
17 PERMISSION_LEVEL = Permission.ANYONE 19 PERMISSION_LEVEL = Permission.ANYONE
18 20
19 def HandlePost(self): 21 def HandlePost(self):
20 """Handles push delivery from Pub/Sub for crash data. 22 """Handles push delivery from Pub/Sub for crash data.
21 23
22 The crash data should be in the following json format: 24 The crash data should be in the following json format:
(...skipping 70 matching lines...) Expand 10 before | Expand all | Expand 10 after
93 } 95 }
94 """ 96 """
95 try: 97 try:
96 received_message = json.loads(self.request.body) 98 received_message = json.loads(self.request.body)
97 pubsub_message = received_message['message'] 99 pubsub_message = received_message['message']
98 crash_data = json.loads(base64.b64decode(pubsub_message['data'])) 100 crash_data = json.loads(base64.b64decode(pubsub_message['data']))
99 101
100 logging.info('Processing message %s from subscription %s.', 102 logging.info('Processing message %s from subscription %s.',
101 pubsub_message['message_id'], 103 pubsub_message['message_id'],
102 received_message['subscription']) 104 received_message['subscription'])
105 logging.info('Crash data is %s', json.dumps(crash_data))
106 ScheduleNewAnalysis(crash_data)
103 107
104 logging.info('Crash data is %s', json.dumps(crash_data))
105
106 client_id = crash_data['client_id']
107 FinditForClientID(client_id).ScheduleNewAnalysis(crash_data,
108 queue_name=constants.CRASH_ANALYSIS_QUEUE[client_id])
109 except (KeyError, ValueError): # pragma: no cover. 108 except (KeyError, ValueError): # pragma: no cover.
110 # TODO: save exception in datastore and create a page to show them. 109 # TODO: save exception in datastore and create a page to show them.
111 logging.exception('Failed to process crash message') 110 logging.exception('Failed to process crash message')
112 logging.info(self.request.body) 111 logging.info(self.request.body)
112
113
# TODO(http://crbug.com/659346): we don't cover anything after the
# call to _NeedsNewAnalysis.
def ScheduleNewAnalysis(crash_data):
  """Creates a pipeline object to perform the analysis, and starts it.

  If we can detect that the analysis doesn't need to be performed
  (e.g., it was already performed, or the ``crash_data`` is empty so
  there's nothing we can do), then we will skip creating the pipeline
  at all.

  Args:
    crash_data (dict): Deserialized JSON payload of the Pub/Sub crash
      message. Must contain at least the ``client_id`` and
      ``crash_identifiers`` keys; may be rewritten by the client's
      ``CheckPolicy`` before analysis is scheduled.

  Returns:
    True if we started a new pipeline; False otherwise.
  """
  client_id = crash_data['client_id']
  # N.B., must call FinditForClientID indirectly, for mock testing.
  findit_client = crash_pipeline.FinditForClientID(client_id)

  # Check policy and modify the crash_data as needed. A None result
  # means policy rejected this crash, so there is nothing to schedule.
  crash_data = findit_client.CheckPolicy(crash_data)
  if crash_data is None:
    return False

  # Detect the regression range, and decide if we actually need to
  # run a new analysis or not.
  if not findit_client._NeedsNewAnalysis(crash_data):
    return False

  crash_identifiers = crash_data['crash_identifiers']
  # N.B., we cannot pass ``self`` directly to the _pipeline_cls, because
  # it is not JSON-serializable (and there's no way to make it such,
  # since JSON-serializability is defined by JSON-encoders rather than
  # as methods on the objects being encoded).
  pipeline = CrashWrapperPipeline(client_id, crash_identifiers)
  # Attribute defined outside __init__ - pylint: disable=W0201
  pipeline.target = appengine_util.GetTargetNameForModule(
      constants.CRASH_BACKEND[client_id])
  queue_name = constants.CRASH_ANALYSIS_QUEUE[client_id]
  pipeline.start(queue_name=queue_name)
  logging.info('New %s analysis is scheduled for %s', client_id,
               repr(crash_identifiers))
  return True
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698