Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(8)

Unified Diff: appengine/findit/crash/crash_pipeline.py

Issue 2299883005: [Findit] Add findit_for_client to do analysis based on client_id (Closed)
Patch Set: Add an if else. Created 4 years, 3 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View side-by-side diff with in-line comments
Download patch
Index: appengine/findit/crash/crash_pipeline.py
diff --git a/appengine/findit/crash/crash_pipeline.py b/appengine/findit/crash/crash_pipeline.py
index 7d4c0eba8609d2aaf87ed6ee5afa09de74bf3b86..1940f9a49a0a1d07169523226f65370d5811de0a 100644
--- a/appengine/findit/crash/crash_pipeline.py
+++ b/appengine/findit/crash/crash_pipeline.py
@@ -15,18 +15,16 @@ from common import pubsub_util
from common import time_util
from common.pipeline_wrapper import BasePipeline
from common.pipeline_wrapper import pipeline
-from crash import findit_for_fracas
+from crash import findit_for_client
from model import analysis_status
from model.crash.crash_config import CrashConfig
-from model.crash.fracas_crash_analysis import FracasCrashAnalysis
-
-_FINDIT_FEEDBACK_URL_TEMPLATE = '%s/crash/fracas-result-feedback?key=%s'
class CrashBasePipeline(BasePipeline):
- def __init__(self, crash_identifiers):
- super(CrashBasePipeline, self).__init__(crash_identifiers)
+ def __init__(self, crash_identifiers, client_id):
+ super(CrashBasePipeline, self).__init__(crash_identifiers, client_id)
self.crash_identifiers = crash_identifiers
+ self.client_id = client_id
def run(self, *args, **kwargs):
raise NotImplementedError()
@@ -38,7 +36,8 @@ class CrashAnalysisPipeline(CrashBasePipeline):
return
logging.error('Aborted analysis for %s', repr(self.crash_identifiers))
- analysis = FracasCrashAnalysis.Get(self.crash_identifiers)
+ analysis = findit_for_client.GetAnalysisForClient(self.crash_identifiers,
+ self.client_id)
analysis.status = analysis_status.ERROR
analysis.put()
@@ -46,8 +45,9 @@ class CrashAnalysisPipeline(CrashBasePipeline):
self._SetErrorIfAborted(self.was_aborted)
# Arguments number differs from overridden method - pylint: disable=W0221
- def run(self, crash_identifiers):
- analysis = FracasCrashAnalysis.Get(crash_identifiers)
+ def run(self, crash_identifiers, client_id):
+ analysis = findit_for_client.GetAnalysisForClient(crash_identifiers,
+ client_id)
# Update analysis status.
analysis.pipeline_status_path = self.pipeline_status_path()
@@ -57,9 +57,7 @@ class CrashAnalysisPipeline(CrashBasePipeline):
analysis.put()
# Run the analysis.
- result, tags = findit_for_fracas.FindCulpritForChromeCrash(
- analysis.signature, analysis.platform, analysis.stack_trace,
- analysis.crashed_version, analysis.historical_metadata)
+ result, tags = findit_for_client.FindCulprit(analysis)
# Update analysis status and save the analysis result.
analysis.completed_time = time_util.GetUTCNow()
@@ -75,49 +73,46 @@ class CrashAnalysisPipeline(CrashBasePipeline):
class PublishResultPipeline(CrashBasePipeline):
def finalized(self):
if self.was_aborted: # pragma: no cover.
- logging.error('Failed to publish analysis result for %s',
- repr(self.crash_identifiers))
-
- def PostProcessResults(self, analysis, crash_identifiers):
- analysis_result = copy.deepcopy(analysis.result)
- analysis_result['feedback_url'] = _FINDIT_FEEDBACK_URL_TEMPLATE % (
- app_identity.get_default_version_hostname(), analysis.key.urlsafe())
- if analysis_result['found']:
- for cl in analysis_result['suspected_cls']:
- cl['confidence'] = round(cl['confidence'], 2)
- cl.pop('reason', None)
-
- return {
- 'crash_identifiers': crash_identifiers,
- 'client_id': analysis.client_id,
- 'result': analysis_result,
- }
+ logging.error('Failed to publish %s analysis result for %s',
+ repr(self.crash_identifiers), self.client_id)
+
# Arguments number differs from overridden method - pylint: disable=W0221
- def run(self, crash_identifiers):
- analysis = FracasCrashAnalysis.Get(crash_identifiers)
- result = self.PostProcessResults(analysis, crash_identifiers)
+ def run(self, crash_identifiers, client_id):
+ analysis = findit_for_client.GetAnalysisForClient(crash_identifiers,
+ client_id)
+ result = findit_for_client.GetPublishResultFromAnalysis(analysis,
+ crash_identifiers,
+ client_id)
messages_data = [json.dumps(result, sort_keys=True)]
- crash_config = CrashConfig.Get()
- topic = crash_config.fracas['analysis_result_pubsub_topic']
+ client_config = getattr(CrashConfig.Get(), client_id)
+ topic = client_config['analysis_result_pubsub_topic']
pubsub_util.PublishMessagesToTopic(messages_data, topic)
logging.info('Published analysis result for %s', repr(crash_identifiers))
stgao 2016/09/13 16:37:31 include client_id
Sharu Jiang 2016/09/14 20:38:38 Done.
class CrashWrapperPipeline(BasePipeline):
# Arguments number differs from overridden method - pylint: disable=W0221
- def run(self, crash_identifiers):
- run_analysis = yield CrashAnalysisPipeline(crash_identifiers)
+ def run(self, crash_identifiers, client_id):
+ run_analysis = yield CrashAnalysisPipeline(crash_identifiers, client_id)
with pipeline.After(run_analysis):
- yield PublishResultPipeline(crash_identifiers)
+ yield PublishResultPipeline(crash_identifiers, client_id)
-@ndb.transactional
def _NeedsNewAnalysis(
stgao 2016/09/13 16:37:31 If we remove the transaction annotation, there is
Sharu Jiang 2016/09/14 20:38:38 Done.
crash_identifiers, chrome_version, signature, client_id,
- platform, stack_trace, channel, historical_metadata):
- analysis = FracasCrashAnalysis.Get(crash_identifiers)
+ platform, stack_trace, customized_data):
+ pass_policy, args = findit_for_client.CheckPolicyForClient(
stgao 2016/09/13 16:37:31 `args` here is too general. Change to a name that
Sharu Jiang 2016/09/14 20:38:38 How about updated_analysis_args or tuned_analysis_
+ crash_identifiers, chrome_version, signature,
+ client_id, platform, stack_trace,
+ customized_data)
+
+ if not pass_policy:
+ return False
+
+ analysis = findit_for_client.GetAnalysisForClient(crash_identifiers,
+ client_id)
if analysis and not analysis.failed:
# A new analysis is not needed if last one didn't complete or succeeded.
# TODO(http://crbug.com/600535): re-analyze if stack trace or regression
@@ -126,68 +121,22 @@ def _NeedsNewAnalysis(
repr(crash_identifiers))
return False
- if not analysis:
- # A new analysis is needed if there is no analysis yet.
- analysis = FracasCrashAnalysis.Create(crash_identifiers)
-
- analysis.Reset()
-
- # Set common properties.
- analysis.crashed_version = chrome_version
- analysis.stack_trace = stack_trace
- analysis.signature = signature
- analysis.platform = platform
- analysis.client_id = client_id
-
- # Set customized properties.
- analysis.historical_metadata = historical_metadata
- analysis.channel = channel
-
- # Set analysis progress properties.
- analysis.status = analysis_status.PENDING
- analysis.requested_time = time_util.GetUTCNow()
-
- analysis.put()
+ findit_for_client.ResetAnalysis(analysis, *args)
return True
def ScheduleNewAnalysisForCrash(
crash_identifiers, chrome_version, signature, client_id,
- platform, stack_trace, channel, historical_metadata,
+ platform, stack_trace, customized_data,
queue_name=constants.DEFAULT_QUEUE):
"""Schedules an analysis."""
- crash_config = CrashConfig.Get()
- if platform not in crash_config.fracas.get(
- 'supported_platform_list_by_channel', {}).get(channel, []):
- # Bail out if either the channel or platform is not supported yet.
-    logging.info('Analysis of channel %s, platform %s is not supported. '
- 'No analysis is scheduled for %s',
- channel, platform, repr(crash_identifiers))
- return False
-
- # TODO(katesonia): Remove the default value after adding validity check to
- # config.
- for blacklist_marker in crash_config.fracas.get(
- 'signature_blacklist_markers', []):
- if blacklist_marker in signature:
- logging.info('%s signature is not supported. '
- 'No analysis is scheduled for %s', blacklist_marker,
- repr(crash_identifiers))
- return False
-
- # TODO(katesonia): Remove the default value after adding validity check to
- # config.
- platform_rename = crash_config.fracas.get('platform_rename', {})
- if platform in platform_rename:
- platform = platform_rename[platform]
-
if _NeedsNewAnalysis(crash_identifiers, chrome_version, signature, client_id,
- platform, stack_trace, channel, historical_metadata):
- analysis_pipeline = CrashWrapperPipeline(crash_identifiers)
+ platform, stack_trace, customized_data):
+ analysis_pipeline = CrashWrapperPipeline(crash_identifiers, client_id)
# Attribute defined outside __init__ - pylint: disable=W0201
analysis_pipeline.target = appengine_util.GetTargetNameForModule(
- constants.CRASH_BACKEND_FRACAS)
+ constants.CRASH_BACKEND[client_id])
analysis_pipeline.start(queue_name=queue_name)
logging.info('New analysis is scheduled for %s', repr(crash_identifiers))
return True

Powered by Google App Engine
This is Rietveld 408576698