Chromium Code Reviews| Index: appengine/findit/util_scripts/crash_queries/delta_test/delta_util.py |
| diff --git a/appengine/findit/util_scripts/crash_queries/delta_test/delta_util.py b/appengine/findit/util_scripts/crash_queries/delta_test/delta_util.py |
| index 02102bdf820228e630fbc5eb89e965b19148d691..3391c5efe573a3f8df9d6be94e05138ed3a7b881 100644 |
| --- a/appengine/findit/util_scripts/crash_queries/delta_test/delta_util.py |
| +++ b/appengine/findit/util_scripts/crash_queries/delta_test/delta_util.py |
| @@ -20,8 +20,9 @@ dev_appserver.fix_sys_path() |
| from common import appengine_util |
| -# TODO(katesonia): move host to azalea host after migration. |
| -_FEEDBACK_URL_TEMPLATE = 'host/crash/fracas-result-feedback?key=%s' |
| +# TODO(katesonia): move host to predator host after migration. |
| +_FRACAS_FEEDBACK_URL_TEMPLATE = ( |
| + 'https://%s.appspot.com/crash/fracas-result-feedback?key=%s') |
| GIT_HASH_PATTERN = re.compile(r'^[0-9a-fA-F]{40}$') |
| @@ -62,48 +63,47 @@ def EnsureDirExists(path): # pragma: no cover |
| # TODO(crbug.com/662540): Add unittests. |
| -def FlushResult(result, result_path): # pragma: no cover |
| - logging.info('\nFlushing results to %s', result_path) |
| +def FlushResult(result, result_path, serializer=pickle): # pragma: no cover |
| + print '\nFlushing results to', result_path |
| EnsureDirExists(result_path) |
| with open(result_path, 'wb') as f: |
| - pickle.dump(result, f) |
| + serializer.dump(result, f) |
| # TODO(crbug.com/662540): Add unittests. |
| -def PrintDelta(deltas, crash_num): # pragma: no cover |
| - logging.info(('\n+++++++++++++++++++++' |
| - '\nDelta on %d crashes ' |
| - '\n+++++++++++++++++++++'), crash_num) |
| +def PrintDelta(deltas, crash_num, app_id): # pragma: no cover |
| + print ('\n+++++++++++++++++++++' |
| + '\nDelta on %d crashes ' |
| + '\n+++++++++++++++++++++') % crash_num |
| if not deltas: |
| - logging.info('Two sets of results are the same.') |
| + print 'Two sets of results are the same.' |
| return |
| for crash_id, delta in deltas.iteritems(): |
| - logging.info('\nCrash: %s\n%s\n', |
| - _FEEDBACK_URL_TEMPLATE % crash_id, |
| - str(delta)) |
| + print '\nCrash: %s\n%s\n' % ( |
| + _FRACAS_FEEDBACK_URL_TEMPLATE % (app_id, crash_id), str(delta)) |
| # TODO(crbug.com/662540): Add unittests. |
| -def WriteDeltaToCSV(deltas, crash_num, |
| +def WriteDeltaToCSV(deltas, crash_num, app_id, |
| git_hash1, git_hash2, file_path): # pragma: no cover |
| EnsureDirExists(file_path) |
| def _EncodeStr(string): |
| return string.replace('\"', '\'') if string else '' |
| - logging.info('Writing delta diff to %s\n', file_path) |
| + print 'Writing delta diff to %s\n' % file_path |
|
stgao
2016/11/17 05:03:05
Why do we switch from logging to print? I thought logging was the preferred approach.
Sharu Jiang
2016/11/17 09:03:15
The problem is that we have tons of loggings inside the code being run, so using print keeps the script's own output readable.
|
| with open(file_path, 'wb') as f: |
| - f.write('Delta between githash1 %s and githash2 %s on %d crashes\n' % ( |
| + f.write('Delta between githash1 %s and githash2 %s on %d crashes\n\n' % ( |
| git_hash1, git_hash2, crash_num)) |
| - f.write('project, components, cls, regression_range\n') |
| + f.write('crash url, project, components, cls, regression_range\n') |
| for crash_id, delta in deltas.iteritems(): |
| delta_str_dict = delta.delta_str_dict |
| - feedback_url = _FEEDBACK_URL_TEMPLATE % crash_id |
| + feedback_url = _FRACAS_FEEDBACK_URL_TEMPLATE % (app_id, crash_id) |
| f.write('%s, "%s", "%s", "%s", "%s"\n' % ( |
| feedback_url, |
| - _EncodeStr(delta_str_dict.get('project', '')), |
| - _EncodeStr(delta_str_dict.get('components', '')), |
| - _EncodeStr(delta_str_dict.get('cls', '')), |
| + _EncodeStr(delta_str_dict.get('suspected_project', '')), |
| + _EncodeStr(delta_str_dict.get('suspected_components', '')), |
| + _EncodeStr(delta_str_dict.get('suspected_cls', '')), |
| _EncodeStr(delta_str_dict.get('regression_range', '')) |
| )) |