Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(764)

Side by Side Diff: appengine/monorail/tracker/issueimport.py

Issue 1868553004: Open Source Monorail (Closed) Base URL: https://chromium.googlesource.com/infra/infra.git@master
Patch Set: Rebase Created 4 years, 8 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « appengine/monorail/tracker/issueexport.py ('k') | appengine/monorail/tracker/issuelist.py » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
(Empty)
1 # Copyright 2016 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style
3 # license that can be found in the LICENSE file or at
4 # https://developers.google.com/open-source/licenses/bsd
5
6 """Servlet to import a file of issues in JSON format.
7 """
8
9 import collections
10 import json
11 import logging
12 import time
13
14 from third_party import ezt
15
16 from features import filterrules_helpers
17 from framework import framework_helpers
18 from framework import jsonfeed
19 from framework import permissions
20 from framework import servlet
21 from framework import urls
22 from proto import tracker_pb2
23
24
# Intermediate result of the parsing phase, handed to the saving phase.
#   user_id_dict: {email: user_id} for every email referenced by the import.
#   nonexist_emails: emails that had no existing user row at parse time.
#   issue_list: Issue PBs parsed from the uploaded JSON.
#   comments_dict / starrers_dict / relations_dict: per-issue data keyed by
#       the issue's local_id.
ParserState = collections.namedtuple('ParserState', [
    'user_id_dict',
    'nonexist_emails',
    'issue_list',
    'comments_dict',
    'starrers_dict',
    'relations_dict',
])
29
30
class IssueImport(servlet.Servlet):
  """IssueImport loads a file of issues in JSON format.

  The import runs in two phases: _ParseObjects() converts the uploaded JSON
  into protocol buffers without touching the DB, then _SaveObjects() persists
  users, issues, comments, stars, and relations.  A 'pre_check_only' form
  flag runs the parsing phase alone as a dry run.
  """

  _PAGE_TEMPLATE = 'tracker/issue-import-page.ezt'
  _MAIN_TAB_MODE = servlet.Servlet.MAIN_TAB_ISSUES

  def AssertBasePermission(self, mr):
    """Make sure that the logged in user has permission to view this page.

    Raises:
      permissions.PermissionException: if the signed-in user is not a site
          admin.  Bulk import can create users, issues, comments, and stars,
          so it is restricted to administrators.
    """
    super(IssueImport, self).AssertBasePermission(mr)
    if not mr.auth.user_pb.is_site_admin:
      raise permissions.PermissionException(
          'Only site admins may import issues')

  def GatherPageData(self, mr):
    """Build up a dictionary of data values to use when rendering the page."""
    return {
        'issue_tab_mode': None,
        'page_perms': self.MakePagePerms(mr, None, permissions.CREATE_ISSUE),
        'import_errors': [],
        }

  def ProcessFormData(self, mr, post_data):
    """Process the issue entry form.

    Args:
      mr: commonly used info parsed from the request.
      post_data: The post_data dict for the current request.

    Returns:
      String URL to redirect the user to after processing.
    """
    import_errors = []
    json_data = None

    # When present, only parse and report; make no DB writes.
    pre_check_only = 'pre_check_only' in post_data

    uploaded_file = post_data.get('jsonfile')
    if uploaded_file is None:
      import_errors.append('No file uploaded')
    else:
      try:
        json_str = uploaded_file.value
        # Exported files may carry the anti-XSSI prefix; strip it before
        # parsing.
        if json_str.startswith(jsonfeed.XSSI_PREFIX):
          json_str = json_str[len(jsonfeed.XSSI_PREFIX):]
        json_data = json.loads(json_str)
      except ValueError:
        import_errors.append('error parsing JSON in file')

    if uploaded_file and not json_data:
      import_errors.append('JSON file was empty')

    # Note that the project must already exist in order to even reach
    # this servlet because it is hosted in the context of a project.
    if json_data and mr.project_name != json_data['metadata']['project']:
      import_errors.append(
          'Project name does not match. '
          'Edit the file if you want to import into this project anyway.')

    if import_errors:
      return self.PleaseCorrect(mr, import_errors=import_errors)

    event_log = []  # We accumulate a list of messages to display to the user.

    try:
      # First we parse the JSON into objects, but we don't have DB IDs yet.
      state = self._ParseObjects(mr.cnxn, mr.project_id, json_data, event_log)
      # If that worked, go ahead and start saving the data to the DB.
      if not pre_check_only:
        self._SaveObjects(mr.cnxn, mr.project_id, state, event_log)
    except JSONImportError:
      # just report it to the user by displaying event_log
      event_log.append('Aborted import processing')

    # This is a little bit of a hack because it always uses the form validation
    # error message display logic to show the results of this import run,
    # which may include errors or not.
    return self.PleaseCorrect(mr, import_errors=event_log)

  def _ParseObjects(self, cnxn, project_id, json_data, event_log):
    """Examine JSON data and return a parser state for further processing.

    Args:
      cnxn: connection to the SQL database.
      project_id: int ID of the project being imported into.
      json_data: dict parsed from the uploaded JSON file.
      event_log: list of strings; progress messages are appended to it.

    Returns:
      A ParserState namedtuple with everything _SaveObjects() needs.

    Raises:
      JSONImportError: indirectly, via _ParseIssue and friends, when the
          JSON cannot be interpreted.
    """
    # Decide which users need to be created.
    needed_emails = json_data['emails']
    user_id_dict = self.services.user.LookupExistingUserIDs(cnxn, needed_emails)
    nonexist_emails = [email for email in needed_emails
                       if email not in user_id_dict]

    event_log.append('Need to create %d users: %r' %
                     (len(nonexist_emails), nonexist_emails))
    # Predict the IDs that user autocreation will assign, so parsed PBs can
    # reference them now.  _SaveObjects() verifies the prediction and aborts
    # on any mismatch.  NOTE(review): presumably the user service derives new
    # IDs with the same MurmurHash of the lowercased email — confirm.
    user_id_dict.update({
        email.lower(): framework_helpers.MurmurHash3_x86_32(email.lower())
        for email in nonexist_emails})

    num_comments = 0
    num_stars = 0
    issue_list = []
    comments_dict = collections.defaultdict(list)
    starrers_dict = collections.defaultdict(list)
    relations_dict = collections.defaultdict(list)
    for issue_json in json_data.get('issues', []):
      issue, comment_list, starrer_list, relation_list = self._ParseIssue(
          cnxn, project_id, user_id_dict, issue_json, event_log)
      issue_list.append(issue)
      comments_dict[issue.local_id] = comment_list
      starrers_dict[issue.local_id] = starrer_list
      relations_dict[issue.local_id] = relation_list
      num_comments += len(comment_list)
      num_stars += len(starrer_list)

    event_log.append(
        'Found info for %d issues: %r' %
        (len(issue_list), sorted([issue.local_id for issue in issue_list])))

    event_log.append(
        'Found %d total comments for %d issues' %
        (num_comments, len(comments_dict)))

    event_log.append(
        'Found %d total stars for %d issues' %
        (num_stars, len(starrers_dict)))

    event_log.append(
        'Found %d total relationships.' %
        sum((len(dsts) for dsts in relations_dict.itervalues())))

    event_log.append('Parsing phase finished OK')
    return ParserState(
        user_id_dict, nonexist_emails, issue_list,
        comments_dict, starrers_dict, relations_dict)

  def _ParseIssue(self, cnxn, project_id, user_id_dict, issue_json, event_log):
    """Parse one issue dict into an Issue PB plus related per-issue data.

    Returns:
      A 4-tuple (issue, comments, starrers, relations) where relations is a
      list of (other_local_id, kind) pairs with kind in {'blockedon',
      'blocking', 'mergedinto'}.
    """
    issue = tracker_pb2.Issue(
        project_id=project_id,
        local_id=issue_json['local_id'],
        reporter_id=user_id_dict[issue_json['reporter']],
        summary=issue_json['summary'],
        opened_timestamp=issue_json['opened'],
        modified_timestamp=issue_json['modified'],
        # Cc addresses missing from user_id_dict are silently dropped.
        cc_ids=[user_id_dict[cc_email]
                for cc_email in issue_json.get('cc', [])
                if cc_email in user_id_dict],
        status=issue_json.get('status', ''),
        labels=issue_json.get('labels', []),
        field_values=[self._ParseFieldValue(cnxn, project_id, user_id_dict, field)
                      for field in issue_json.get('fields', [])])
    # Owner and closed timestamp are optional; only set when truthy in JSON.
    if issue_json.get('owner'):
      issue.owner_id = user_id_dict[issue_json['owner']]
    if issue_json.get('closed'):
      issue.closed_timestamp = issue_json['closed']
    comments = [self._ParseComment(
        project_id, user_id_dict, comment_json, event_log)
                for comment_json in issue_json.get('comments', [])]

    starrers = [user_id_dict[starrer] for starrer in issue_json['starrers']]

    relations = []
    relations.extend(
        [(i, 'blockedon') for i in issue_json.get('blocked_on', [])])
    relations.extend(
        [(i, 'blocking') for i in issue_json.get('blocking', [])])
    if 'merged_into' in issue_json:
      relations.append((issue_json['merged_into'], 'mergedinto'))

    return issue, comments, starrers, relations

  def _ParseFieldValue(self, cnxn, project_id, user_id_dict, field_json):
    """Parse one custom field value dict into a FieldValue PB."""
    field = tracker_pb2.FieldValue(
        field_id=self.services.config.LookupFieldID(cnxn, project_id,
                                                    field_json['field']))
    # Exactly which of these keys is present depends on the field type.
    if 'int_value' in field_json:
      field.int_value = field_json['int_value']
    if 'str_value' in field_json:
      field.str_value = field_json['str_value']
    if 'user_value' in field_json:
      # .get(): an unknown email leaves user_value as None rather than raising.
      field.user_value = user_id_dict.get(field_json['user_value'])

    return field

  def _ParseComment(self, project_id, user_id_dict, comment_json, event_log):
    """Parse one comment dict into an IssueComment PB."""
    comment = tracker_pb2.IssueComment(
        # Note: issue_id is filled in after the issue is saved.
        project_id=project_id,
        timestamp=comment_json['timestamp'],
        user_id=user_id_dict[comment_json['commenter']],
        content=comment_json.get('content'))

    # 'amendments' and 'attachments' are required keys: a KeyError here
    # surfaces as a failed import.
    for amendment in comment_json['amendments']:
      comment.amendments.append(
          self._ParseAmendment(amendment, user_id_dict, event_log))

    for attachment in comment_json['attachments']:
      comment.attachments.append(
          self._ParseAttachment(attachment, event_log))

    return comment

  def _ParseAmendment(self, amendment_json, user_id_dict, _event_log):
    """Parse one amendment dict into an Amendment PB."""
    amendment = tracker_pb2.Amendment(
        field=tracker_pb2.FieldID(amendment_json['field']))

    if 'new_value' in amendment_json:
      amendment.newvalue = amendment_json['new_value']
    if 'custom_field_name' in amendment_json:
      amendment.custom_field_name = amendment_json['custom_field_name']
    if 'added_users' in amendment_json:
      amendment.added_user_ids.extend(
          [user_id_dict[email] for email in amendment_json['added_users']])
    if 'removed_users' in amendment_json:
      amendment.removed_user_ids.extend(
          [user_id_dict[email] for email in amendment_json['removed_users']])

    return amendment

  def _ParseAttachment(self, attachment_json, _event_log):
    """Parse one attachment dict into an Attachment PB.

    Only metadata is imported; the blob itself is expected to already exist
    at the given Google Cloud Storage object ID.
    """
    attachment = tracker_pb2.Attachment(
        filename=attachment_json['name'],
        filesize=attachment_json['size'],
        mimetype=attachment_json['mimetype'],
        gcs_object_id=attachment_json['gcs_object_id']
    )
    return attachment

  def _SaveObjects(self, cnxn, project_id, state, event_log):
    """Examine JSON data and create users, issues, and comments.

    Args:
      cnxn: connection to the SQL database.
      project_id: int ID of the project being imported into.
      state: ParserState returned by _ParseObjects().
      event_log: list of strings; progress messages are appended to it.

    Raises:
      JSONImportError: if an autocreated user got a different ID than the
          parsing phase predicted (parsed PBs already embed the prediction).
    """

    created_user_ids = self.services.user.LookupUserIDs(
        cnxn, state.nonexist_emails, autocreate=True)
    for created_email, created_id in created_user_ids.items():
      if created_id != state.user_id_dict[created_email]:
        event_log.append('Mismatched user_id for %r' % created_email)
        raise JSONImportError()
    event_log.append('Created %d users' % len(state.nonexist_emails))

    total_comments = 0
    total_stars = 0
    config = self.services.config.GetProjectConfig(cnxn, project_id)
    for issue in state.issue_list:
      # TODO(jrobbins): renumber issues if there is a local_id conflict.
      if issue.local_id not in state.starrers_dict:
        # Issues with stars will have filter rules applied in SetStar().
        filterrules_helpers.ApplyFilterRules(
            cnxn, self.services, issue, config)
      issue_id = self.services.issue.InsertIssue(cnxn, issue)
      for comment in state.comments_dict[issue.local_id]:
        total_comments += 1
        comment.issue_id = issue_id
        self.services.issue.InsertComment(cnxn, comment)
      for starrer in state.starrers_dict[issue.local_id]:
        total_stars += 1
        self.services.issue_star.SetStar(
            cnxn, self.services, config, issue_id, starrer, True)

    event_log.append('Created %d issues' % len(state.issue_list))
    event_log.append('Created %d comments for %d issues' % (
        total_comments, len(state.comments_dict)))
    event_log.append('Set %d stars on %d issues' % (
        total_stars, len(state.starrers_dict)))

    # Relations reference issues by local_id, but the DB wants global
    # issue_ids, so translate each source and destination before saving.
    global_relations_dict = collections.defaultdict(list)
    for issue, rels in state.relations_dict.iteritems():
      src_iid = self.services.issue.GetIssueByLocalID(
          cnxn, project_id, issue).issue_id
      dst_iids = [i.issue_id for i in self.services.issue.GetIssuesByLocalIDs(
          cnxn, project_id, [rel[0] for rel in rels])]
      kinds = [rel[1] for rel in rels]
      global_relations_dict[src_iid] = zip(dst_iids, kinds)
    self.services.issue.RelateIssues(cnxn, global_relations_dict)

    # Advance the project's local_id counter past the imported issues.
    self.services.issue.SetUsedLocalID(cnxn, project_id)
    event_log.append('Finished import')
300
301
class JSONImportError(Exception):
  """Signals that the uploaded JSON could not be imported as-is."""
OLDNEW
« no previous file with comments | « appengine/monorail/tracker/issueexport.py ('k') | appengine/monorail/tracker/issuelist.py » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698