Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(117)

Side by Side Diff: appengine/monorail/services/issue_svc.py

Issue 1868553004: Open Source Monorail (Closed) Base URL: https://chromium.googlesource.com/infra/infra.git@master
Patch Set: Rebase Created 4 years, 8 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
(Empty)
1 # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
3 # license that can be found in the LICENSE file or at
4 # https://developers.google.com/open-source/licenses/bsd
5
6 """A set of functions that provide persistence for Monorail issue tracking.
7
8 This module provides functions to get, update, create, and (in some
9 cases) delete each type of business object. It provides a logical
10 persistence layer on top of an SQL database.
11
12 Business objects are described in tracker_pb2.py and tracker_bizobj.py.
13 """
14
15 import collections
16 import json
17 import logging
18 import os
19 import time
20 import uuid
21
22 from google.appengine.api import app_identity
23 from google.appengine.api import images
24 from third_party import cloudstorage
25
26 import settings
27 from features import filterrules_helpers
28 from framework import framework_bizobj
29 from framework import framework_constants
30 from framework import framework_helpers
31 from framework import gcs_helpers
32 from framework import permissions
33 from framework import sql
34 from infra_libs import ts_mon
35 from proto import project_pb2
36 from proto import tracker_pb2
37 from services import caches
38 from services import tracker_fulltext
39 from tracker import tracker_bizobj
40 from tracker import tracker_helpers
41
42
# Names of the SQL tables that hold issue, comment, attachment, and
# housekeeping data for the issue tracker.
ISSUE_TABLE_NAME = 'Issue'
ISSUESUMMARY_TABLE_NAME = 'IssueSummary'
ISSUE2LABEL_TABLE_NAME = 'Issue2Label'
ISSUE2COMPONENT_TABLE_NAME = 'Issue2Component'
ISSUE2CC_TABLE_NAME = 'Issue2Cc'
ISSUE2NOTIFY_TABLE_NAME = 'Issue2Notify'
ISSUE2FIELDVALUE_TABLE_NAME = 'Issue2FieldValue'
COMMENT_TABLE_NAME = 'Comment'
ATTACHMENT_TABLE_NAME = 'Attachment'
ISSUERELATION_TABLE_NAME = 'IssueRelation'
DANGLINGRELATION_TABLE_NAME = 'DanglingIssueRelation'
ISSUEUPDATE_TABLE_NAME = 'IssueUpdate'
ISSUEFORMERLOCATIONS_TABLE_NAME = 'IssueFormerLocations'
REINDEXQUEUE_TABLE_NAME = 'ReindexQueue'
LOCALIDCOUNTER_TABLE_NAME = 'LocalIDCounter'

# Column lists for each table.  The ordering of each list must match the
# order in which row tuples are packed and unpacked by the serialization
# code in this module (e.g. IssueTwoLevelCache._UnpackIssue).
ISSUE_COLS = [
    'id', 'project_id', 'local_id', 'status_id', 'owner_id', 'reporter_id',
    'opened', 'closed', 'modified', 'derived_owner_id', 'derived_status_id',
    'deleted', 'star_count', 'attachment_count', 'is_spam']
ISSUESUMMARY_COLS = ['issue_id', 'summary']
ISSUE2LABEL_COLS = ['issue_id', 'label_id', 'derived']
ISSUE2COMPONENT_COLS = ['issue_id', 'component_id', 'derived']
ISSUE2CC_COLS = ['issue_id', 'cc_id', 'derived']
ISSUE2NOTIFY_COLS = ['issue_id', 'email']
ISSUE2FIELDVALUE_COLS = [
    'issue_id', 'field_id', 'int_value', 'str_value', 'user_id', 'derived']
COMMENT_COLS = [
    'Comment.id', 'issue_id', 'created', 'Comment.project_id', 'commenter_id',
    'content', 'inbound_message', 'was_escaped', 'deleted_by',
    'Comment.is_spam']
ABBR_COMMENT_COLS = ['Comment.id', 'commenter_id', 'deleted_by']
ATTACHMENT_COLS = [
    'id', 'issue_id', 'comment_id', 'filename', 'filesize', 'mimetype',
    'deleted', 'gcs_object_id']
ISSUERELATION_COLS = ['issue_id', 'dst_issue_id', 'kind']
DANGLINGRELATION_COLS = [
    'issue_id', 'dst_issue_project', 'dst_issue_local_id', 'kind']
ISSUEUPDATE_COLS = [
    'id', 'issue_id', 'comment_id', 'field', 'old_value', 'new_value',
    'added_user_id', 'removed_user_id', 'custom_field_name']
ISSUEFORMERLOCATIONS_COLS = ['issue_id', 'project_id', 'local_id']
REINDEXQUEUE_COLS = ['issue_id', 'created']

# Number of rows handled per batch by bulk operations in this module.
CHUNK_SIZE = 1000
88
89
class IssueIDTwoLevelCache(caches.AbstractTwoLevelCache):
  """RAM and memcache layers for mapping (project_id, local_id) to issue_id."""

  def __init__(self, cache_manager, issue_service):
    super(IssueIDTwoLevelCache, self).__init__(
        cache_manager, 'issue_id', 'issue_id:', int,
        max_size=settings.issue_cache_max_size, use_value_centric_cache=True)
    self.issue_service = issue_service

  def _DeserializeIssueIDs(self, project_local_issue_ids):
    """Convert database rows into a dict {(project_id, local_id): issue_id}."""
    id_dict = {}
    for project_id, local_id, issue_id in project_local_issue_ids:
      id_dict[(project_id, local_id)] = issue_id
    return id_dict

  def FetchItems(self, cnxn, keys):
    """On RAM and memcache miss, look the keys up in the database."""
    # Group the requested local IDs by project so we can issue one
    # IN-clause per project, OR'd together into a single SELECT.
    local_ids_by_pid = collections.defaultdict(list)
    for project_id, local_id in keys:
      local_ids_by_pid[project_id].append(local_id)

    where = []
    for project_id, local_ids in local_ids_by_pid.iteritems():
      cond = ('(Issue.project_id = %%s AND Issue.local_id IN (%s))' %
              sql.PlaceHolders(local_ids))
      where.append((cond, [project_id] + local_ids))

    rows = self.issue_service.issue_tbl.Select(
        cnxn, cols=['project_id', 'local_id', 'id'],
        where=where, or_where_conds=True)
    return self._DeserializeIssueIDs(rows)

  def _KeyToStr(self, key):
    """Encode this cache's (int, int) key as a string for memcache."""
    return '%d,%d' % key

  def _StrToKey(self, key_str):
    """Decode a memcache key string back into an (int, int) pair."""
    pid_str, local_id_str = key_str.split(',')
    return int(pid_str), int(local_id_str)
129
130
class IssueTwoLevelCache(caches.AbstractTwoLevelCache):
  """Class to manage RAM and memcache for Issue PBs."""

  def __init__(
      self, cache_manager, issue_service, project_service, config_service):
    super(IssueTwoLevelCache, self).__init__(
        cache_manager, 'issue', 'issue:', tracker_pb2.Issue,
        max_size=settings.issue_cache_max_size)
    self.issue_service = issue_service
    self.project_service = project_service
    self.config_service = config_service

  def _UnpackIssue(self, cnxn, issue_row):
    """Partially construct an issue object using info from a DB row.

    Args:
      cnxn: connection to SQL database.
      issue_row: tuple of values in ISSUE_COLS order.

    Returns:
      A partially filled-in Issue PB; related rows (labels, ccs, etc.) are
      attached later by _DeserializeIssues.
    """
    (issue_id, project_id, local_id, status_id, owner_id, reporter_id,
     opened, closed, modified, derived_owner_id, derived_status_id,
     deleted, star_count, attachment_count, is_spam) = issue_row

    issue = tracker_pb2.Issue()
    project = self.project_service.GetProject(cnxn, project_id)
    issue.project_name = project.project_name
    issue.issue_id = issue_id
    issue.project_id = project_id
    issue.local_id = local_id
    if status_id is not None:
      status = self.config_service.LookupStatus(cnxn, project_id, status_id)
      issue.status = status
    issue.owner_id = owner_id or 0
    issue.reporter_id = reporter_id or 0
    issue.derived_owner_id = derived_owner_id or 0
    if derived_status_id is not None:
      derived_status = self.config_service.LookupStatus(
          cnxn, project_id, derived_status_id)
      issue.derived_status = derived_status
    issue.deleted = bool(deleted)
    # Zero/NULL timestamps mean "not set", so leave those fields unset.
    if opened:
      issue.opened_timestamp = opened
    if closed:
      issue.closed_timestamp = closed
    if modified:
      issue.modified_timestamp = modified
    issue.star_count = star_count
    issue.attachment_count = attachment_count
    issue.is_spam = bool(is_spam)
    return issue

  def _UnpackFieldValue(self, fv_row):
    """Construct a field value object from a DB row.

    Returns:
      A (FieldValue PB, issue_id) pair so the caller can attach the value
      to the right issue.
    """
    (issue_id, field_id, int_value, str_value, user_id, derived) = fv_row
    fv = tracker_bizobj.MakeFieldValue(
        field_id, int_value, str_value, user_id, bool(derived))
    return fv, issue_id

  def _DeserializeIssues(
      self, cnxn, issue_rows, summary_rows, label_rows, component_rows,
      cc_rows, notify_rows, fieldvalue_rows, relation_rows,
      dangling_relation_rows):
    """Convert the given DB rows into a dict of Issue PBs."""
    results_dict = {}
    for issue_row in issue_rows:
      issue = self._UnpackIssue(cnxn, issue_row)
      results_dict[issue.issue_id] = issue

    for issue_id, summary in summary_rows:
      results_dict[issue_id].summary = summary

    # TODO(jrobbins): it would be nice to order labels by rank and name.
    for issue_id, label_id, derived in label_rows:
      issue = results_dict.get(issue_id)
      if not issue:
        logging.info('Got label for an unknown issue: %r %r',
                     label_rows, issue_rows)
        continue
      label = self.config_service.LookupLabel(cnxn, issue.project_id, label_id)
      assert label, ('Label ID %r on IID %r not found in project %r' %
                     (label_id, issue_id, issue.project_id))
      if derived:
        results_dict[issue_id].derived_labels.append(label)
      else:
        results_dict[issue_id].labels.append(label)

    for issue_id, component_id, derived in component_rows:
      if derived:
        results_dict[issue_id].derived_component_ids.append(component_id)
      else:
        results_dict[issue_id].component_ids.append(component_id)

    for issue_id, user_id, derived in cc_rows:
      if derived:
        results_dict[issue_id].derived_cc_ids.append(user_id)
      else:
        results_dict[issue_id].cc_ids.append(user_id)

    for issue_id, email in notify_rows:
      results_dict[issue_id].derived_notify_addrs.append(email)

    for fv_row in fieldvalue_rows:
      fv, issue_id = self._UnpackFieldValue(fv_row)
      results_dict[issue_id].field_values.append(fv)

    # Relation rows are selected by src OR dst issue ID, so either side
    # of a relation may be an issue that was not fetched this time.
    for issue_id, dst_issue_id, kind in relation_rows:
      src_issue = results_dict.get(issue_id)
      dst_issue = results_dict.get(dst_issue_id)
      assert src_issue or dst_issue, (
          'Neither source issue %r nor dest issue %r was found' %
          (issue_id, dst_issue_id))
      if src_issue:
        if kind == 'blockedon':
          src_issue.blocked_on_iids.append(dst_issue_id)
        elif kind == 'mergedinto':
          src_issue.merged_into = dst_issue_id
        else:
          logging.info('unknown relation kind %r', kind)
          continue

      if dst_issue:
        if kind == 'blockedon':
          dst_issue.blocking_iids.append(issue_id)

    for issue_id, dst_issue_proj, dst_issue_id, kind in dangling_relation_rows:
      src_issue = results_dict.get(issue_id)
      if not src_issue:
        # Guard against rows that reference an issue we did not fetch,
        # consistent with the label and relation handling above.  The
        # previous code dereferenced None here and crashed.
        logging.info('Got dangling relation for an unknown issue: %r %r',
                     dangling_relation_rows, issue_rows)
        continue
      if kind == 'blockedon':
        src_issue.dangling_blocked_on_refs.append(
            tracker_bizobj.MakeDanglingIssueRef(dst_issue_proj, dst_issue_id))
      elif kind == 'blocking':
        src_issue.dangling_blocking_refs.append(
            tracker_bizobj.MakeDanglingIssueRef(dst_issue_proj, dst_issue_id))
      else:
        logging.warning('unhandled dangling relation kind %r', kind)
        continue

    return results_dict

  # Note: sharding is used here to allow us to load issues from the replicas
  # without placing load on the master. Writes are not sharded.
  # pylint: disable=arguments-differ
  def FetchItems(self, cnxn, issue_ids, shard_id=None):
    """Retrieve and deserialize issues.

    Args:
      cnxn: connection to SQL database.
      issue_ids: list of int global issue IDs to fetch.
      shard_id: optional int shard ID to choose a read replica.

    Returns:
      A dict {issue_id: Issue PB} for the issues that were found.
    """
    issue_rows = self.issue_service.issue_tbl.Select(
        cnxn, cols=ISSUE_COLS, id=issue_ids, shard_id=shard_id)

    summary_rows = self.issue_service.issuesummary_tbl.Select(
        cnxn, cols=ISSUESUMMARY_COLS, shard_id=shard_id, issue_id=issue_ids)
    label_rows = self.issue_service.issue2label_tbl.Select(
        cnxn, cols=ISSUE2LABEL_COLS, shard_id=shard_id, issue_id=issue_ids)
    component_rows = self.issue_service.issue2component_tbl.Select(
        cnxn, cols=ISSUE2COMPONENT_COLS, shard_id=shard_id, issue_id=issue_ids)
    cc_rows = self.issue_service.issue2cc_tbl.Select(
        cnxn, cols=ISSUE2CC_COLS, shard_id=shard_id, issue_id=issue_ids)
    notify_rows = self.issue_service.issue2notify_tbl.Select(
        cnxn, cols=ISSUE2NOTIFY_COLS, shard_id=shard_id, issue_id=issue_ids)
    fieldvalue_rows = self.issue_service.issue2fieldvalue_tbl.Select(
        cnxn, cols=ISSUE2FIELDVALUE_COLS, shard_id=shard_id,
        issue_id=issue_ids)
    if issue_ids:
      # Relations are fetched in both directions so that blocking_iids can
      # be derived from rows whose dst_issue_id is one of our issues.
      ph = sql.PlaceHolders(issue_ids)
      relation_rows = self.issue_service.issuerelation_tbl.Select(
          cnxn, cols=ISSUERELATION_COLS,
          where=[('(issue_id IN (%s) OR dst_issue_id IN (%s))' % (ph, ph),
                  issue_ids + issue_ids)])
      dangling_relation_rows = self.issue_service.danglingrelation_tbl.Select(
          cnxn, cols=DANGLINGRELATION_COLS, issue_id=issue_ids)
    else:
      relation_rows = []
      dangling_relation_rows = []

    return self._DeserializeIssues(
        cnxn, issue_rows, summary_rows, label_rows, component_rows, cc_rows,
        notify_rows, fieldvalue_rows, relation_rows, dangling_relation_rows)
300
301
class IssueService(object):
  """The persistence layer for Monorail's issues, comments, and attachments."""
  # ts_mon counter incremented once per spam classification in CreateIssue;
  # the 'type' field carries the classifier's output label.
  spam_labels = ts_mon.CounterMetric('monorail/issue_svc/spam_label')
305
  def __init__(self, project_service, config_service, cache_manager):
    """Initialize this object so that it is ready to use.

    Args:
      project_service: services object for project info.
      config_service: services object for tracker configuration info.
      cache_manager: local cache with distributed invalidation.
    """
    # Tables that represent issue data.
    self.issue_tbl = sql.SQLTableManager(ISSUE_TABLE_NAME)
    self.issuesummary_tbl = sql.SQLTableManager(ISSUESUMMARY_TABLE_NAME)
    self.issue2label_tbl = sql.SQLTableManager(ISSUE2LABEL_TABLE_NAME)
    self.issue2component_tbl = sql.SQLTableManager(ISSUE2COMPONENT_TABLE_NAME)
    self.issue2cc_tbl = sql.SQLTableManager(ISSUE2CC_TABLE_NAME)
    self.issue2notify_tbl = sql.SQLTableManager(ISSUE2NOTIFY_TABLE_NAME)
    self.issue2fieldvalue_tbl = sql.SQLTableManager(ISSUE2FIELDVALUE_TABLE_NAME)
    self.issuerelation_tbl = sql.SQLTableManager(ISSUERELATION_TABLE_NAME)
    self.danglingrelation_tbl = sql.SQLTableManager(DANGLINGRELATION_TABLE_NAME)
    self.issueformerlocations_tbl = sql.SQLTableManager(
        ISSUEFORMERLOCATIONS_TABLE_NAME)

    # Tables that represent comments.
    self.comment_tbl = sql.SQLTableManager(COMMENT_TABLE_NAME)
    self.issueupdate_tbl = sql.SQLTableManager(ISSUEUPDATE_TABLE_NAME)
    self.attachment_tbl = sql.SQLTableManager(ATTACHMENT_TABLE_NAME)

    # Tables for cron tasks.
    self.reindexqueue_tbl = sql.SQLTableManager(REINDEXQUEUE_TABLE_NAME)

    # Tables for generating sequences of local IDs.
    self.localidcounter_tbl = sql.SQLTableManager(LOCALIDCOUNTER_TABLE_NAME)

    # Like a dictionary {(project_id, local_id): issue_id}
    # Use value centric cache here because we cannot store a tuple in the
    # Invalidate table.
    self.issue_id_2lc = IssueIDTwoLevelCache(cache_manager, self)
    # Like a dictionary {issue_id: issue}
    self.issue_2lc = IssueTwoLevelCache(
        cache_manager, self, project_service, config_service)

    # Kept for status/label/field ID lookups and memcache invalidation.
    self._config_service = config_service
347
348 ### Issue ID lookups
349
350 def LookupIssueIDs(self, cnxn, project_local_id_pairs):
351 """Find the global issue IDs given the project ID and local ID of each."""
352 issue_id_dict, _misses = self.issue_id_2lc.GetAll(
353 cnxn, project_local_id_pairs)
354
355 # Put the Issue IDs in the order specified by project_local_id_pairs
356 issue_ids = [issue_id_dict[pair] for pair in project_local_id_pairs
357 if pair in issue_id_dict]
358
359 return issue_ids
360
361 def LookupIssueID(self, cnxn, project_id, local_id):
362 """Find the global issue ID given the project ID and local ID."""
363 issue_ids = self.LookupIssueIDs(cnxn, [(project_id, local_id)])
364 try:
365 return issue_ids[0]
366 except IndexError:
367 raise NoSuchIssueException()
368
369 def ResolveIssueRefs(
370 self, cnxn, ref_projects, default_project_name, refs):
371 """Look up all the referenced issues and return their issue_ids.
372
373 Args:
374 cnxn: connection to SQL database.
375 ref_projects: pre-fetched dict {project_name: project} of all projects
376 mentioned in the refs as well as the default project.
377 default_project_name: string name of the current project, this is used
378 when the project_name in a ref is None.
379 refs: list of (project_name, local_id) pairs. These are parsed from
380 textual references in issue descriptions, comments, and the input
381 in the blocked-on field.
382
383 Returns:
384 A list of issue_ids for all the referenced issues. References to issues
385 in deleted projects and any issues not found are simply ignored.
386 """
387 if not refs:
388 return []
389
390 project_local_id_pairs = []
391 for project_name, local_id in refs:
392 project = ref_projects.get(project_name or default_project_name)
393 if not project or project.state == project_pb2.ProjectState.DELETABLE:
394 continue # ignore any refs to issues in deleted projects
395 project_local_id_pairs.append((project.project_id, local_id))
396
397 issue_ids = self.LookupIssueIDs(cnxn, project_local_id_pairs)
398 return issue_ids
399
400 ### Issue objects
401
  def CreateIssue(
      self, cnxn, services, project_id, summary, status,
      owner_id, cc_ids, labels, field_values, component_ids, reporter_id,
      marked_description, blocked_on=None, blocking=None, attachments=None,
      timestamp=None, index_now=True):
    """Create and store a new issue with all the given information.

    Args:
      cnxn: connection to SQL database.
      services: persistence layer for users, issues, and projects.
      project_id: int ID for the current project.
      summary: one-line summary string summarizing this issue.
      status: string issue status value. E.g., 'New'.
      owner_id: user ID of the issue owner.
      cc_ids: list of user IDs for users to be CC'd on changes.
      labels: list of label strings. E.g., 'Priority-High'.
      field_values: list of FieldValue PBs.
      component_ids: list of int component IDs.
      reporter_id: user ID of the user who reported the issue.
      marked_description: issue description with initial HTML markup.
      blocked_on: list of issue_ids that this issue is blocked on.
      blocking: list of issue_ids that this issue blocks.
      attachments: [(filename, contents, mimetype),...] attachments uploaded at
          the time the comment was made.
      timestamp: time that the issue was entered, defaults to now.
      index_now: True if the issue should be updated in the full text index.

    Returns:
      The integer local ID of the new issue.
    """
    config = self._config_service.GetProjectConfig(cnxn, project_id)
    iids_to_invalidate = set()

    # Canonicalize the user-supplied status and labels; drop empty labels.
    status = framework_bizobj.CanonicalizeLabel(status)
    labels = [framework_bizobj.CanonicalizeLabel(l) for l in labels]
    labels = [l for l in labels if l]

    issue = tracker_pb2.Issue()
    issue.project_id = project_id
    issue.summary = summary
    issue.status = status
    issue.owner_id = owner_id
    issue.cc_ids.extend(cc_ids)
    issue.labels.extend(labels)
    issue.field_values.extend(field_values)
    issue.component_ids.extend(component_ids)
    issue.reporter_id = reporter_id
    if blocked_on is not None:
      iids_to_invalidate.update(blocked_on)
      issue.blocked_on_iids = blocked_on
    if blocking is not None:
      iids_to_invalidate.update(blocking)
      issue.blocking_iids = blocking
    if attachments:
      issue.attachment_count = len(attachments)
    timestamp = timestamp or int(time.time())
    issue.opened_timestamp = timestamp
    issue.modified_timestamp = timestamp

    # The initial description is stored as the issue's first comment.
    comment = self._MakeIssueComment(
        project_id, reporter_id, marked_description,
        attachments=attachments, timestamp=timestamp, was_escaped=True)

    # Set the closed_timestamp both before and after filter rules.
    if not tracker_helpers.MeansOpenInProject(
        tracker_bizobj.GetStatus(issue), config):
      issue.closed_timestamp = timestamp
    filterrules_helpers.ApplyFilterRules(cnxn, services, issue, config)
    if not tracker_helpers.MeansOpenInProject(
        tracker_bizobj.GetStatus(issue), config):
      issue.closed_timestamp = timestamp

    # Ask the spam classifier for a verdict and the score of that verdict.
    classification = services.spam.ClassifyIssue(issue, comment)

    label = classification['outputLabel']
    logging.info('issue/comment classification: %s' % classification)
    score = 0
    for output in classification['outputMulti']:
      if output['label'] == label:
        score = float(output['score'])

    self.spam_labels.increment({'type': label})

    if label == 'spam' and score > settings.classifier_spam_thresh:
      # Must be negative so as not to use up actual local_ids.
      # This can be fixed later if a human declares it to be ham.
      issue.local_id = self.AllocateNextSpamLocalID(cnxn, project_id)
      issue.is_spam = True
    else:
      issue.local_id = self.AllocateNextLocalID(cnxn, project_id)

    issue_id = self.InsertIssue(cnxn, issue)
    comment.issue_id = issue_id
    self.InsertComment(cnxn, comment)

    issue.issue_id = issue_id
    services.spam.RecordClassifierIssueVerdict(
        cnxn, issue, label=='spam', score)

    # Restricted issues must not be served from the nonviewable cache.
    if permissions.HasRestrictions(issue, 'view'):
      self._config_service.InvalidateMemcache(
          [issue], key_prefix='nonviewable:')

    # Add a comment to existing issues saying they are now blocking or
    # blocked on this issue.
    # NOTE(review): issue.project_name is read below but never assigned in
    # this method -- confirm it is populated by a helper above.
    blocked_add_issues = self.GetIssues(cnxn, blocked_on or [])
    for add_issue in blocked_add_issues:
      self.CreateIssueComment(
          cnxn, add_issue.project_id, add_issue.local_id, reporter_id,
          content='',
          amendments=[tracker_bizobj.MakeBlockingAmendment(
              [(issue.project_name, issue.local_id)], [],
              default_project_name=add_issue.project_name)])
    blocking_add_issues = self.GetIssues(cnxn, blocking or [])
    for add_issue in blocking_add_issues:
      self.CreateIssueComment(
          cnxn, add_issue.project_id, add_issue.local_id, reporter_id,
          content='',
          amendments=[tracker_bizobj.MakeBlockedOnAmendment(
              [(issue.project_name, issue.local_id)], [],
              default_project_name=add_issue.project_name)])

    self._UpdateIssuesModified(
        cnxn, iids_to_invalidate, modified_timestamp=timestamp)

    if index_now:
      tracker_fulltext.IndexIssues(
          cnxn, [issue], services.user, self, self._config_service)

    return issue.local_id
532
533 def AllocateNewLocalIDs(self, cnxn, issues):
534 # Filter to just the issues that need new local IDs.
535 issues = [issue for issue in issues if issue.local_id < 0]
536
537 for issue in issues:
538 if issue.local_id < 0:
539 issue.local_id = self.AllocateNextLocalID(cnxn, issue.project_id)
540
541 self.UpdateIssues(cnxn, issues)
542
543 logging.info("AllocateNewLocalIDs")
544
545 def GetAllIssuesInProject(self, cnxn, project_id, min_local_id=None):
546 """Special query to efficiently get ALL issues in a project.
547
548 This is not done while the user is waiting, only by backround tasks.
549
550 Args:
551 cnxn: connection to SQL database.
552 project_id: the ID of the project.
553 min_local_id: optional int to start at.
554
555 Returns:
556 A list of Issue protocol buffers for all issues.
557 """
558 all_local_ids = self.GetAllLocalIDsInProject(
559 cnxn, project_id, min_local_id=min_local_id)
560 return self.GetIssuesByLocalIDs(cnxn, project_id, all_local_ids)
561
562 def GetAnyOnHandIssue(self, issue_ids, start=None, end=None):
563 """Get any one issue from RAM or memcache, otherwise return None."""
564 return self.issue_2lc.GetAnyOnHandItem(issue_ids, start=start, end=end)
565
566 def GetIssuesDict(self, cnxn, issue_ids, use_cache=True, shard_id=None):
567 """Get a dict {iid: issue} from the DB or cache."""
568 issue_dict, _missed_iids = self.issue_2lc.GetAll(
569 cnxn, issue_ids, use_cache=use_cache, shard_id=shard_id)
570 return issue_dict
571
572 def GetIssues(self, cnxn, issue_ids, use_cache=True, shard_id=None):
573 """Get a list of Issue PBs from the DB or cache.
574
575 Args:
576 cnxn: connection to SQL database.
577 issue_ids: integer global issue IDs of the issues.
578 use_cache: optional boolean to turn off using the cache.
579 shard_id: optional int shard_id to limit retrieval.
580
581 Returns:
582 A list of Issue PBs in the same order as the given issue_ids.
583 """
584 issue_dict = self.GetIssuesDict(
585 cnxn, issue_ids, use_cache=use_cache, shard_id=shard_id)
586
587 # Return a list that is ordered the same as the given issue_ids.
588 issue_list = [issue_dict[issue_id] for issue_id in issue_ids
589 if issue_id in issue_dict]
590
591 return issue_list
592
593 def GetIssue(self, cnxn, issue_id):
594 """Get one Issue PB from the DB.
595
596 Args:
597 cnxn: connection to SQL database.
598 issue_id: integer global issue ID of the issue.
599
600 Returns:
601 The requested Issue protocol buffer.
602
603 Raises:
604 NoSuchIssueException: the issue was not found.
605 """
606 issues = self.GetIssues(cnxn, [issue_id])
607 try:
608 return issues[0]
609 except IndexError:
610 raise NoSuchIssueException()
611
612 def GetIssuesByLocalIDs(
613 self, cnxn, project_id, local_id_list, shard_id=None):
614 """Get all the requested issues.
615
616 Args:
617 cnxn: connection to SQL database.
618 project_id: int ID of the project to which the issues belong.
619 local_id_list: list of integer local IDs for the requested issues.
620 shard_id: optional int shard_id to choose a replica.
621
622 Returns:
623 List of Issue PBs for the requested issues. The result Issues
624 will be ordered in the same order as local_id_list.
625 """
626 issue_ids_to_fetch = self.LookupIssueIDs(
627 cnxn, [(project_id, local_id) for local_id in local_id_list])
628 issues = self.GetIssues(cnxn, issue_ids_to_fetch, shard_id=shard_id)
629 return issues
630
631 def GetIssueByLocalID(self, cnxn, project_id, local_id):
632 """Get one Issue PB from the DB.
633
634 Args:
635 cnxn: connection to SQL database.
636 project_id: the ID of the project to which the issue belongs.
637 local_id: integer local ID of the issue.
638
639 Returns:
640 The requested Issue protocol buffer.
641 """
642 issues = self.GetIssuesByLocalIDs(cnxn, project_id, [local_id])
643 try:
644 return issues[0]
645 except IndexError:
646 raise NoSuchIssueException('The issue %s:%d does not exist.' % (
647 project_id, local_id))
648
649 def GetOpenAndClosedIssues(self, cnxn, issue_ids):
650 """Return the requested issues in separate open and closed lists.
651
652 Args:
653 cnxn: connection to SQL database.
654 issue_ids: list of int issue issue_ids.
655
656 Returns:
657 A pair of lists, the first with open issues, second with closed issues.
658 """
659 if not issue_ids:
660 return [], [] # make one common case efficient
661
662 issues = self.GetIssues(cnxn, issue_ids)
663 project_ids = {issue.project_id for issue in issues}
664 configs = self._config_service.GetProjectConfigs(cnxn, project_ids)
665 open_issues = []
666 closed_issues = []
667 for issue in issues:
668 config = configs[issue.project_id]
669 if tracker_helpers.MeansOpenInProject(
670 tracker_bizobj.GetStatus(issue), config):
671 open_issues.append(issue)
672 else:
673 closed_issues.append(issue)
674
675 return open_issues, closed_issues
676
677 def GetCurrentLocationOfMovedIssue(self, cnxn, project_id, local_id):
678 """Return the current location of a moved issue based on old location."""
679 issue_id = int(self.issueformerlocations_tbl.SelectValue(
680 cnxn, 'issue_id', default=0, project_id=project_id, local_id=local_id))
681 if not issue_id:
682 return None, None
683 project_id, local_id = self.issue_tbl.SelectRow(
684 cnxn, cols=['project_id', 'local_id'], id=issue_id)
685 return project_id, local_id
686
687 def GetPreviousLocations(self, cnxn, issue):
688 """Get all the previous locations of an issue."""
689 location_rows = self.issueformerlocations_tbl.Select(
690 cnxn, cols=['project_id', 'local_id'], issue_id=issue.issue_id)
691 locations = [(pid, local_id) for (pid, local_id) in location_rows
692 if pid != issue.project_id or local_id != issue.local_id]
693 return locations
694
  def InsertIssue(self, cnxn, issue):
    """Store the given issue in SQL.

    Args:
      cnxn: connection to SQL database.
      issue: Issue PB to insert into the database.

    Returns:
      The int issue_id of the newly created issue.
    """
    status_id = self._config_service.LookupStatusID(
        cnxn, issue.project_id, issue.status)
    # Row values are ordered to match ISSUE_COLS (minus the 'id' column).
    row = (issue.project_id, issue.local_id, status_id,
           issue.owner_id or None,
           issue.reporter_id,
           issue.opened_timestamp,
           issue.closed_timestamp,
           issue.modified_timestamp,
           issue.derived_owner_id or None,
           self._config_service.LookupStatusID(
               cnxn, issue.project_id, issue.derived_status),
           bool(issue.deleted),
           issue.star_count, issue.attachment_count,
           issue.is_spam)
    # ISSUE_COLs[1:] to skip setting the ID
    # Insert into the Master DB.
    generated_ids = self.issue_tbl.InsertRows(
        cnxn, ISSUE_COLS[1:], [row], commit=False, return_generated_ids=True)
    issue_id = generated_ids[0]
    issue.issue_id = issue_id
    # The shard column depends on the generated ID, so it is filled in
    # with a second statement before the commit below.
    self.issue_tbl.Update(
        cnxn, {'shard': issue_id % settings.num_logical_shards},
        id=issue.issue_id, commit=False)

    # All related rows are written with commit=False so that the issue and
    # its details become visible in a single commit.
    self._UpdateIssuesSummary(cnxn, [issue], commit=False)
    self._UpdateIssuesLabels(
        cnxn, [issue], issue.project_id, commit=False)
    self._UpdateIssuesFields(cnxn, [issue], commit=False)
    self._UpdateIssuesComponents(cnxn, [issue], commit=False)
    self._UpdateIssuesCc(cnxn, [issue], commit=False)
    self._UpdateIssuesNotify(cnxn, [issue], commit=False)
    self._UpdateIssuesRelation(cnxn, [issue], commit=False)
    cnxn.Commit()
    self._config_service.InvalidateMemcache([issue])

    return issue_id
741
  def UpdateIssues(
      self, cnxn, issues, update_cols=None, just_derived=False, commit=True,
      invalidate=True):
    """Update the given issues in SQL.

    Args:
      cnxn: connection to SQL database.
      issues: list of issues to update.
      update_cols: optional list of just the field names to update.
      just_derived: set to True when only updating derived fields.
      commit: set to False to skip the DB commit and do it in the caller.
      invalidate: set to False to leave cache invalidatation to the caller.
    """
    if not issues:
      return

    project_id = issues[0].project_id  # All must be in the same project.
    assert all(issue.project_id == project_id for issue in issues)

    for issue in issues:  # slow, but mysql will not allow REPLACE rows.
      delta = {
          'project_id': issue.project_id,
          'local_id': issue.local_id,
          'owner_id': issue.owner_id or None,
          'status_id': self._config_service.LookupStatusID(
              cnxn, issue.project_id, issue.status) or None,
          'opened': issue.opened_timestamp,
          'closed': issue.closed_timestamp,
          'modified': issue.modified_timestamp,
          'derived_owner_id': issue.derived_owner_id or None,
          'derived_status_id': self._config_service.LookupStatusID(
              cnxn, issue.project_id, issue.derived_status) or None,
          'deleted': bool(issue.deleted),
          'star_count': issue.star_count,
          'attachment_count': issue.attachment_count,
          'is_spam': issue.is_spam,
          }
      # When the caller names specific columns, write only those.
      if update_cols is not None:
        delta = {key: val for key, val in delta.iteritems()
                 if key in update_cols}
      self.issue_tbl.Update(cnxn, delta, id=issue.issue_id, commit=False)

    # Related tables are only rewritten on a full (all-columns) update.
    if not update_cols:
      self._UpdateIssuesLabels(
          cnxn, issues, project_id, commit=False)
      self._UpdateIssuesCc(cnxn, issues, commit=False)
      self._UpdateIssuesFields(cnxn, issues, commit=False)
      self._UpdateIssuesComponents(cnxn, issues, commit=False)
      self._UpdateIssuesNotify(cnxn, issues, commit=False)
      if not just_derived:
        self._UpdateIssuesSummary(cnxn, issues, commit=False)
        self._UpdateIssuesRelation(cnxn, issues, commit=False)

    # Derived-only changes invalidate all cache levels for these keys.
    iids_to_invalidate = [issue.issue_id for issue in issues]
    if just_derived and invalidate:
      self.issue_2lc.InvalidateAllKeys(cnxn, iids_to_invalidate)
    elif invalidate:
      self.issue_2lc.InvalidateKeys(cnxn, iids_to_invalidate)
    if commit:
      cnxn.Commit()
    if invalidate:
      self._config_service.InvalidateMemcache(issues)
804
805 def UpdateIssue(
806 self, cnxn, issue, update_cols=None, just_derived=False, commit=True,
807 invalidate=True):
808 """Update the given issue in SQL.
809
810 Args:
811 cnxn: connection to SQL database.
812 issue: the issue to update.
813 update_cols: optional list of just the field names to update.
814 just_derived: set to True when only updating derived fields.
815 commit: set to False to skip the DB commit and do it in the caller.
816 invalidate: set to False to leave cache invalidatation to the caller.
817 """
818 self.UpdateIssues(
819 cnxn, [issue], update_cols=update_cols, just_derived=just_derived,
820 commit=commit, invalidate=invalidate)
821
822 def _UpdateIssuesSummary(self, cnxn, issues, commit=True):
823 """Update the IssueSummary table rows for the given issues."""
824 self.issuesummary_tbl.InsertRows(
825 cnxn, ISSUESUMMARY_COLS,
826 [(issue.issue_id, issue.summary) for issue in issues],
827 replace=True, commit=commit)
828
829 def _UpdateIssuesLabels(self, cnxn, issues, project_id, commit=True):
830 """Update the Issue2Label table rows for the given issues."""
831 label_rows = []
832 for issue in issues:
833 issue_shard = issue.issue_id % settings.num_logical_shards
834 # TODO(jrobbins): If the user adds many novel labels in one issue update,
835 # that could be slow. Solution is to add all new labels in a batch first.
836 label_rows.extend(
837 (issue.issue_id,
838 self._config_service.LookupLabelID(cnxn, project_id, label), False,
839 issue_shard)
840 for label in issue.labels)
841 label_rows.extend(
842 (issue.issue_id,
843 self._config_service.LookupLabelID(cnxn, project_id, label), True,
844 issue_shard)
845 for label in issue.derived_labels)
846
847 self.issue2label_tbl.Delete(
848 cnxn, issue_id=[issue.issue_id for issue in issues],
849 commit=False)
850 self.issue2label_tbl.InsertRows(
851 cnxn, ISSUE2LABEL_COLS + ['issue_shard'],
852 label_rows, ignore=True, commit=commit)
853
854 def _UpdateIssuesFields(self, cnxn, issues, commit=True):
855 """Update the Issue2FieldValue table rows for the given issues."""
856 fieldvalue_rows = []
857 for issue in issues:
858 issue_shard = issue.issue_id % settings.num_logical_shards
859 for fv in issue.field_values:
860 fieldvalue_rows.append(
861 (issue.issue_id, fv.field_id, fv.int_value, fv.str_value,
862 fv.user_id or None, fv.derived, issue_shard))
863
864 self.issue2fieldvalue_tbl.Delete(
865 cnxn, issue_id=[issue.issue_id for issue in issues], commit=False)
866 self.issue2fieldvalue_tbl.InsertRows(
867 cnxn, ISSUE2FIELDVALUE_COLS + ['issue_shard'],
868 fieldvalue_rows, commit=commit)
869
870 def _UpdateIssuesComponents(self, cnxn, issues, commit=True):
871 """Update the Issue2Component table rows for the given issues."""
872 issue2component_rows = []
873 for issue in issues:
874 issue_shard = issue.issue_id % settings.num_logical_shards
875 issue2component_rows.extend(
876 (issue.issue_id, component_id, False, issue_shard)
877 for component_id in issue.component_ids)
878 issue2component_rows.extend(
879 (issue.issue_id, component_id, True, issue_shard)
880 for component_id in issue.derived_component_ids)
881
882 self.issue2component_tbl.Delete(
883 cnxn, issue_id=[issue.issue_id for issue in issues], commit=False)
884 self.issue2component_tbl.InsertRows(
885 cnxn, ISSUE2COMPONENT_COLS + ['issue_shard'],
886 issue2component_rows, ignore=True, commit=commit)
887
888 def _UpdateIssuesCc(self, cnxn, issues, commit=True):
889 """Update the Issue2Cc table rows for the given issues."""
890 cc_rows = []
891 for issue in issues:
892 issue_shard = issue.issue_id % settings.num_logical_shards
893 cc_rows.extend(
894 (issue.issue_id, cc_id, False, issue_shard)
895 for cc_id in issue.cc_ids)
896 cc_rows.extend(
897 (issue.issue_id, cc_id, True, issue_shard)
898 for cc_id in issue.derived_cc_ids)
899
900 self.issue2cc_tbl.Delete(
901 cnxn, issue_id=[issue.issue_id for issue in issues], commit=False)
902 self.issue2cc_tbl.InsertRows(
903 cnxn, ISSUE2CC_COLS + ['issue_shard'],
904 cc_rows, ignore=True, commit=commit)
905
906 def _UpdateIssuesNotify(self, cnxn, issues, commit=True):
907 """Update the Issue2Notify table rows for the given issues."""
908 notify_rows = []
909 for issue in issues:
910 derived_rows = [[issue.issue_id, email]
911 for email in issue.derived_notify_addrs]
912 notify_rows.extend(derived_rows)
913
914 self.issue2notify_tbl.Delete(
915 cnxn, issue_id=[issue.issue_id for issue in issues], commit=False)
916 self.issue2notify_tbl.InsertRows(
917 cnxn, ISSUE2NOTIFY_COLS, notify_rows, ignore=True, commit=commit)
918
919 def _UpdateIssuesRelation(self, cnxn, issues, commit=True):
920 """Update the IssueRelation table rows for the given issues."""
921 relation_rows = []
922 dangling_relation_rows = []
923 for issue in issues:
924 for dst_issue_id in issue.blocked_on_iids:
925 relation_rows.append((issue.issue_id, dst_issue_id, 'blockedon'))
926 for dst_issue_id in issue.blocking_iids:
927 relation_rows.append((dst_issue_id, issue.issue_id, 'blockedon'))
928 for dst_ref in issue.dangling_blocked_on_refs:
929 dangling_relation_rows.append((
930 issue.issue_id, dst_ref.project, dst_ref.issue_id, 'blockedon'))
931 for dst_ref in issue.dangling_blocking_refs:
932 dangling_relation_rows.append((
933 issue.issue_id, dst_ref.project, dst_ref.issue_id, 'blocking'))
934 if issue.merged_into:
935 relation_rows.append((issue.issue_id, issue.merged_into, 'mergedinto'))
936
937 self.issuerelation_tbl.Delete(
938 cnxn, issue_id=[issue.issue_id for issue in issues], commit=False)
939 self.issuerelation_tbl.Delete(
940 cnxn, dst_issue_id=[issue.issue_id for issue in issues],
941 kind='blockedon', commit=False)
942 self.issuerelation_tbl.InsertRows(
943 cnxn, ISSUERELATION_COLS, relation_rows, ignore=True, commit=commit)
944 self.danglingrelation_tbl.Delete(
945 cnxn, issue_id=[issue.issue_id for issue in issues], commit=False)
946 self.danglingrelation_tbl.InsertRows(
947 cnxn, DANGLINGRELATION_COLS, dangling_relation_rows, ignore=True,
948 commit=commit)
949
950 def _UpdateIssuesModified(
951 self, cnxn, iids, modified_timestamp=None, invalidate=True):
952 """Store a modified timestamp for each of the specified issues."""
953 delta = {'modified': modified_timestamp or int(time.time())}
954 self.issue_tbl.Update(cnxn, delta, id=iids, commit=False)
955 if invalidate:
956 self.InvalidateIIDs(cnxn, iids)
957
  def DeltaUpdateIssue(
      self, cnxn, services, reporter_id, project_id,
      config, issue, status, owner_id, cc_add, cc_remove, comp_ids_add,
      comp_ids_remove, labels_add, labels_remove, field_vals_add,
      field_vals_remove, fields_clear, blocked_on_add=None,
      blocked_on_remove=None, blocking_add=None, blocking_remove=None,
      merged_into=None, index_now=False, comment=None, summary=None,
      iids_to_invalidate=None, rules=None, predicate_asts=None,
      timestamp=None):
    """Update the issue in the database and return a set of update tuples.

    Args:
      cnxn: connection to SQL database.
      services: connections to persistence layer.
      reporter_id: user ID of the user making this change.
      project_id: int ID for the current project.
      config: ProjectIssueConfig PB for this project.
      issue: Issue PB of issue to update.
      status: new issue status string, if a change is desired.
      owner_id: user ID of the new issue owner, if a change is desired.
      cc_add: list of user IDs of users to add to CC list.
      cc_remove: list of user IDs of users to remove from CC list.
      comp_ids_add: list of component IDs to add to the issue.
      comp_ids_remove: list of component IDs to remove from the issue.
      labels_add: list of issue label strings to add.
      labels_remove: list of issue label strings to remove.
      field_vals_add: dict of FieldValue PBs to add.
      field_vals_remove: list of FieldValue PBs to remove.
      fields_clear: list of custom field IDs to clear.
      blocked_on_add: list of IIDs that this issue is now blocked on.
      blocked_on_remove: list of IIDs that this issue is no longer blocked on.
      blocking_add: list of IIDs that this issue is blocking.
      blocking_remove: list of IIDs that this issue is no longer blocking.
      merged_into: IID of issue that this issue was merged into, 0 to clear,
        or None for no change.
      index_now: True if the issue should be updated in the full text index.
      comment: This should be the content of the comment
          corresponding to this change.
      summary: new issue summary, currently only used by GData API.
      iids_to_invalidate: optional set of issue IDs to invalidate. When
          given, this method adds affected IIDs to it in place and leaves
          cache invalidation and the DB commit to the caller (bulk edits).
      rules: optional list of preloaded FilterRule PBs for this project.
      predicate_asts: optional list of QueryASTs for the rules. If rules are
          provided, then predicate_asts should also be provided.
      timestamp: int timestamp set during testing, otherwise defaults to
          int(time.time()).

    Returns:
      A 2-tuple (amendments, comment_pb).  Amendments is a list of Amendment
      PBs that describe the set of metadata updates that the user made, and
      is later used in making the IssueComment.  comment_pb is the stored
      IssueComment, or None if this was a no-op with no comment text.
    """
    # Capture the effective status (explicit or derived) before any edits,
    # so we can detect open <-> closed transitions below.
    old_effective_status = tracker_bizobj.GetStatus(issue)

    # Make all user input safe to echo out again later.
    status = framework_bizobj.CanonicalizeLabel(status)
    labels_add = [framework_bizobj.CanonicalizeLabel(l) for l in labels_add]
    labels_add = [l for l in labels_add if l]
    labels_remove = [framework_bizobj.CanonicalizeLabel(l)
                     for l in labels_remove]
    labels_remove = [l for l in labels_remove if l]

    logging.info(
        'Bulk edit to project_id %s issue.local_id %s',
        project_id, issue.local_id)
    if iids_to_invalidate is None:
      iids_to_invalidate = set([issue.issue_id])
      invalidate = True
    else:
      iids_to_invalidate.add(issue.issue_id)
      invalidate = False  # Caller will do it.

    # Store each updated value in the issue PB, and compute Update PBs
    amendments = []
    if status is not None and status != issue.status:
      amendments.append(tracker_bizobj.MakeStatusAmendment(
          status, issue.status))
      issue.status = status
    if owner_id is not None and owner_id != issue.owner_id:
      amendments.append(tracker_bizobj.MakeOwnerAmendment(
          owner_id, issue.owner_id))
      issue.owner_id = owner_id

    # compute the set of cc'd users added and removed
    cc_add = [cc for cc in cc_add if cc not in issue.cc_ids]
    cc_remove = [cc for cc in cc_remove if cc in issue.cc_ids]
    if cc_add or cc_remove:
      cc_ids = [cc for cc in list(issue.cc_ids) + cc_add
                if cc not in cc_remove]
      issue.cc_ids = cc_ids
      amendments.append(tracker_bizobj.MakeCcAmendment(cc_add, cc_remove))

    # compute the set of components added and removed
    comp_ids_add = [c for c in comp_ids_add if c not in issue.component_ids]
    comp_ids_remove = [c for c in comp_ids_remove if c in issue.component_ids]
    if comp_ids_add or comp_ids_remove:
      comp_ids = [cid for cid in list(issue.component_ids) + comp_ids_add
                  if cid not in comp_ids_remove]
      issue.component_ids = comp_ids
      amendments.append(tracker_bizobj.MakeComponentsAmendment(
          comp_ids_add, comp_ids_remove, config))

    # compute the set of labels added and removed
    (labels, update_labels_add,
     update_labels_remove) = framework_bizobj.MergeLabels(
         issue.labels, labels_add, labels_remove,
         config.exclusive_label_prefixes)

    if update_labels_add or update_labels_remove:
      issue.labels = labels
      amendments.append(tracker_bizobj.MakeLabelsAmendment(
          update_labels_add, update_labels_remove))

    # compute the set of custom fields added and removed
    (field_vals, update_fields_add,
     update_fields_remove) = tracker_bizobj.MergeFields(
         issue.field_values, field_vals_add, field_vals_remove,
         config.field_defs)

    if update_fields_add or update_fields_remove:
      issue.field_values = field_vals
      for fd in config.field_defs:
        added_values_this_field = [
            fv for fv in update_fields_add if fv.field_id == fd.field_id]
        if added_values_this_field:
          amendments.append(tracker_bizobj.MakeFieldAmendment(
              fd.field_id, config,
              [tracker_bizobj.GetFieldValue(fv, {})
               for fv in added_values_this_field],
              old_values=[]))
        removed_values_this_field = [
            fv for fv in update_fields_remove if fv.field_id == fd.field_id]
        if removed_values_this_field:
          amendments.append(tracker_bizobj.MakeFieldAmendment(
              fd.field_id, config, [],
              old_values=[tracker_bizobj.GetFieldValue(fv, {})
                          for fv in removed_values_this_field]))

    if fields_clear:
      field_clear_set = set(fields_clear)
      revised_fields = []
      for fd in config.field_defs:
        if fd.field_id not in field_clear_set:
          revised_fields.extend(
              fv for fv in issue.field_values if fv.field_id == fd.field_id)
        else:
          amendments.append(
              tracker_bizobj.MakeFieldClearedAmendment(fd.field_id, config))
          # Clearing an enum-type field also removes its label-encoded values.
          if fd.field_type == tracker_pb2.FieldTypes.ENUM_TYPE:
            prefix = fd.field_name.lower() + '-'
            filtered_labels = [
                lab for lab in issue.labels
                if not lab.lower().startswith(prefix)]
            issue.labels = filtered_labels

      issue.field_values = revised_fields

    if blocked_on_add or blocked_on_remove:
      old_blocked_on = issue.blocked_on_iids
      # Only report additions/removals that actually change the list.
      blocked_on_add = [iid for iid in blocked_on_add
                        if iid not in old_blocked_on]
      add_refs = [(ref_issue.project_name, ref_issue.local_id)
                  for ref_issue in self.GetIssues(cnxn, blocked_on_add)]
      blocked_on_rm = [iid for iid in blocked_on_remove
                       if iid in old_blocked_on]
      remove_refs = [
          (ref_issue.project_name, ref_issue.local_id)
          for ref_issue in self.GetIssues(cnxn, blocked_on_rm)]
      amendments.append(tracker_bizobj.MakeBlockedOnAmendment(
          add_refs, remove_refs, default_project_name=issue.project_name))
      blocked_on = [iid for iid in old_blocked_on + blocked_on_add
                    if iid not in blocked_on_remove]
      issue.blocked_on_iids = blocked_on
      iids_to_invalidate.update(blocked_on_add + blocked_on_remove)

    if blocking_add or blocking_remove:
      old_blocking = issue.blocking_iids
      blocking_add = [iid for iid in blocking_add
                      if iid not in old_blocking]
      add_refs = [(ref_issue.project_name, ref_issue.local_id)
                  for ref_issue in self.GetIssues(cnxn, blocking_add)]
      blocking_remove = [iid for iid in blocking_remove
                         if iid in old_blocking]
      remove_refs = [
          (ref_issue.project_name, ref_issue.local_id)
          for ref_issue in self.GetIssues(cnxn, blocking_remove)]
      amendments.append(tracker_bizobj.MakeBlockingAmendment(
          add_refs, remove_refs, default_project_name=issue.project_name))
      blocking_refs = [iid for iid in old_blocking + blocking_add
                       if iid not in blocking_remove]
      issue.blocking_iids = blocking_refs
      iids_to_invalidate.update(blocking_add + blocking_remove)

    if merged_into is not None and merged_into != issue.merged_into:
      merged_remove = issue.merged_into
      merged_add = merged_into
      issue.merged_into = merged_into
      # Either endpoint may not exist (e.g. 0 to clear); amendments then
      # carry None for that side.
      try:
        remove_issue = self.GetIssue(cnxn, merged_remove)
        remove_ref = remove_issue.project_name, remove_issue.local_id
        iids_to_invalidate.add(merged_remove)
      except NoSuchIssueException:
        remove_ref = None

      try:
        add_issue = self.GetIssue(cnxn, merged_add)
        add_ref = add_issue.project_name, add_issue.local_id
        iids_to_invalidate.add(merged_add)
      except NoSuchIssueException:
        add_ref = None

      amendments.append(tracker_bizobj.MakeMergedIntoAmendment(
          add_ref, remove_ref, default_project_name=issue.project_name))

    if summary and summary != issue.summary:
      amendments.append(tracker_bizobj.MakeSummaryAmendment(
          summary, issue.summary))
      issue.summary = summary

    # If this was a no-op with no comment, bail out and don't save,
    # invalidate, or re-index anything.
    if not amendments and (not comment or not comment.strip()):
      return [], None

    # Note: no need to check for collisions when the user is doing a delta.

    # update the modified_timestamp for any comment added, even if it was
    # just a text comment with no issue fields changed.
    issue.modified_timestamp = timestamp or int(time.time())

    # Update the closed timestamp before filter rules so that rules
    # can test for closed_timestamp, and also after filter rules
    # so that closed_timestamp will be set if the issue is closed by the rule.
    _UpdateClosedTimestamp(config, issue, old_effective_status)
    if rules is None:
      logging.info('Rules were not given')
      rules = services.features.GetFilterRules(cnxn, config.project_id)
      predicate_asts = filterrules_helpers.ParsePredicateASTs(
          rules, config, None)

    filterrules_helpers.ApplyGivenRules(
        cnxn, services, issue, config, rules, predicate_asts)
    _UpdateClosedTimestamp(config, issue, old_effective_status)

    # Store the issue in SQL.
    self.UpdateIssue(cnxn, issue, commit=False, invalidate=False)

    comment_pb = self.CreateIssueComment(
        cnxn, project_id, issue.local_id, reporter_id, comment,
        amendments=amendments, commit=False)
    self._UpdateIssuesModified(
        cnxn, iids_to_invalidate, modified_timestamp=issue.modified_timestamp,
        invalidate=invalidate)

    # NOTE(review): the commit happens here only when the caller is batching
    # (invalidate=False).  When invalidate is True nothing in this method
    # commits the transaction -- presumably a caller or connection teardown
    # does; confirm this is intentional.
    if not invalidate:
      cnxn.Commit()

    if index_now:
      tracker_fulltext.IndexIssues(
          cnxn, [issue], services.user_service, self, self._config_service)

    return amendments, comment_pb
1217
1218 def InvalidateIIDs(self, cnxn, iids_to_invalidate):
1219 """Invalidate the specified issues in the Invalidate table and memcache."""
1220 issues_to_invalidate = self.GetIssues(cnxn, iids_to_invalidate)
1221 self.issue_2lc.InvalidateKeys(cnxn, iids_to_invalidate)
1222 self._config_service.InvalidateMemcache(issues_to_invalidate)
1223
  def ApplyIssueComment(
      self, cnxn, services, reporter_id, project_id,
      local_id, summary, status, owner_id, cc_ids, labels, field_values,
      component_ids, blocked_on, blocking, dangling_blocked_on_refs,
      dangling_blocking_refs, merged_into, index_now=True,
      page_gen_ts=None, comment=None, inbound_message=None, attachments=None,
      timestamp=None):
    """Update the issue in the database and return info for notifications.

    Unlike DeltaUpdateIssue, this takes the complete new value of each field
    (not add/remove deltas) and computes the amendments itself.

    Args:
      cnxn: connection to SQL database.
      services: connection to persistence layer.
      reporter_id: user ID of the user making this change.
      project_id: int Project ID for the current project.
      local_id: integer local ID of the issue to update.
      summary: new issue summary string.
      status: new issue status string.
      owner_id: user ID of the new issue owner.
      cc_ids: list of user IDs of users to CC when the issue changes.
      labels: list of new issue label strings.
      field_values: list of FieldValue PBs.
      component_ids: list of int component IDs.
      blocked_on: list of IIDs that this issue is blocked on.
      blocking: list of IIDs that this issue is blocking.
      dangling_blocked_on_refs: list of Codesite issues this is blocked on.
      dangling_blocking_refs: list of Codesite issues this is blocking.
      merged_into: IID of issue that this issue was merged into, 0 to clear.
      index_now: True if the issue should be updated in the full text index.
      page_gen_ts: time at which the issue HTML page was generated,
        used in detecting mid-air collisions.
      comment: This should be the content of the comment
          corresponding to this change.
      inbound_message: optional string full text of an email that caused
          this comment to be added.
      attachments: This should be a list of
          [(filename, contents, mimetype),...] attachments uploaded at
          the time the comment was made.
      timestamp: int timestamp set during testing, otherwise defaults to
          int(time.time()).

    Returns:
      (amendments, comment_pb). Amendments is a list of Amendment PBs
      that describe the set of metadata updates that the user made.
      Comment_pb is the IssueComment for the change.

    Raises:
      MidAirCollisionException: indicates that the issue has been
        changed since the user loaded the page.
    """
    status = framework_bizobj.CanonicalizeLabel(status)
    labels = [framework_bizobj.CanonicalizeLabel(l) for l in labels]
    labels = [l for l in labels if l]

    # Use canonical label names
    label_ids = self._config_service.LookupLabelIDs(
        cnxn, project_id, labels, autocreate=True)
    labels = [self._config_service.LookupLabel(cnxn, project_id, l_id)
              for l_id in label_ids]

    # Get the issue and project configurations.
    config = self._config_service.GetProjectConfig(cnxn, project_id)
    issue = self.GetIssueByLocalID(cnxn, project_id, local_id)

    # Store each updated value in the issue PB, and compute amendments
    amendments = []
    iids_to_invalidate = set()

    if summary and summary != issue.summary:
      amendments.append(tracker_bizobj.MakeSummaryAmendment(
          summary, issue.summary))
      issue.summary = summary

    old_effective_status = tracker_bizobj.GetStatus(issue)
    if status != issue.status:
      amendments.append(tracker_bizobj.MakeStatusAmendment(
          status, issue.status))
      issue.status = status

    if owner_id != issue.owner_id:
      amendments.append(tracker_bizobj.MakeOwnerAmendment(
          owner_id, issue.owner_id))
      if owner_id == framework_constants.NO_USER_SPECIFIED:
        issue.reset('owner_id')
      else:
        issue.owner_id = owner_id

    # TODO(jrobbins): factor the CC code into a method and add a test
    # compute the set of cc'd users added and removed
    cc_added = [cc for cc in cc_ids if cc not in issue.cc_ids]
    cc_removed = [cc for cc in issue.cc_ids if cc not in cc_ids]
    if cc_added or cc_removed:
      amendments.append(tracker_bizobj.MakeCcAmendment(cc_added, cc_removed))
      issue.cc_ids = cc_ids

    # TODO(jrobbins): factor the labels code into a method and add a test
    # compute the set of labels added and removed
    labels_added = [lab for lab in labels
                    if lab not in issue.labels]
    labels_removed = [lab for lab in issue.labels
                      if lab not in labels]
    if labels_added or labels_removed:
      amendments.append(tracker_bizobj.MakeLabelsAmendment(
          labels_added, labels_removed))
      issue.labels = labels

    # Group old and new field values by field_id and sort each group so
    # that per-field before/after comparison is order-insensitive.
    old_field_values = collections.defaultdict(list)
    for ofv in issue.field_values:
      # Passing {} because I just want the user_id, not the email address.
      old_field_values[ofv.field_id].append(
          tracker_bizobj.GetFieldValue(ofv, {}))
    for field_id, values in old_field_values.iteritems():
      old_field_values[field_id] = sorted(values)

    new_field_values = collections.defaultdict(list)
    for nfv in field_values:
      new_field_values[nfv.field_id].append(
          tracker_bizobj.GetFieldValue(nfv, {}))
    for field_id, values in new_field_values.iteritems():
      new_field_values[field_id] = sorted(values)

    field_ids_added = {fv.field_id for fv in field_values
                       if fv.field_id not in old_field_values}
    field_ids_removed = {ofv.field_id for ofv in issue.field_values
                         if ofv.field_id not in new_field_values}
    field_ids_changed = {
        fv.field_id for fv in field_values
        if (fv.field_id in old_field_values and
            old_field_values[fv.field_id] != new_field_values[fv.field_id])}

    if field_ids_added or field_ids_removed or field_ids_changed:
      amendments.extend(
          tracker_bizobj.MakeFieldAmendment(fid, config, new_field_values[fid])
          for fid in field_ids_added)
      amendments.extend(
          tracker_bizobj.MakeFieldAmendment(
              fid, config, new_field_values[fid],
              old_values=old_field_values[fid])
          for fid in field_ids_changed)
      amendments.extend(
          tracker_bizobj.MakeFieldAmendment(fid, config, [])
          for fid in field_ids_removed)

      issue.field_values = field_values

    comps_added = [comp for comp in component_ids
                   if comp not in issue.component_ids]
    comps_removed = [comp for comp in issue.component_ids
                     if comp not in component_ids]
    if comps_added or comps_removed:
      amendments.append(tracker_bizobj.MakeComponentsAmendment(
          comps_added, comps_removed, config))
      issue.component_ids = component_ids

    if merged_into != issue.merged_into:
      # TODO(jrobbins): refactor this into LookupIssueRefByIssueID().
      try:
        merged_remove = self.GetIssue(cnxn, issue.merged_into)
        remove_ref = merged_remove.project_name, merged_remove.local_id
        iids_to_invalidate.add(issue.merged_into)
      except NoSuchIssueException:
        remove_ref = None

      try:
        merged_add = self.GetIssue(cnxn, merged_into)
        add_ref = merged_add.project_name, merged_add.local_id
        iids_to_invalidate.add(merged_into)
      except NoSuchIssueException:
        add_ref = None

      issue.merged_into = merged_into
      amendments.append(tracker_bizobj.MakeMergedIntoAmendment(
          add_ref, remove_ref, default_project_name=issue.project_name))

    blockers_added, blockers_removed = framework_helpers.ComputeListDeltas(
        issue.blocked_on_iids, blocked_on)
    danglers_added, danglers_removed = framework_helpers.ComputeListDeltas(
        issue.dangling_blocked_on_refs, dangling_blocked_on_refs)
    blocked_add_issues = []
    blocked_remove_issues = []
    if blockers_added or blockers_removed or danglers_added or danglers_removed:
      blocked_add_issues = self.GetIssues(cnxn, blockers_added)
      add_refs = [(ref_issue.project_name, ref_issue.local_id)
                  for ref_issue in blocked_add_issues]
      add_refs.extend([(ref.project, ref.issue_id) for ref in danglers_added])
      blocked_remove_issues = self.GetIssues(cnxn, blockers_removed)
      remove_refs = [
          (ref_issue.project_name, ref_issue.local_id)
          for ref_issue in blocked_remove_issues]
      remove_refs.extend([(ref.project, ref.issue_id)
                          for ref in danglers_removed])
      amendments.append(tracker_bizobj.MakeBlockedOnAmendment(
          add_refs, remove_refs, default_project_name=issue.project_name))
      issue.blocked_on_iids = blocked_on
      issue.dangling_blocked_on_refs = dangling_blocked_on_refs
      iids_to_invalidate.update(blockers_added + blockers_removed)

    blockers_added, blockers_removed = framework_helpers.ComputeListDeltas(
        issue.blocking_iids, blocking)
    danglers_added, danglers_removed = framework_helpers.ComputeListDeltas(
        issue.dangling_blocking_refs, dangling_blocking_refs)
    blocking_add_issues = []
    blocking_remove_issues = []
    if blockers_added or blockers_removed or danglers_added or danglers_removed:
      blocking_add_issues = self.GetIssues(cnxn, blockers_added)
      add_refs = [(ref_issue.project_name, ref_issue.local_id)
                  for ref_issue in blocking_add_issues]
      add_refs.extend([(ref.project, ref.issue_id) for ref in danglers_added])
      blocking_remove_issues = self.GetIssues(cnxn, blockers_removed)
      remove_refs = [
          (ref_issue.project_name, ref_issue.local_id)
          for ref_issue in blocking_remove_issues]
      remove_refs.extend([(ref.project, ref.issue_id)
                          for ref in danglers_removed])
      amendments.append(tracker_bizobj.MakeBlockingAmendment(
          add_refs, remove_refs, default_project_name=issue.project_name))
      issue.blocking_iids = blocking
      issue.dangling_blocking_refs = dangling_blocking_refs
      iids_to_invalidate.update(blockers_added + blockers_removed)

    logging.info('later amendments so far is %r', amendments)

    # Raise an exception if the issue was changed by another user
    # while this user was viewing/editing the issue.
    if page_gen_ts and amendments:
      # The issue timestamp is stored in seconds, convert to microseconds to
      # match the page_gen_ts.
      issue_ts = issue.modified_timestamp * 1000000
      if issue_ts > page_gen_ts:
        logging.info('%d > %d', issue_ts, page_gen_ts)
        logging.info('amendments: %s', amendments)
        # Forget all the modificiations made to this issue in RAM.
        self.issue_2lc.InvalidateKeys(cnxn, [issue.issue_id])
        raise MidAirCollisionException('issue %d' % local_id, local_id)

    # update the modified_timestamp for any comment added, even if it was
    # just a text comment with no issue fields changed.
    issue.modified_timestamp = timestamp or int(time.time())

    # Update closed_timestamp both before and after filter rules.
    _UpdateClosedTimestamp(config, issue, old_effective_status)
    filterrules_helpers.ApplyFilterRules(cnxn, services, issue, config)
    _UpdateClosedTimestamp(config, issue, old_effective_status)

    self.UpdateIssue(cnxn, issue)
    # TODO(jrobbins): only invalidate nonviewable if the following changed:
    # restriction label, owner, cc, or user-type custom field.
    self._config_service.InvalidateMemcache([issue], key_prefix='nonviewable:')

    classification = services.spam.ClassifyComment(comment)

    label = classification['outputLabel']
    logging.info('comment classification: %s' % classification)
    score = 0
    is_spam = False
    # Find the score for the winning label; flag the comment as spam when
    # that label is 'spam' and the score exceeds the configured threshold.
    for output in classification['outputMulti']:
      if output['label'] == label:
        score = float(output['score'])
      if label == 'spam' and score > settings.classifier_spam_thresh:
        logging.info('spam comment: %s' % comment)
        is_spam = True

    if amendments or (comment and comment.strip()) or attachments:
      logging.info('amendments = %r', amendments)
      comment_pb = self.CreateIssueComment(
          cnxn, project_id, local_id, reporter_id, comment,
          amendments=amendments, attachments=attachments,
          inbound_message=inbound_message, is_spam=is_spam)
      services.spam.RecordClassifierCommentVerdict(
          cnxn, comment_pb, is_spam, score)
    else:
      comment_pb = None

    # Add a comment to the newly added issues saying they are now blocking
    # this issue.
    for add_issue in blocked_add_issues:
      self.CreateIssueComment(
          cnxn, add_issue.project_id, add_issue.local_id, reporter_id,
          content='',
          amendments=[tracker_bizobj.MakeBlockingAmendment(
              [(issue.project_name, issue.local_id)], [],
              default_project_name=add_issue.project_name)])
    # Add a comment to the newly removed issues saying they are no longer
    # blocking this issue.
    for remove_issue in blocked_remove_issues:
      self.CreateIssueComment(
          cnxn, remove_issue.project_id, remove_issue.local_id, reporter_id,
          content='',
          amendments=[tracker_bizobj.MakeBlockingAmendment(
              [], [(issue.project_name, issue.local_id)],
              default_project_name=remove_issue.project_name)])

    # Add a comment to the newly added issues saying they are now blocked on
    # this issue.
    for add_issue in blocking_add_issues:
      self.CreateIssueComment(
          cnxn, add_issue.project_id, add_issue.local_id, reporter_id,
          content='',
          amendments=[tracker_bizobj.MakeBlockedOnAmendment(
              [(issue.project_name, issue.local_id)], [],
              default_project_name=add_issue.project_name)])
    # Add a comment to the newly removed issues saying they are no longer
    # blocked on this issue.
    for remove_issue in blocking_remove_issues:
      self.CreateIssueComment(
          cnxn, remove_issue.project_id, remove_issue.local_id, reporter_id,
          content='',
          amendments=[tracker_bizobj.MakeBlockedOnAmendment(
              [], [(issue.project_name, issue.local_id)],
              default_project_name=remove_issue.project_name)])

    self._UpdateIssuesModified(
        cnxn, iids_to_invalidate, modified_timestamp=issue.modified_timestamp)

    # NOTE(review): DeltaUpdateIssue passes services.user_service to
    # IndexIssues; here services.user is passed -- confirm which attribute
    # name is correct.
    if index_now:
      tracker_fulltext.IndexIssues(
          cnxn, [issue], services.user, self, self._config_service)

    if is_spam:
      sequence_num = len(self.GetCommentsForIssue(cnxn, issue.issue_id)) - 1
      # Soft-deletes have to have a user ID, so spam comments are
      # just "deleted" by the commenter.
      self.SoftDeleteComment(cnxn, project_id, local_id, sequence_num,
          reporter_id, services.user, is_spam=True)
    return amendments, comment_pb
1548
1549 def RelateIssues(self, cnxn, issue_relation_dict, commit=True):
1550 """Update the IssueRelation table rows for the given relationships.
1551
1552 issue_relation_dict is a mapping of 'source' issues to 'destination' issues,
1553 paired with the kind of relationship connecting the two.
1554 """
1555 relation_rows = []
1556 for src_iid, dests in issue_relation_dict.iteritems():
1557 for dst_iid, kind in dests:
1558 if kind == 'blocking':
1559 relation_rows.append((dst_iid, src_iid, 'blockedon'))
1560 elif kind == 'blockedon' or kind == 'mergedinto':
1561 relation_rows.append((src_iid, dst_iid, kind))
1562
1563 self.issuerelation_tbl.InsertRows(
1564 cnxn, ISSUERELATION_COLS, relation_rows, ignore=True, commit=commit)
1565
  def CopyIssues(self, cnxn, dest_project, issues, user_service, copier_id):
    """Copy the given issues into the destination project.

    Args:
      cnxn: connection to SQL database.
      dest_project: Project PB of the project that receives the copies.
      issues: list of Issue PBs to copy.
      user_service: persistence layer for users, used when re-indexing.
      copier_id: int user ID that is recorded as reporter of each copy.

    Returns:
      A list of the newly created Issue PBs.
    """
    created_issues = []
    iids_to_invalidate = set()

    for target_issue in issues:
      # Only summary, labels, and field values are copied; the copy is
      # opened fresh by copier_id at the current time.
      new_issue = tracker_pb2.Issue()
      new_issue.project_id = dest_project.project_id
      new_issue.project_name = dest_project.project_name
      new_issue.summary = target_issue.summary
      new_issue.labels.extend(target_issue.labels)
      new_issue.field_values.extend(target_issue.field_values)
      new_issue.reporter_id = copier_id

      timestamp = int(time.time())
      new_issue.opened_timestamp = timestamp
      new_issue.modified_timestamp = timestamp

      # The first comment of an issue is its description.
      target_comments = self.GetCommentsForIssue(cnxn, target_issue.issue_id)
      initial_summary_comment = target_comments[0]

      # Note that blocking and merge_into are not copied.
      if target_issue.blocked_on_iids:
        blocked_on = target_issue.blocked_on_iids
        iids_to_invalidate.update(blocked_on)
        new_issue.blocked_on_iids = blocked_on

      # Gather list of attachments from the target issue's summary comment.
      # _MakeIssueComment expects a list of [(filename, contents, mimetype)].
      # Attachment bytes are re-read from GCS so the copy gets its own
      # attachment records.
      attachments = []
      for attachment in initial_summary_comment.attachments:
        object_path = ('/' + app_identity.get_default_gcs_bucket_name() +
                       attachment.gcs_object_id)
        with cloudstorage.open(object_path, 'r') as f:
          content = f.read()
          attachments.append(
              [attachment.filename, content, attachment.mimetype])

      if attachments:
        new_issue.attachment_count = len(attachments)

      # Create the same summary comment as the target issue.
      comment = self._MakeIssueComment(
          dest_project.project_id, copier_id, initial_summary_comment.content,
          attachments=attachments, timestamp=timestamp, was_escaped=True)

      new_issue.local_id = self.AllocateNextLocalID(
          cnxn, dest_project.project_id)
      issue_id = self.InsertIssue(cnxn, new_issue)
      comment.issue_id = issue_id
      self.InsertComment(cnxn, comment)

      # Restricted issues must not linger in stale nonviewable memcache
      # entries computed before this copy existed.
      if permissions.HasRestrictions(new_issue, 'view'):
        self._config_service.InvalidateMemcache(
            [new_issue], key_prefix='nonviewable:')

      tracker_fulltext.IndexIssues(
          cnxn, [new_issue], user_service, self, self._config_service)
      created_issues.append(new_issue)

    # The referenced issues are all modified when the relationship is added.
    # NOTE(review): 'timestamp' here is the value from the last loop
    # iteration; this assumes 'issues' is non-empty -- confirm callers
    # never pass an empty list.
    self._UpdateIssuesModified(
        cnxn, iids_to_invalidate, modified_timestamp=timestamp)

    return created_issues
1631
1632 def MoveIssues(self, cnxn, dest_project, issues, user_service):
1633 """Move the given issues into the destination project."""
1634 old_location_rows = [
1635 (issue.issue_id, issue.project_id, issue.local_id)
1636 for issue in issues]
1637 moved_back_iids = set()
1638
1639 former_locations_in_project = self.issueformerlocations_tbl.Select(
1640 cnxn, cols=ISSUEFORMERLOCATIONS_COLS,
1641 project_id=dest_project.project_id,
1642 issue_id=[issue.issue_id for issue in issues])
1643 former_locations = {
1644 issue_id: local_id
1645 for issue_id, project_id, local_id in former_locations_in_project}
1646
1647 # Remove the issue id from issue_id_2lc so that it does not stay
1648 # around in cache and memcache.
1649 # The Key of IssueIDTwoLevelCache is (project_id, local_id).
1650 issue_id_2lc_key = (issues[0].project_id, issues[0].local_id)
1651 self.issue_id_2lc.InvalidateKeys(cnxn, [issue_id_2lc_key])
1652
1653 for issue in issues:
1654 if issue.issue_id in former_locations:
1655 dest_id = former_locations[issue.issue_id]
1656 moved_back_iids.add(issue.issue_id)
1657 else:
1658 dest_id = self.AllocateNextLocalID(cnxn, dest_project.project_id)
1659
1660 issue.local_id = dest_id
1661 issue.project_id = dest_project.project_id
1662 issue.project_name = dest_project.project_name
1663
1664 # Rewrite each whole issue so that status and label IDs are looked up
1665 # in the context of the destination project.
1666 self.UpdateIssues(cnxn, issues)
1667
1668 # Comments also have the project_id because it is needed for an index.
1669 self.comment_tbl.Update(
1670 cnxn, {'project_id': dest_project.project_id},
1671 issue_id=[issue.issue_id for issue in issues], commit=False)
1672
1673 # Record old locations so that we can offer links if the user looks there.
1674 self.issueformerlocations_tbl.InsertRows(
1675 cnxn, ISSUEFORMERLOCATIONS_COLS, old_location_rows, ignore=True,
1676 commit=False)
1677 cnxn.Commit()
1678
1679 tracker_fulltext.IndexIssues(
1680 cnxn, issues, user_service, self, self._config_service)
1681
1682 return moved_back_iids
1683
  def ExpungeFormerLocations(self, cnxn, project_id):
    """Delete history of issues that were in this project but moved out.

    Args:
      cnxn: connection to SQL database.
      project_id: int ID of the project whose former-location rows are
          being deleted.
    """
    self.issueformerlocations_tbl.Delete(cnxn, project_id=project_id)
1687
1688 def ExpungeIssues(self, cnxn, issue_ids):
1689 """Completely delete the specified issues from the database."""
1690 logging.info('expunging the issues %r', issue_ids)
1691 tracker_fulltext.UnindexIssues(issue_ids)
1692
1693 remaining_iids = issue_ids[:]
1694
1695 # Note: these are purposely not done in a transaction to allow
1696 # incremental progress in what might be a very large change.
1697 # We are not concerned about non-atomic deletes because all
1698 # this data will be gone eventually anyway.
1699 while remaining_iids:
1700 iids_in_chunk = remaining_iids[:CHUNK_SIZE]
1701 remaining_iids = remaining_iids[CHUNK_SIZE:]
1702 self.issuesummary_tbl.Delete(cnxn, issue_id=iids_in_chunk)
1703 self.issue2label_tbl.Delete(cnxn, issue_id=iids_in_chunk)
1704 self.issue2component_tbl.Delete(cnxn, issue_id=iids_in_chunk)
1705 self.issue2cc_tbl.Delete(cnxn, issue_id=iids_in_chunk)
1706 self.issue2notify_tbl.Delete(cnxn, issue_id=iids_in_chunk)
1707 self.issueupdate_tbl.Delete(cnxn, issue_id=iids_in_chunk)
1708 self.attachment_tbl.Delete(cnxn, issue_id=iids_in_chunk)
1709 self.comment_tbl.Delete(cnxn, issue_id=iids_in_chunk)
1710 self.issuerelation_tbl.Delete(cnxn, issue_id=iids_in_chunk)
1711 self.issuerelation_tbl.Delete(cnxn, dst_issue_id=iids_in_chunk)
1712 self.danglingrelation_tbl.Delete(cnxn, issue_id=iids_in_chunk)
1713 self.issueformerlocations_tbl.Delete(cnxn, issue_id=iids_in_chunk)
1714 self.reindexqueue_tbl.Delete(cnxn, issue_id=iids_in_chunk)
1715 self.issue_tbl.Delete(cnxn, id=iids_in_chunk)
1716
  def SoftDeleteIssue(self, cnxn, project_id, local_id, deleted, user_service):
    """Set the deleted boolean on the indicated issue and store it.

    Args:
      cnxn: connection to SQL database.
      project_id: int project ID for the current project.
      local_id: int local ID of the issue to freeze/unfreeze.
      deleted: boolean, True to soft-delete, False to undelete.
      user_service: persistence layer for users, used to lookup user IDs.
    """
    issue = self.GetIssueByLocalID(cnxn, project_id, local_id)
    issue.deleted = deleted
    # Persist only the changed column, then re-index so that search
    # results reflect the new deleted state.
    self.UpdateIssue(cnxn, issue, update_cols=['deleted'])
    tracker_fulltext.IndexIssues(
        cnxn, [issue], user_service, self, self._config_service)
1732
  def DeleteComponentReferences(self, cnxn, component_id):
    """Delete any references to the specified component.

    Args:
      cnxn: connection to SQL database.
      component_id: int ID of the component being removed.
    """
    # TODO(jrobbins): add tasks to re-index any affected issues.
    # Note: if this call fails, some data could be left
    # behind, but it would not be displayed, and it could always be
    # GC'd from the DB later.
    self.issue2component_tbl.Delete(cnxn, component_id=component_id)
1740
1741 ### Local ID generation
1742
  def InitializeLocalID(self, cnxn, project_id):
    """Initialize the local ID counter for the specified project to zero.

    Both the regular issue counter and the spam counter start at 0, so
    the first allocation in a new project yields ID 1.

    Args:
      cnxn: connection to SQL database.
      project_id: int ID of the project.
    """
    self.localidcounter_tbl.InsertRow(
        cnxn, project_id=project_id, used_local_id=0, used_spam_id=0)
1752
  def SetUsedLocalID(self, cnxn, project_id):
    """Set the local ID counter based on existing issues.

    Args:
      cnxn: connection to SQL database.
      project_id: int ID of the project.

    Returns:
      The highest local ID currently in use, which the counter now holds.
    """
    highest_id = self.GetHighestLocalID(cnxn, project_id)
    self.localidcounter_tbl.Update(
        cnxn, {'used_local_id': highest_id}, project_id=project_id)
    return highest_id
1764
  def AllocateNextLocalID(self, cnxn, project_id):
    """Return the next available issue ID in the specified project.

    Args:
      cnxn: connection to SQL database.
      project_id: int ID of the project.

    Returns:
      The next local ID.
    """
    try:
      next_local_id = self.localidcounter_tbl.IncrementCounterValue(
          cnxn, 'used_local_id', project_id=project_id)
    except AssertionError:
      # NOTE(review): IncrementCounterValue presumably asserts when the
      # counter row is missing or out of sync; recompute the counter from
      # existing issues and hand out the next ID after the highest in use.
      next_local_id = self.SetUsedLocalID(cnxn, project_id) + 1
    return next_local_id
1781
  def SetUsedSpamID(self, cnxn, project_id):
    """Advance the spam ID counter by one and return the new value.

    Unlike SetUsedLocalID, this does not recompute the counter from
    existing issues; it simply increments the stored value.

    Args:
      cnxn: connection to SQL database.
      project_id: int ID of the project.

    Returns:
      The new (incremented) spam counter value.
    """
    current_id = self.localidcounter_tbl.SelectValue(
        cnxn, 'used_spam_id', project_id=project_id)
    current_id = current_id or 0  # Will be None if project has no issues.

    self.localidcounter_tbl.Update(
        cnxn, {'used_spam_id': current_id + 1}, project_id=project_id)
    return current_id + 1
1796
  def AllocateNextSpamLocalID(self, cnxn, project_id):
    """Return the next available spam issue ID in the specified project.

    Args:
      cnxn: connection to SQL database.
      project_id: int ID of the project.

    Returns:
      The next local ID, negated: spam issues use negative local IDs so
      they never collide with regular issue IDs.
    """
    try:
      next_spam_id = self.localidcounter_tbl.IncrementCounterValue(
          cnxn, 'used_spam_id', project_id=project_id)
    except AssertionError:
      # NOTE(review): fall back when the counter row is missing or stale,
      # mirroring AllocateNextLocalID -- confirm IncrementCounterValue's
      # failure mode.
      next_spam_id = self.SetUsedSpamID(cnxn, project_id) + 1
    return -next_spam_id
1813
1814 def GetHighestLocalID(self, cnxn, project_id):
1815 """Return the highest used issue ID in the specified project.
1816
1817 Args:
1818 cnxn: connection to SQL database.
1819 project_id: int ID of the project.
1820
1821 Returns:
1822 The highest local ID for an active or moved issues.
1823 """
1824 highest = self.issue_tbl.SelectValue(
1825 cnxn, 'MAX(local_id)', project_id=project_id)
1826 highest = highest or 0 # It will be None if the project has no issues.
1827 highest_former = self.issueformerlocations_tbl.SelectValue(
1828 cnxn, 'MAX(local_id)', project_id=project_id)
1829 highest_former = highest_former or 0
1830 return max(highest, highest_former)
1831
1832 def GetAllLocalIDsInProject(self, cnxn, project_id, min_local_id=None):
1833 """Return the list of local IDs only, not the actual issues.
1834
1835 Args:
1836 cnxn: connection to SQL database.
1837 project_id: the ID of the project to which the issue belongs.
1838 min_local_id: point to start at.
1839
1840 Returns:
1841 A range object of local IDs from 1 to N, or from min_local_id to N. It
1842 may be the case that some of those local IDs are no longer used, e.g.,
1843 if some issues were moved out of this project.
1844 """
1845 if not min_local_id:
1846 min_local_id = 1
1847 highest_local_id = self.GetHighestLocalID(cnxn, project_id)
1848 return range(min_local_id, highest_local_id + 1)
1849
  def ExpungeLocalIDCounters(self, cnxn, project_id):
    """Delete history of local ids that were in this project.

    Args:
      cnxn: connection to SQL database.
      project_id: int ID of the project whose counter row is deleted.
    """
    self.localidcounter_tbl.Delete(cnxn, project_id=project_id)
1853
1854 ### Comments
1855
1856 def _UnpackComment(self, comment_row):
1857 """Partially construct a Comment PB from a DB row."""
1858 (comment_id, issue_id, created, project_id, commenter_id, content,
1859 inbound_message, was_escaped, deleted_by, is_spam) = comment_row
1860 comment = tracker_pb2.IssueComment()
1861 comment.id = comment_id
1862 comment.issue_id = issue_id
1863 comment.timestamp = created
1864 comment.project_id = project_id
1865 comment.user_id = commenter_id
1866 comment.content = content or ''
1867 comment.inbound_message = inbound_message or ''
1868 comment.was_escaped = bool(was_escaped)
1869 comment.deleted_by = deleted_by or 0
1870 comment.is_spam = bool(is_spam)
1871 return comment
1872
  def _UnpackAmendment(self, amendment_row):
    """Construct an Amendment PB from a DB row.

    Args:
      amendment_row: one row from the IssueUpdate table.

    Returns:
      A (tracker_pb2.Amendment, comment_id) pair, where comment_id
      identifies the comment this amendment belongs to.
    """
    (_id, _issue_id, comment_id, field_name,
     old_value, new_value, added_user_id, removed_user_id,
     custom_field_name) = amendment_row
    amendment = tracker_pb2.Amendment()
    # Field names are stored lowercase in the DB; the enum uses uppercase.
    field_enum = tracker_pb2.FieldID(field_name.upper())
    amendment.field = field_enum

    # TODO(jrobbins): display old values in more cases.
    if new_value is not None:
      amendment.newvalue = new_value
    if old_value is not None:
      amendment.oldvalue = old_value
    # Each row carries at most one added or one removed user; rows for
    # the same comment and field are merged by _ConsolidateAmendments.
    if added_user_id:
      amendment.added_user_ids.append(added_user_id)
    if removed_user_id:
      amendment.removed_user_ids.append(removed_user_id)
    if custom_field_name:
      amendment.custom_field_name = custom_field_name
    return amendment, comment_id
1894
1895 def _ConsolidateAmendments(self, amendments):
1896 """Consoliodate amendments of the same field in one comment into one
1897 amendment PB."""
1898
1899 fields_dict = {}
1900 result = []
1901
1902 for amendment in amendments:
1903 fields_dict.setdefault(amendment.field, []).append(amendment)
1904 for field, amendments in fields_dict.iteritems():
1905 new_amendment = tracker_pb2.Amendment()
1906 new_amendment.field = field
1907 for amendment in amendments:
1908 if amendment.newvalue is not None:
1909 new_amendment.newvalue = amendment.newvalue
1910 if amendment.oldvalue is not None:
1911 new_amendment.oldvalue = amendment.oldvalue
1912 if amendment.added_user_ids:
1913 new_amendment.added_user_ids.extend(amendment.added_user_ids)
1914 if amendment.removed_user_ids:
1915 new_amendment.removed_user_ids.extend(amendment.removed_user_ids)
1916 if amendment.custom_field_name:
1917 new_amendment.custom_field_name = amendment.custom_field_name
1918 result.append(new_amendment)
1919 return result
1920
1921 def _UnpackAttachment(self, attachment_row):
1922 """Construct an Attachment PB from a DB row."""
1923 (attachment_id, _issue_id, comment_id, filename, filesize, mimetype,
1924 deleted, gcs_object_id) = attachment_row
1925 attach = tracker_pb2.Attachment()
1926 attach.attachment_id = attachment_id
1927 attach.filename = filename
1928 attach.filesize = filesize
1929 attach.mimetype = mimetype
1930 attach.deleted = bool(deleted)
1931 attach.gcs_object_id = gcs_object_id
1932 return attach, comment_id
1933
1934 def _DeserializeComments(
1935 self, comment_rows, amendment_rows, attachment_rows):
1936 """Turn rows into IssueComment PBs."""
1937 results = [] # keep objects in the same order as the rows
1938 results_dict = {} # for fast access when joining.
1939
1940 for comment_row in comment_rows:
1941 comment = self._UnpackComment(comment_row)
1942 results.append(comment)
1943 results_dict[comment.id] = comment
1944
1945 for amendment_row in amendment_rows:
1946 amendment, comment_id = self._UnpackAmendment(amendment_row)
1947 try:
1948 results_dict[comment_id].amendments.extend([amendment])
1949 except KeyError:
1950 logging.error('Found amendment for missing comment: %r', comment_id)
1951
1952 for attachment_row in attachment_rows:
1953 attach, comment_id = self._UnpackAttachment(attachment_row)
1954 try:
1955 results_dict[comment_id].attachments.append(attach)
1956 except KeyError:
1957 logging.error('Found attachment for missing comment: %r', comment_id)
1958
1959 for c in results:
1960 c.amendments = self._ConsolidateAmendments(c.amendments)
1961
1962 return results
1963
1964 # TODO(jrobbins): make this a private method and expose just the interface
1965 # needed by activities.py.
1966 def GetComments(self, cnxn, where=None, order_by=None, **kwargs):
1967 """Retrieve comments from SQL."""
1968 # Explicitly specify column Comment.id to allow joins on other tables that
1969 # have an id column.
1970 order_by = order_by or [('created', [])]
1971 comment_rows = self.comment_tbl.Select(
1972 cnxn, cols=COMMENT_COLS, where=where,
1973 order_by=order_by, **kwargs)
1974 cids = [row[0] for row in comment_rows]
1975 amendment_rows = self.issueupdate_tbl.Select(
1976 cnxn, cols=ISSUEUPDATE_COLS, comment_id=cids)
1977 attachment_rows = self.attachment_tbl.Select(
1978 cnxn, cols=ATTACHMENT_COLS, comment_id=cids)
1979
1980 comments = self._DeserializeComments(
1981 comment_rows, amendment_rows, attachment_rows)
1982 return comments
1983
1984 def GetComment(self, cnxn, comment_id):
1985 """Get the requested comment, or raise an exception."""
1986 comments = self.GetComments(cnxn, id=comment_id)
1987 try:
1988 return comments[0]
1989 except IndexError:
1990 raise NoSuchCommentException()
1991
1992 def GetCommentsForIssue(self, cnxn, issue_id):
1993 """Return all IssueComment PBs for the specified issue.
1994
1995 Args:
1996 cnxn: connection to SQL database.
1997 issue_id: int global ID of the issue.
1998
1999 Returns:
2000 A list of the IssueComment protocol buffers for the description
2001 and comments on this issue.
2002 """
2003 comments = self.GetComments(cnxn, issue_id=[issue_id])
2004 for i, comment in enumerate(comments):
2005 comment.sequence = i
2006
2007 return comments
2008
2009 def GetCommentsByID(self, cnxn, comment_ids, sequences):
2010 """Return all IssueComment PBs by comment ids.
2011
2012 Args:
2013 cnxn: connection to SQL database.
2014 comment_ids: a list of comment ids.
2015 sequences: sequence of the comments.
2016
2017 Returns:
2018 A list of the IssueComment protocol buffers for the description
2019 and comments on this issue.
2020 """
2021 order_by = [('created ASC', [])]
2022 comment_rows = self.comment_tbl.Select(
2023 cnxn, cols=COMMENT_COLS, order_by=order_by, id=comment_ids)
2024 amendment_rows = self.issueupdate_tbl.Select(
2025 cnxn, cols=ISSUEUPDATE_COLS, comment_id=comment_ids)
2026 attachment_rows = self.attachment_tbl.Select(
2027 cnxn, cols=ATTACHMENT_COLS, comment_id=comment_ids)
2028
2029 comments = self._DeserializeComments(
2030 comment_rows, amendment_rows, attachment_rows)
2031
2032 for i in xrange(len(comment_ids)):
2033 comments[i].sequence = sequences[i]
2034
2035 return comments
2036
  def GetAbbrCommentsForIssue(self, cnxn, issue_id):
    """Get all abbreviated comments for the specified issue.

    Args:
      cnxn: connection to SQL database.
      issue_id: int global ID of the issue.

    Returns:
      A list of raw DB rows (columns per ABBR_COMMENT_COLS, not
      IssueComment PBs), in chronological order.
    """
    order_by = [('created ASC', [])]
    comment_rows = self.comment_tbl.Select(
        cnxn, cols=ABBR_COMMENT_COLS, issue_id=[issue_id], order_by=order_by)

    return comment_rows
2044
2045 # TODO(jrobbins): remove this message because it is too slow when an issue
2046 # has a huge number of comments.
2047 def GetCommentsForIssues(self, cnxn, issue_ids):
2048 """Return all IssueComment PBs for each issue ID in the given list.
2049
2050 Args:
2051 cnxn: connection to SQL database.
2052 issue_ids: list of integer global issue IDs.
2053
2054 Returns:
2055 Dict {issue_id: [IssueComment, ...]} with IssueComment protocol
2056 buffers for the description and comments on each issue.
2057 """
2058 comments = self.GetComments(cnxn, issue_id=issue_ids)
2059
2060 comments_dict = collections.defaultdict(list)
2061 for comment in comments:
2062 comment.sequence = len(comments_dict[comment.issue_id])
2063 comments_dict[comment.issue_id].append(comment)
2064
2065 return comments_dict
2066
  def InsertComment(self, cnxn, comment, commit=True):
    """Store the given issue comment in SQL.

    Args:
      cnxn: connection to SQL database.
      comment: IssueComment PB to insert into the database.
      commit: set to False to avoid doing the commit for now.
    """
    # Insert the comment row first so its generated ID is available for
    # the amendment and attachment child rows below.
    comment_id = self.comment_tbl.InsertRow(
        cnxn, issue_id=comment.issue_id, created=comment.timestamp,
        project_id=comment.project_id,
        commenter_id=comment.user_id, content=comment.content,
        inbound_message=comment.inbound_message,
        was_escaped=comment.was_escaped,
        deleted_by=comment.deleted_by or None,  # 0 is stored as NULL.
        is_spam=comment.is_spam,
        commit=commit)
    comment.id = comment_id

    amendment_rows = []
    for amendment in comment.amendments:
      field_enum = str(amendment.field).lower()
      # A value amendment with no user changes becomes one row holding
      # oldvalue/newvalue; user-list amendments instead get one row per
      # added or removed user.
      if (amendment.get_assigned_value('newvalue') is not None and
          not amendment.added_user_ids and not amendment.removed_user_ids):
        amendment_rows.append((
            comment.issue_id, comment_id, field_enum,
            amendment.oldvalue, amendment.newvalue,
            None, None, amendment.custom_field_name))
      for added_user_id in amendment.added_user_ids:
        amendment_rows.append((
            comment.issue_id, comment_id, field_enum, None, None,
            added_user_id, None, amendment.custom_field_name))
      for removed_user_id in amendment.removed_user_ids:
        amendment_rows.append((
            comment.issue_id, comment_id, field_enum, None, None,
            None, removed_user_id, amendment.custom_field_name))
    # ISSUEUPDATE_COLS[1:] to skip id column.
    self.issueupdate_tbl.InsertRows(
        cnxn, ISSUEUPDATE_COLS[1:], amendment_rows, commit=commit)

    attachment_rows = []
    for attach in comment.attachments:
      attachment_rows.append([
          comment.issue_id, comment.id, attach.filename, attach.filesize,
          attach.mimetype, attach.deleted, attach.gcs_object_id])
    # ATTACHMENT_COLS[1:] to skip the auto-generated id column.
    self.attachment_tbl.InsertRows(
        cnxn, ATTACHMENT_COLS[1:], attachment_rows, commit=commit)
2114
2115 def _UpdateComment(self, cnxn, comment, update_cols=None):
2116 """Update the given issue comment in SQL.
2117
2118 Args:
2119 cnxn: connection to SQL database.
2120 comment: IssueComment PB to update in the database.
2121 update_cols: optional list of just the field names to update.
2122 """
2123 delta = {
2124 'commenter_id': comment.user_id,
2125 'content': comment.content,
2126 'deleted_by': comment.deleted_by or None,
2127 'is_spam': comment.is_spam,
2128 }
2129 if update_cols is not None:
2130 delta = {key: val for key, val in delta.iteritems()
2131 if key in update_cols}
2132
2133 self.comment_tbl.Update(cnxn, delta, id=comment.id)
2134
2135 def _MakeIssueComment(
2136 self, project_id, user_id, content, inbound_message=None,
2137 amendments=None, attachments=None, timestamp=None, was_escaped=False,
2138 is_spam=False):
2139 """Create in IssueComment protocol buffer in RAM.
2140
2141 Args:
2142 project_id: Project with the issue.
2143 user_id: the user ID of the user who entered the comment.
2144 content: string body of the comment.
2145 inbound_message: optional string full text of an email that
2146 caused this comment to be added.
2147 amendments: list of Amendment PBs describing the
2148 metadata changes that the user made along w/ comment.
2149 attachments: [(filename, contents, mimetype),...] attachments uploaded at
2150 the time the comment was made.
2151 timestamp: time at which the comment was made, defaults to now.
2152 was_escaped: True if the comment was HTML escaped already.
2153 is_spam: True if the comment was classified as spam.
2154 Returns:
2155 The new IssueComment protocol buffer.
2156
2157 The content may have some markup done during input processing.
2158
2159 Any attachments are immediately stored.
2160 """
2161 comment = tracker_pb2.IssueComment()
2162 comment.project_id = project_id
2163 comment.user_id = user_id
2164 comment.content = content or ''
2165 comment.was_escaped = was_escaped
2166 comment.is_spam = is_spam
2167 if not timestamp:
2168 timestamp = int(time.time())
2169 comment.timestamp = int(timestamp)
2170 if inbound_message:
2171 comment.inbound_message = inbound_message
2172 if amendments:
2173 logging.info('amendments is %r', amendments)
2174 comment.amendments.extend(amendments)
2175
2176 if attachments:
2177 for filename, body, mimetype in attachments:
2178 gcs_object_id = gcs_helpers.StoreObjectInGCS(body, mimetype, project_id)
2179 attach = tracker_pb2.Attachment()
2180 # attachment id is determined later by the SQL DB.
2181 attach.filename = filename
2182 attach.filesize = len(body)
2183 attach.mimetype = mimetype
2184 attach.gcs_object_id = gcs_object_id
2185 comment.attachments.extend([attach])
2186 logging.info("Save attachment with object_id: %s" % gcs_object_id)
2187
2188 return comment
2189
  def CreateIssueComment(
      self, cnxn, project_id, local_id, user_id, content, inbound_message=None,
      amendments=None, attachments=None, timestamp=None, is_spam=False,
      commit=True):
    """Create and store a new comment on the specified issue.

    Args:
      cnxn: connection to SQL database.
      project_id: int ID of the current Project.
      local_id: the issue on which to add the comment.
      user_id: the user ID of the user who entered the comment.
      content: string body of the comment.
      inbound_message: optional string full text of an email that caused
          this comment to be added.
      amendments: list of Amendment PBs describing the
          metadata changes that the user made along w/ comment.
      attachments: [(filename, contents, mimetype),...] attachments uploaded at
          the time the comment was made.
      timestamp: time at which the comment was made, defaults to now.
      is_spam: True if the comment is classified as spam.
      commit: set to False to not commit to DB yet.

    Returns:
      The new IssueComment protocol buffer.

    Note that we assume that the content is safe to echo out
    again. The content may have some markup done during input
    processing.
    """
    issue = self.GetIssueByLocalID(cnxn, project_id, local_id)

    comment = self._MakeIssueComment(
        issue.project_id, user_id, content, amendments=amendments,
        inbound_message=inbound_message, attachments=attachments,
        timestamp=timestamp, is_spam=is_spam)
    comment.issue_id = issue.issue_id

    if attachments:
      # Keep the issue's cached attachment total in sync with the new
      # attachments on this comment.
      # NOTE(review): UpdateIssue is called without a commit flag, so this
      # count update may be committed even when commit=False was requested
      # for the comment insert -- confirm intended.
      issue.attachment_count = issue.attachment_count + len(attachments)
      self.UpdateIssue(cnxn, issue, update_cols=['attachment_count'])

    self.InsertComment(cnxn, comment, commit=commit)

    return comment
2234
  def SoftDeleteComment(
      self, cnxn, project_id, local_id, sequence_num, deleted_by_user_id,
      user_service, delete=True, reindex=True, is_spam=False):
    """Mark comment as un/deleted, which shows/hides it from average users.

    Args:
      cnxn: connection to SQL database.
      project_id: int ID of the project containing the issue.
      local_id: int local ID of the issue containing the comment.
      sequence_num: int position of the comment among the issue's comments.
      deleted_by_user_id: int user ID recorded as having deleted it.
      user_service: persistence layer for users, used when re-indexing.
      delete: True to soft-delete, False to undelete.
      reindex: set to False to skip re-indexing the issue.
      is_spam: True to also mark the comment as spam.
    """
    issue = self.GetIssueByLocalID(cnxn, project_id, local_id)

    all_comments = self.GetCommentsForIssue(cnxn, issue.issue_id)
    try:
      issue_comment = all_comments[sequence_num]
    except IndexError:
      logging.warning(
          'Tried to (un)delete non-existent comment #%s in issue %s:%s',
          sequence_num, project_id, local_id)
      return

    # Update number of attachments: only attachments that are not
    # themselves soft-deleted count toward the issue total.
    attachments = 0
    if issue_comment.attachments:
      for attachment in issue_comment.attachments:
        if not attachment.deleted:
          attachments += 1

    # Delete only if it's not in deleted state
    if delete:
      if not issue_comment.deleted_by:
        issue_comment.deleted_by = deleted_by_user_id
        issue.attachment_count = issue.attachment_count - attachments

    # Undelete only if it's in deleted state
    elif issue_comment.deleted_by:
      issue_comment.deleted_by = 0
      issue.attachment_count = issue.attachment_count + attachments

    # The spam flag is written unconditionally, even when the deleted
    # state did not change.
    issue_comment.is_spam = is_spam
    self._UpdateComment(
        cnxn, issue_comment, update_cols=['deleted_by', 'is_spam'])
    self.UpdateIssue(cnxn, issue, update_cols=['attachment_count'])

    # Reindex the issue to take the comment deletion/undeletion into account.
    if reindex:
      tracker_fulltext.IndexIssues(
          cnxn, [issue], user_service, self, self._config_service)
2277
2278 ### Attachments
2279
2280 def GetAttachmentAndContext(self, cnxn, attachment_id):
2281 """Load a IssueAttachment from database, and its comment ID and IID.
2282
2283 Args:
2284 cnxn: connection to SQL database.
2285 attachment_id: long integer unique ID of desired issue attachment.
2286
2287 Returns:
2288 An Attachment protocol buffer that contains metadata about the attached
2289 file, or None if it doesn't exist. Also, the comment ID and issue IID
2290 of the comment and issue that contain this attachment.
2291
2292 Raises:
2293 NoSuchAttachmentException: the attachment was not found.
2294 """
2295 if attachment_id is None:
2296 raise NoSuchAttachmentException()
2297
2298 attachment_row = self.attachment_tbl.SelectRow(
2299 cnxn, cols=ATTACHMENT_COLS, id=attachment_id)
2300 if attachment_row:
2301 (attach_id, issue_id, comment_id, filename, filesize, mimetype,
2302 deleted, gcs_object_id) = attachment_row
2303 if not deleted:
2304 attachment = tracker_pb2.Attachment(
2305 attachment_id=attach_id, filename=filename, filesize=filesize,
2306 mimetype=mimetype, deleted=bool(deleted),
2307 gcs_object_id=gcs_object_id)
2308 return attachment, comment_id, issue_id
2309
2310 raise NoSuchAttachmentException()
2311
2312 def _UpdateAttachment(self, cnxn, attach, update_cols=None):
2313 """Update attachment metadata in the DB.
2314
2315 Args:
2316 cnxn: connection to SQL database.
2317 attach: IssueAttachment PB to update in the DB.
2318 update_cols: optional list of just the field names to update.
2319 """
2320 delta = {
2321 'filename': attach.filename,
2322 'filesize': attach.filesize,
2323 'mimetype': attach.mimetype,
2324 'deleted': bool(attach.deleted),
2325 }
2326 if update_cols is not None:
2327 delta = {key: val for key, val in delta.iteritems()
2328 if key in update_cols}
2329
2330 self.attachment_tbl.Update(cnxn, delta, id=attach.attachment_id)
2331
  def SoftDeleteAttachment(
      self, cnxn, project_id, local_id, seq_num, attach_id, user_service,
      delete=True, index_now=True):
    """Mark attachment as un/deleted, which shows/hides it from avg users.

    Args:
      cnxn: connection to SQL database.
      project_id: int ID of the project containing the issue.
      local_id: int local ID of the issue.
      seq_num: int position of the comment that holds the attachment.
      attach_id: int ID of the attachment to (un)delete.
      user_service: persistence layer for users, used when re-indexing.
      delete: True to soft-delete, False to undelete.
      index_now: set to False to skip re-indexing the issue.
    """
    issue = self.GetIssueByLocalID(cnxn, project_id, local_id)
    all_comments = self.GetCommentsForIssue(cnxn, issue.issue_id)
    try:
      issue_comment = all_comments[seq_num]
    except IndexError:
      logging.warning(
          'Tried to (un)delete attachment on non-existent comment #%s in '
          'issue %s:%s', seq_num, project_id, local_id)
      return

    # Linear scan over this comment's attachments; no early break, so the
    # last match wins (attachment IDs are presumably unique -- confirm).
    attachment = None
    for attach in issue_comment.attachments:
      if attach.attachment_id == attach_id:
        attachment = attach

    if not attachment:
      logging.warning(
          'Tried to (un)delete non-existent attachment #%s in project '
          '%s issue %s', attach_id, project_id, local_id)
      return

    # Only adjust the issue's attachment count when the enclosing comment
    # is itself visible (not soft-deleted).
    if not issue_comment.deleted_by:
      # Decrement attachment count only if it's not in deleted state
      if delete:
        if not attachment.deleted:
          issue.attachment_count = issue.attachment_count - 1

      # Increment attachment count only if it's in deleted state
      elif attachment.deleted:
        issue.attachment_count = issue.attachment_count + 1

    attachment.deleted = delete

    self._UpdateAttachment(cnxn, attachment, update_cols=['deleted'])
    self.UpdateIssue(cnxn, issue, update_cols=['attachment_count'])

    if index_now:
      tracker_fulltext.IndexIssues(
          cnxn, [issue], user_service, self, self._config_service)
2375
2376 ### Reindex queue
2377
def EnqueueIssuesForIndexing(self, cnxn, issue_ids):
  """Add the given issue IDs to the ReindexQueue table."""
  rows_to_insert = [(iid,) for iid in issue_ids]
  self.reindexqueue_tbl.InsertRows(
      cnxn, ['issue_id'], rows_to_insert, ignore=True)
2383
def ReindexIssues(self, cnxn, num_to_reindex, user_service):
  """Reindex some issues specified in the IndexQueue table."""
  # Oldest queue entries first, capped at the requested batch size.
  rows = self.reindexqueue_tbl.Select(
      cnxn, order_by=[('created', [])], limit=num_to_reindex)
  iids = [row[0] for row in rows]

  if iids:
    issues = self.GetIssues(cnxn, iids)
    tracker_fulltext.IndexIssues(
        cnxn, issues, user_service, self, self._config_service)
    # Only dequeue entries after they have been successfully indexed.
    self.reindexqueue_tbl.Delete(cnxn, issue_id=iids)

  return len(iids)
2397
2398 ### Search functions
2399
def RunIssueQuery(
    self, cnxn, left_joins, where, order_by, shard_id=None, limit=None):
  """Run a SQL query to find matching issue IDs.

  Args:
    cnxn: connection to SQL database.
    left_joins: list of SQL LEFT JOIN clauses.
    where: list of SQL WHERE clauses.
    order_by: list of SQL ORDER BY clauses.
    shard_id: int shard ID to focus the search.
    limit: int maximum number of results, defaults to
        settings.search_limit_per_shard.

  Returns:
    (issue_ids, capped) where issue_ids is a list of the result issue IDs,
    and capped is True if the number of results reached the limit.
  """
  if not limit:
    limit = settings.search_limit_per_shard
  # Deleted issues are never returned by search.
  full_where = where + [('Issue.deleted = %s', [False])]
  rows = self.issue_tbl.Select(
      cnxn, shard_id=shard_id, distinct=True, cols=['Issue.id'],
      left_joins=left_joins, where=full_where, order_by=order_by,
      limit=limit)
  result_iids = [row[0] for row in rows]
  return result_iids, len(result_iids) >= limit
2426
def GetIIDsByLabelIDs(self, cnxn, label_ids, project_id, shard_id):
  """Return a list of IIDs for issues with any of the given label IDs."""
  # When sharded, restrict the query to the requested shard.
  where = [] if shard_id is None else [('shard = %s', [shard_id])]

  rows = self.issue_tbl.Select(
      cnxn, shard_id=shard_id, cols=['id'],
      left_joins=[('Issue2Label ON Issue.id = Issue2Label.issue_id', [])],
      label_id=label_ids, project_id=project_id, where=where)
  return [row[0] for row in rows]
2440
def GetIIDsByParticipant(self, cnxn, user_ids, project_ids, shard_id):
  """Return IIDs for issues where any of the given users participate."""
  where = []
  if shard_id is not None:
    where.append(('shard = %s', [shard_id]))
  if project_ids:
    cond_str = 'Issue.project_id IN (%s)' % sql.PlaceHolders(project_ids)
    where.append((cond_str, project_ids))

  iids = []

  # TODO(jrobbins): Combine these 3 queries into one with ORs. It currently
  # is not the bottleneck.
  # Reporter, owner, and derived owner are all columns on Issue itself.
  for role_kwargs in [
      {'reporter_id': user_ids},
      {'owner_id': user_ids},
      {'derived_owner_id': user_ids}]:
    rows = self.issue_tbl.Select(
        cnxn, cols=['id'], where=where, shard_id=shard_id, **role_kwargs)
    iids.extend(row[0] for row in rows)

  # CC'd users live in the Issue2Cc join table.
  rows = self.issue_tbl.Select(
      cnxn, cols=['id'],
      left_joins=[('Issue2Cc ON Issue2Cc.issue_id = Issue.id', [])],
      cc_id=user_ids,
      where=where + [('cc_id IS NOT NULL', [])],
      shard_id=shard_id)
  iids.extend(row[0] for row in rows)

  # Users named in a user-valued custom field that grants the View perm.
  rows = self.issue_tbl.Select(
      cnxn, cols=['Issue.id'],
      left_joins=[
          ('Issue2FieldValue ON Issue.id = Issue2FieldValue.issue_id', []),
          ('FieldDef ON Issue2FieldValue.field_id = FieldDef.id', [])],
      user_id=user_ids, grants_perm='View',
      where=where + [('user_id IS NOT NULL', [])],
      shard_id=shard_id)
  iids.extend(row[0] for row in rows)

  return iids
2492
2493
def _UpdateClosedTimestamp(config, issue, old_effective_status):
  """Set or clear issue.closed_timestamp based on a status change.

  If the status is changing from open to closed, the closed_timestamp is
  set to the current time.

  If the status is changing from closed to open, the closed_timestamp is
  unset.

  If the status is changing from one closed status to another, or from one
  open status to another, nothing is done.

  Args:
    config: the project configuration.
    issue: the issue being updated (a protocol buffer).
    old_effective_status: the old issue status string. E.g., 'New'
  """
  old_was_open = tracker_helpers.MeansOpenInProject(
      old_effective_status, config)
  now_is_open = tracker_helpers.MeansOpenInProject(
      tracker_bizobj.GetStatus(issue), config)

  if old_was_open and not now_is_open:
    # open -> closed: record the moment of closing.
    logging.info('setting closed_timestamp on issue: %d', issue.local_id)
    issue.closed_timestamp = int(time.time())
  elif now_is_open and not old_was_open:
    # closed -> open: the close time no longer applies.
    logging.info('clearing closed_timestamp on issue: %s', issue.local_id)
    issue.reset('closed_timestamp')
2529
2530
class Error(Exception):
  """Base class for all exceptions raised by this package."""
2534
2535
class NoSuchIssueException(Error):
  """Raised when the requested issue does not exist."""
2539
2540
class NoSuchAttachmentException(Error):
  """Raised when the requested attachment does not exist."""
2544
2545
class NoSuchCommentException(Error):
  """Raised when the requested comment does not exist."""
2549
2550
class MidAirCollisionException(Error):
  """Raised when another user updated the same item concurrently."""

  def __init__(self, name, continue_issue_id):
    super(MidAirCollisionException, self).__init__()
    # Human-readable name for the artifact being edited.
    self.name = name
    # ID of the issue where the user can start over.
    self.continue_issue_id = continue_issue_id
OLDNEW
« no previous file with comments | « appengine/monorail/services/fulltext_helpers.py ('k') | appengine/monorail/services/project_svc.py » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698