Chromium Code Reviews

Side by Side Diff: appengine/monorail/search/test/frontendsearchpipeline_test.py

Issue 1868553004: Open Source Monorail (Closed)
Base URL: https://chromium.googlesource.com/infra/infra.git@master
Patch Set: Rebase (created 4 years, 8 months ago)
1 # Copyright 2016 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style
3 # license that can be found in the LICENSE file or at
4 # https://developers.google.com/open-source/licenses/bsd
5
6 """Tests for the frontendsearchpipeline module."""
7
8 import mox
9 import unittest
10
11 from google.appengine.api import memcache
12 from google.appengine.api import modules
13 from google.appengine.ext import testbed
14 from google.appengine.api import urlfetch
15
16 import settings
17 from framework import profiler
18 from framework import sorting
19 from framework import urls
20 from proto import ast_pb2
21 from proto import project_pb2
22 from proto import tracker_pb2
23 from search import frontendsearchpipeline
24 from search import searchpipeline
25 from services import service_manager
26 from testing import fake
27 from testing import testing_helpers
28 from tracker import tracker_bizobj
29
30
31 # Just an example timestamp. The value does not matter.
32 NOW = 2444950132
33
34
35 class FrontendSearchPipelineTest(unittest.TestCase):
36
37 def setUp(self):
38 self.cnxn = 'fake cnxn'
39 self.config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
40 self.services = service_manager.Services(
41 user=fake.UserService(),
42 project=fake.ProjectService(),
43 issue=fake.IssueService(),
44 config=fake.ConfigService(),
45 cache_manager=fake.CacheManager())
46 self.profiler = profiler.Profiler()
47 self.services.user.TestAddUser('a@example.com', 111L)
48 self.project = self.services.project.TestAddProject('proj', project_id=789)
49 self.mr = testing_helpers.MakeMonorailRequest(
50 path='/p/proj/issues/list', project=self.project)
51 self.mr.me_user_id = 111L
52
53 self.issue_1 = fake.MakeTestIssue(
54 789, 1, 'one', 'New', 111L, labels=['Priority-High'])
55 self.services.issue.TestAddIssue(self.issue_1)
56 self.issue_2 = fake.MakeTestIssue(
57 789, 2, 'two', 'New', 111L, labels=['Priority-Low'])
58 self.services.issue.TestAddIssue(self.issue_2)
59 self.issue_3 = fake.MakeTestIssue(
60 789, 3, 'three', 'New', 111L, labels=['Priority-Medium'])
61 self.services.issue.TestAddIssue(self.issue_3)
62 self.mr.sort_spec = 'Priority'
63
64 self.mox = mox.Mox()
65 self.testbed = testbed.Testbed()
66 self.testbed.activate()
67 self.testbed.init_user_stub()
68 self.testbed.init_memcache_stub()
69 sorting.InitializeArtValues(self.services)
70
71 def tearDown(self):
72 self.testbed.deactivate()
73 self.mox.UnsetStubs()
74 self.mox.ResetAll()
75
76 def testSearchForIIDs_AllResultsCached_AllAtRiskCached(self):
77 unfiltered_iids = {1: [1001, 1011]}
78 nonviewable_iids = {1: set()}
79 self.mox.StubOutWithMock(frontendsearchpipeline, '_StartBackendSearch')
80 frontendsearchpipeline._StartBackendSearch(
81 self.mr, set(['proj']), [789], mox.IsA(tracker_pb2.ProjectIssueConfig),
82 unfiltered_iids, {}, nonviewable_iids, set(), self.services).AndReturn([])
83 self.mox.StubOutWithMock(frontendsearchpipeline, '_FinishBackendSearch')
84 frontendsearchpipeline._FinishBackendSearch([])
85 self.mox.ReplayAll()
86
87 pipeline = frontendsearchpipeline.FrontendSearchPipeline(
88 self.mr, self.services, self.profiler, 100)
89 pipeline.unfiltered_iids = unfiltered_iids
90 pipeline.nonviewable_iids = nonviewable_iids
91 pipeline.SearchForIIDs()
92 self.mox.VerifyAll()
93 self.assertEqual(2, pipeline.total_count)
94 self.assertEqual(2, pipeline.counts[1])
95 self.assertEqual([1001, 1011], pipeline.filtered_iids[1])
96
97 def testMergeAndSortIssues_EmptyResult(self):
98 pipeline = frontendsearchpipeline.FrontendSearchPipeline(
99 self.mr, self.services, self.profiler, 100)
100 pipeline.filtered_iids = {0: [], 1: [], 2: []}
101
102 pipeline.MergeAndSortIssues()
103 self.assertEqual([], pipeline.allowed_iids)
104 self.assertEqual([], pipeline.allowed_results)
105 self.assertEqual({}, pipeline.users_by_id)
106
107 def testMergeAndSortIssues_Normal(self):
108 pipeline = frontendsearchpipeline.FrontendSearchPipeline(
109 self.mr, self.services, self.profiler, 100)
110 # In this unit test we do not call SearchForIIDs(); instead we set
111 # pipeline.filtered_iids directly.
112 pipeline.filtered_iids = {
113 0: [],
114 1: [self.issue_1.issue_id],
115 2: [self.issue_2.issue_id],
116 3: [self.issue_3.issue_id]
117 }
118
119 pipeline.MergeAndSortIssues()
120 self.assertEqual(
121 [self.issue_1.issue_id, self.issue_2.issue_id, self.issue_3.issue_id],
122 pipeline.allowed_iids)
123 self.assertEqual(
124 [self.issue_1, self.issue_3, self.issue_2], # high, medium, low.
125 pipeline.allowed_results)
126 self.assertEqual([111L], pipeline.users_by_id.keys())
127
128 def testDetermineIssuePosition_Normal(self):
129 pipeline = frontendsearchpipeline.FrontendSearchPipeline(
130 self.mr, self.services, self.profiler, 100)
131 # In this unit test we do not call SearchForIIDs(); instead we set
132 # pipeline.filtered_iids directly.
133 pipeline.filtered_iids = {
134 0: [],
135 1: [self.issue_1.issue_id],
136 2: [self.issue_2.issue_id],
137 3: [self.issue_3.issue_id]
138 }
139
140 prev_iid, index, next_iid = pipeline.DetermineIssuePosition(self.issue_3)
141 # The total ordering is issue_1, issue_3, issue_2 for high, med, low.
142 self.assertEqual(self.issue_1.issue_id, prev_iid)
143 self.assertEqual(1, index)
144 self.assertEqual(self.issue_2.issue_id, next_iid)
145
146 def testDetermineIssuePosition_NotInResults(self):
147 pipeline = frontendsearchpipeline.FrontendSearchPipeline(
148 self.mr, self.services, self.profiler, 100)
149 # In this unit test we do not call SearchForIIDs(); instead we set
150 # pipeline.filtered_iids directly.
151 pipeline.filtered_iids = {
152 0: [],
153 1: [self.issue_1.issue_id],
154 2: [self.issue_2.issue_id],
155 3: []
156 }
157
158 prev_iid, index, next_iid = pipeline.DetermineIssuePosition(self.issue_3)
159 # The total ordering is issue_1, issue_3, issue_2 for high, med, low.
160 self.assertEqual(None, prev_iid)
161 self.assertEqual(None, index)
162 self.assertEqual(None, next_iid)
163
164 def testDetermineIssuePositionInShard_IssueIsInShard(self):
165 pipeline = frontendsearchpipeline.FrontendSearchPipeline(
166 self.mr, self.services, self.profiler, 100)
167 # Let's assume issues 1, 2, and 3 are all in the same shard.
168 pipeline.filtered_iids = {
169 0: [self.issue_1.issue_id, self.issue_2.issue_id, self.issue_3.issue_id],
170 }
171
172 # The total ordering is issue_1, issue_3, issue_2 for high, med, low.
173 prev_cand, index, next_cand = pipeline._DetermineIssuePositionInShard(
174 0, self.issue_1, {})
175 self.assertEqual(None, prev_cand)
176 self.assertEqual(0, index)
177 self.assertEqual(self.issue_3, next_cand)
178
179 prev_cand, index, next_cand = pipeline._DetermineIssuePositionInShard(
180 0, self.issue_3, {})
181 self.assertEqual(self.issue_1, prev_cand)
182 self.assertEqual(1, index)
183 self.assertEqual(self.issue_2, next_cand)
184
185 prev_cand, index, next_cand = pipeline._DetermineIssuePositionInShard(
186 0, self.issue_2, {})
187 self.assertEqual(self.issue_3, prev_cand)
188 self.assertEqual(2, index)
189 self.assertEqual(None, next_cand)
190
191 def testDetermineIssuePositionInShard_IssueIsNotInShard(self):
192 pipeline = frontendsearchpipeline.FrontendSearchPipeline(
193 self.mr, self.services, self.profiler, 100)
194
195 # The total ordering is issue_1, issue_3, issue_2 for high, med, low.
196 pipeline.filtered_iids = {
197 0: [self.issue_2.issue_id, self.issue_3.issue_id],
198 }
199 prev_cand, index, next_cand = pipeline._DetermineIssuePositionInShard(
200 0, self.issue_1, {})
201 self.assertEqual(None, prev_cand)
202 self.assertEqual(0, index)
203 self.assertEqual(self.issue_3, next_cand)
204
205 pipeline.filtered_iids = {
206 0: [self.issue_1.issue_id, self.issue_2.issue_id],
207 }
208 prev_cand, index, next_cand = pipeline._DetermineIssuePositionInShard(
209 0, self.issue_3, {})
210 self.assertEqual(self.issue_1, prev_cand)
211 self.assertEqual(1, index)
212 self.assertEqual(self.issue_2, next_cand)
213
214 pipeline.filtered_iids = {
215 0: [self.issue_1.issue_id, self.issue_3.issue_id],
216 }
217 prev_cand, index, next_cand = pipeline._DetermineIssuePositionInShard(
218 0, self.issue_2, {})
219 self.assertEqual(self.issue_3, prev_cand)
220 self.assertEqual(2, index)
221 self.assertEqual(None, next_cand)
222
223 def testAccumulateSampleIssues_Empty(self):
224 """When the search gave no results, there cannot be any samples."""
225 sample_dict = {}
226 needed_iids = []
227 pipeline = frontendsearchpipeline.FrontendSearchPipeline(
228 self.mr, self.services, self.profiler, 100)
229 issue_ids = []
230 pipeline._AccumulateSampleIssues(issue_ids, sample_dict, needed_iids)
231 self.assertEqual({}, sample_dict)
232 self.assertEqual([], needed_iids)
233
234 def testAccumulateSampleIssues_Small(self):
235 """When the search gave few results, don't bother with samples."""
236 sample_dict = {}
237 needed_iids = []
238 pipeline = frontendsearchpipeline.FrontendSearchPipeline(
239 self.mr, self.services, self.profiler, 100)
240 issue_ids = [78901, 78902]
241 pipeline._AccumulateSampleIssues(issue_ids, sample_dict, needed_iids)
242 self.assertEqual({}, sample_dict)
243 self.assertEqual([], needed_iids)
244
245 def testAccumulateSampleIssues_Normal(self):
246 """We will choose at least one sample for every 10 results in a shard."""
247 sample_dict = {}
248 needed_iids = []
249 pipeline = frontendsearchpipeline.FrontendSearchPipeline(
250 self.mr, self.services, self.profiler, 100)
251 issues = []
252 for i in range(23):
253 issue = fake.MakeTestIssue(789, 100 + i, 'samp test', 'New', 111L)
254 issues.append(issue)
255 self.services.issue.TestAddIssue(issue)
256
257 issue_ids = [issue.issue_id for issue in issues]
258 pipeline._AccumulateSampleIssues(issue_ids, sample_dict, needed_iids)
259 self.assertEqual(2, len(needed_iids))
260 for sample_iid in needed_iids:
261 self.assertIn(sample_iid, issue_ids)
262
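A selection rule consistent with all three sampling tests above (no samples for small shards, and roughly one evenly spaced sample per 10 results) would look like the sketch below. This is an illustration only, not the actual _AccumulateSampleIssues implementation; SAMPLE_SPACING is a hypothetical name.

    SAMPLE_SPACING = 10  # Hypothetical constant: one sample per ~10 results.

    def _accumulate_sample_issues_sketch(issue_ids, sample_dict, needed_iids):
      # Small shards need no samples ("don't bother with samples").
      if len(issue_ids) < SAMPLE_SPACING:
        return
      # Pick every 10th IID; 23 results yield samples at indexes 10 and 20,
      # matching the expected count of 2 in testAccumulateSampleIssues_Normal.
      for i in range(SAMPLE_SPACING, len(issue_ids), SAMPLE_SPACING):
        needed_iids.append(issue_ids[i])
      # sample_dict stays untouched here; the real method presumably fills
      # it in only after the sampled issues have been fetched.
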
263 def testLookupNeededUsers(self):
264 pipeline = frontendsearchpipeline.FrontendSearchPipeline(
265 self.mr, self.services, self.profiler, 100)
266
267 pipeline._LookupNeededUsers([])
268 self.assertEqual([], pipeline.users_by_id.keys())
269
270 pipeline._LookupNeededUsers([self.issue_1, self.issue_2, self.issue_3])
271 self.assertEqual([111L], pipeline.users_by_id.keys())
272
273 def testPaginate_Grid(self):
274 self.mr.mode = 'grid'
275 pipeline = frontendsearchpipeline.FrontendSearchPipeline(
276 self.mr, self.services, self.profiler, 100)
277 pipeline.allowed_iids = [
278 self.issue_1.issue_id, self.issue_2.issue_id, self.issue_3.issue_id]
279 pipeline.allowed_results = [self.issue_1, self.issue_2, self.issue_3]
280 pipeline.total_count = len(pipeline.allowed_results)
281 pipeline.Paginate()
282 self.assertEqual(
283 [self.issue_1, self.issue_2, self.issue_3],
284 pipeline.visible_results)
285
286 def testPaginate_List(self):
287 pipeline = frontendsearchpipeline.FrontendSearchPipeline(
288 self.mr, self.services, self.profiler, 100)
289 pipeline.allowed_iids = [
290 self.issue_1.issue_id, self.issue_2.issue_id, self.issue_3.issue_id]
291 pipeline.allowed_results = [self.issue_1, self.issue_2, self.issue_3]
292 pipeline.total_count = len(pipeline.allowed_results)
293 pipeline.Paginate()
294 self.assertEqual(
295 [self.issue_1, self.issue_2, self.issue_3],
296 pipeline.visible_results)
297 self.assertFalse(pipeline.pagination.limit_reached)
298
299
300 class FrontendSearchPipelineMethodsTest(unittest.TestCase):
301
302 def setUp(self):
303 self.mox = mox.Mox()
304 self.testbed = testbed.Testbed()
305 self.testbed.activate()
306 self.testbed.init_user_stub()
307 self.testbed.init_memcache_stub()
308
309 def tearDown(self):
310 self.testbed.deactivate()
311 self.mox.UnsetStubs()
312 self.mox.ResetAll()
313
314 def testMakeBackendCallback(self):
315 called_with = []
316
317 def func(a, b):
318 called_with.append((a, b))
319
320 callback = frontendsearchpipeline._MakeBackendCallback(func, 10, 20)
321 callback()
322 self.assertEqual([(10, 20)], called_with)
323
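The test above pins down the contract: _MakeBackendCallback returns a zero-argument callable that invokes the wrapped function with the captured arguments. A minimal closure satisfying it (a sketch, assuming no extra bookkeeping in the real module):

    def _make_backend_callback_sketch(func, *args):
      # Capture func and its positional args; the returned thunk takes no
      # arguments, as the RPC callback protocol requires.
      return lambda: func(*args)
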
324 def testStartBackendSearch(self):
325 # TODO(jrobbins): write this test.
326 pass
327
328 def testFinishBackendSearch(self):
329 # TODO(jrobbins): write this test.
330 pass
331
332 def testGetProjectTimestamps_NoneSet(self):
333 project_shard_timestamps = frontendsearchpipeline._GetProjectTimestamps(
334 [], [])
335 self.assertEqual({}, project_shard_timestamps)
336
337 project_shard_timestamps = frontendsearchpipeline._GetProjectTimestamps(
338 [], [0, 1, 2, 3, 4])
339 self.assertEqual({}, project_shard_timestamps)
340
341 project_shard_timestamps = frontendsearchpipeline._GetProjectTimestamps(
342 [789], [0, 1, 2, 3, 4])
343 self.assertEqual({}, project_shard_timestamps)
344
345 def testGetProjectTimestamps_SpecificProjects(self):
346 memcache.set('789;0', NOW)
347 memcache.set('789;1', NOW - 1000)
348 memcache.set('789;2', NOW - 3000)
349 project_shard_timestamps = frontendsearchpipeline._GetProjectTimestamps(
350 [789], [0, 1, 2])
351 self.assertEqual(
352 { (789, 0): NOW,
353 (789, 1): NOW - 1000,
354 (789, 2): NOW - 3000,
355 },
356 project_shard_timestamps)
357
358 memcache.set('790;0', NOW)
359 memcache.set('790;1', NOW - 10000)
360 memcache.set('790;2', NOW - 30000)
361 project_shard_timestamps = frontendsearchpipeline._GetProjectTimestamps(
362 [789, 790], [0, 1, 2])
363 self.assertEqual(
364 { (789, 0): NOW,
365 (789, 1): NOW - 1000,
366 (789, 2): NOW - 3000,
367 (790, 0): NOW,
368 (790, 1): NOW - 10000,
369 (790, 2): NOW - 30000,
370 },
371 project_shard_timestamps)
372
373 def testGetProjectTimestamps_SiteWide(self):
374 memcache.set('all;0', NOW)
375 memcache.set('all;1', NOW - 10000)
376 memcache.set('all;2', NOW - 30000)
377 project_shard_timestamps = frontendsearchpipeline._GetProjectTimestamps(
378 [], [0, 1, 2])
379 self.assertEqual(
380 { ('all', 0): NOW,
381 ('all', 1): NOW - 10000,
382 ('all', 2): NOW - 30000,
383 },
384 project_shard_timestamps)
385
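The fixtures above imply a memcache key scheme of 'project_id;shard_id' for specific projects and 'all;shard_id' for site-wide searches. A hypothetical helper capturing that scheme (the name is ours, not the module's):

    def _timestamp_memcache_key(project_id, shard_id):
      # '789;0' for project 789, shard 0; 'all;0' for site-wide shard 0.
      prefix = 'all' if project_id is None else str(project_id)
      return '%s;%d' % (prefix, shard_id)
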
386 def testGetNonviewableIIDs_SearchMissSoNoOp(self):
387 """If search cache missed, don't bother looking up nonviewable IIDs."""
388 unfiltered_iids_dict = {} # No cached search results found.
389 rpc_tuples = [] # Nothing should accumulate here in this case.
390 nonviewable_iids = {} # Nothing should accumulate here in this case.
391 processed_invalidations_up_to = 12345
392 frontendsearchpipeline._GetNonviewableIIDs(
393 [789], 111L, unfiltered_iids_dict.keys(), rpc_tuples, nonviewable_iids,
394 {}, processed_invalidations_up_to, True)
395 self.assertEqual([], rpc_tuples)
396 self.assertEqual({}, nonviewable_iids)
397
398 def testGetNonviewableIIDs_SearchHitThenNonviewableHit(self):
399 """If search cache hit, get nonviewable info from cache."""
400 unfiltered_iids_dict = {
401 1: [10001, 10021],
402 2: ['the search result issue_ids do not matter'],
403 }
404 rpc_tuples = [] # Nothing should accumulate here in this case.
405 nonviewable_iids = {} # Our mock results should end up here.
406 processed_invalidations_up_to = 12345
407 memcache.set('nonviewable:789;111;1',
408 ([10001, 10031], processed_invalidations_up_to - 10))
409 memcache.set('nonviewable:789;111;2',
410 ([10002, 10042], processed_invalidations_up_to - 30))
411
412 project_shard_timestamps = {
413 (789, 1): 0, # not stale
414 (789, 2): 0, # not stale
415 }
416 frontendsearchpipeline._GetNonviewableIIDs(
417 [789], 111L, unfiltered_iids_dict.keys(), rpc_tuples, nonviewable_iids,
418 project_shard_timestamps, processed_invalidations_up_to, True)
419 self.assertEqual([], rpc_tuples)
420 self.assertEqual({1: {10001, 10031}, 2: {10002, 10042}}, nonviewable_iids)
421
422 def testGetNonviewableIIDs_SearchHitNonviewableMissSoStartRPC(self):
423 """If search hit and n-v miss, create RPCs to get nonviewable info."""
424 self.mox.StubOutWithMock(
425 frontendsearchpipeline, '_StartBackendNonviewableCall')
426 unfiltered_iids_dict = {
427 2: ['the search result issue_ids do not matter'],
428 }
429 rpc_tuples = [] # One RPC object should accumulate here.
430 nonviewable_iids = {} # This will stay empty until RPCs complete.
431 processed_invalidations_up_to = 12345
432 # Nothing is set in memcache for this case.
433 a_fake_rpc = testing_helpers.Blank(callback=None)
434 frontendsearchpipeline._StartBackendNonviewableCall(
435 789, 111L, 2, processed_invalidations_up_to).AndReturn(a_fake_rpc)
436 self.mox.ReplayAll()
437
438 frontendsearchpipeline._GetNonviewableIIDs(
439 [789], 111L, unfiltered_iids_dict.keys(), rpc_tuples, nonviewable_iids,
440 {}, processed_invalidations_up_to, True)
441 self.mox.VerifyAll()
442 _, sid_0, rpc_0 = rpc_tuples[0]
443 self.assertEqual(2, sid_0)
444 self.assertEqual({}, nonviewable_iids)
445 self.assertEqual(a_fake_rpc, rpc_0)
446 self.assertIsNotNone(a_fake_rpc.callback)
447
448 def testAccumulateNonviewableIIDs_MemcacheHitForProject(self):
449 processed_invalidations_up_to = 12345
450 cached_dict = {
451 '789;111;2': ([10002, 10042], processed_invalidations_up_to - 10),
452 '789;111;3': ([10003, 10093], processed_invalidations_up_to - 30),
453 }
454 rpc_tuples = [] # Nothing should accumulate here.
455 nonviewable_iids = {1: {10001}} # This will gain the shard 2 values.
456 project_shard_timestamps = {
457 (789, 1): 0, # not stale
458 (789, 2): 0, # not stale
459 }
460 frontendsearchpipeline._AccumulateNonviewableIIDs(
461 789, 111L, 2, cached_dict, nonviewable_iids, project_shard_timestamps,
462 rpc_tuples, processed_invalidations_up_to)
463 self.assertEqual([], rpc_tuples)
464 self.assertEqual({1: {10001}, 2: {10002, 10042}}, nonviewable_iids)
465
466 def testAccumulateNonviewableIIDs_MemcacheStaleForProject(self):
467 self.mox.StubOutWithMock(
468 frontendsearchpipeline, '_StartBackendNonviewableCall')
469 processed_invalidations_up_to = 12345
470 cached_dict = {
471 '789;111;2': ([10002, 10042], processed_invalidations_up_to - 10),
472 '789;111;3': ([10003, 10093], processed_invalidations_up_to - 30),
473 }
474 rpc_tuples = [] # Nothing should accumulate here.
475 nonviewable_iids = {1: {10001}} # Nothing added here until the RPC completes.
476 project_shard_timestamps = {
477 (789, 1): 0, # not stale
478 (789, 2): processed_invalidations_up_to, # stale!
479 }
480 a_fake_rpc = testing_helpers.Blank(callback=None)
481 frontendsearchpipeline._StartBackendNonviewableCall(
482 789, 111L, 2, processed_invalidations_up_to).AndReturn(a_fake_rpc)
483 self.mox.ReplayAll()
484
485 frontendsearchpipeline._AccumulateNonviewableIIDs(
486 789, 111L, 2, cached_dict, nonviewable_iids, project_shard_timestamps,
487 rpc_tuples, processed_invalidations_up_to)
488 self.mox.VerifyAll()
489 _, sid_0, rpc_0 = rpc_tuples[0]
490 self.assertEqual(2, sid_0)
491 self.assertEqual(a_fake_rpc, rpc_0)
492 self.assertIsNotNone(a_fake_rpc.callback)
493 self.assertEqual({1: {10001}}, nonviewable_iids)
494
495 def testAccumulateNonviewableIIDs_MemcacheHitForWholeSite(self):
496 processed_invalidations_up_to = 12345
497 cached_dict = {
498 'all;111;2': ([10002, 10042], processed_invalidations_up_to - 10),
499 'all;111;3': ([10003, 10093], processed_invalidations_up_to - 30),
500 }
501 rpc_tuples = [] # Nothing should accumulate here.
502 nonviewable_iids = {1: {10001}} # This will gain the shard 2 values.
503 project_shard_timestamps = {
504 (None, 1): 0, # not stale
505 (None, 2): 0, # not stale
506 }
507 frontendsearchpipeline._AccumulateNonviewableIIDs(
508 None, 111L, 2, cached_dict, nonviewable_iids, project_shard_timestamps,
509 rpc_tuples, processed_invalidations_up_to)
510 self.assertEqual([], rpc_tuples)
511 self.assertEqual({1: {10001}, 2: {10002, 10042}}, nonviewable_iids)
512
513 def testAccumulateNonviewableIIDs_MemcacheMissSoStartRPC(self):
514 self.mox.StubOutWithMock(
515 frontendsearchpipeline, '_StartBackendNonviewableCall')
516 cached_dict = {} # Nothing here, so it is an at-risk cache miss.
517 rpc_tuples = [] # One RPC should accumulate here.
518 nonviewable_iids = {1: {10001}} # Nothing added here until RPC completes.
519 processed_invalidations_up_to = 12345
520 a_fake_rpc = testing_helpers.Blank(callback=None)
521 frontendsearchpipeline._StartBackendNonviewableCall(
522 789, 111L, 2, processed_invalidations_up_to).AndReturn(a_fake_rpc)
523 self.mox.ReplayAll()
524
525 frontendsearchpipeline._AccumulateNonviewableIIDs(
526 789, 111L, 2, cached_dict, nonviewable_iids, {}, rpc_tuples,
527 processed_invalidations_up_to)
528 self.mox.VerifyAll()
529 _, sid_0, rpc_0 = rpc_tuples[0]
530 self.assertEqual(2, sid_0)
531 self.assertEqual(a_fake_rpc, rpc_0)
532 self.assertIsNotNone(a_fake_rpc.callback)
533 self.assertEqual({1: {10001}}, nonviewable_iids)
534
535 def testGetCachedSearchResults(self):
536 # TODO(jrobbins): Write this test.
537 pass
538
539 def testMakeBackendRequestHeaders(self):
540 headers = frontendsearchpipeline._MakeBackendRequestHeaders(False)
541 self.assertNotIn('X-AppEngine-FailFast', headers)
542 headers = frontendsearchpipeline._MakeBackendRequestHeaders(True)
543 self.assertEqual('Yes', headers['X-AppEngine-FailFast'])
544
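A sketch consistent with both assertions, assuming the headers dict carries nothing else relevant to this test:

    def _make_backend_request_headers_sketch(failfast):
      # X-AppEngine-FailFast asks App Engine to fail over quickly rather
      # than queue the request behind a busy backend instance.
      headers = {}
      if failfast:
        headers['X-AppEngine-FailFast'] = 'Yes'
      return headers
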
545 def testStartBackendSearchCall(self):
546 self.mox.StubOutWithMock(urlfetch, 'create_rpc')
547 self.mox.StubOutWithMock(urlfetch, 'make_fetch_call')
548 self.mox.StubOutWithMock(modules, 'get_hostname')
549 a_fake_rpc = testing_helpers.Blank(callback=None)
550 urlfetch.create_rpc(deadline=settings.backend_deadline).AndReturn(
551 a_fake_rpc)
552 modules.get_hostname(module='besearch')
553 urlfetch.make_fetch_call(
554 a_fake_rpc, mox.StrContains(urls.BACKEND_SEARCH), follow_redirects=False,
555 headers=mox.IsA(dict))
556 self.mox.ReplayAll()
557
558 processed_invalidations_up_to = 12345
559 mr = testing_helpers.MakeMonorailRequest(path='/p/proj/issues/list?q=foo')
560 mr.me_user_id = 111L
561 frontendsearchpipeline._StartBackendSearchCall(
562 mr, ['proj'], 2, processed_invalidations_up_to)
563 self.mox.VerifyAll()
564
565 def testStartBackendNonviewableCall(self):
566 self.mox.StubOutWithMock(urlfetch, 'create_rpc')
567 self.mox.StubOutWithMock(urlfetch, 'make_fetch_call')
568 self.mox.StubOutWithMock(modules, 'get_hostname')
569 a_fake_rpc = testing_helpers.Blank(callback=None)
570 urlfetch.create_rpc(deadline=settings.backend_deadline).AndReturn(
571 a_fake_rpc)
572 modules.get_hostname(module='besearch')
573 urlfetch.make_fetch_call(
574 a_fake_rpc, mox.StrContains(urls.BACKEND_NONVIEWABLE),
575 follow_redirects=False, headers=mox.IsA(dict))
576 self.mox.ReplayAll()
577
578 processed_invalidations_up_to = 12345
579 frontendsearchpipeline._StartBackendNonviewableCall(
580 789, 111L, 2, processed_invalidations_up_to)
581 self.mox.VerifyAll()
582
583 def testHandleBackendSearchResponse_Error(self):
584 response_str = 'There was a problem processing the query.'
585 rpc = testing_helpers.Blank(
586 get_result=lambda: testing_helpers.Blank(
587 content=response_str, status_code=500))
588 rpc_tuple = (NOW, 2, rpc)
589 rpc_tuples = [] # Nothing should be added for this case.
590 filtered_iids = {} # Search results should accumulate here, per-shard.
591 search_limit_reached = {} # Booleans accumulate here, per-shard.
592 processed_invalidations_up_to = 12345
593
594 mr = testing_helpers.MakeMonorailRequest(path='/p/proj/issues/list?q=foo')
595 mr.me_user_id = 111L
596 error_responses = set()
597
598 self.mox.StubOutWithMock(frontendsearchpipeline, '_StartBackendSearchCall')
599 frontendsearchpipeline._HandleBackendSearchResponse(
600 mr, ['proj'], rpc_tuple, rpc_tuples, 0, filtered_iids,
601 search_limit_reached, processed_invalidations_up_to, error_responses)
602 self.assertEqual([], rpc_tuples)
603 self.assertIn(2, error_responses)
604
605 def testHandleBackendSearchResponse_Normal(self):
606 response_str = (
607 '})]\'\n'
608 '{'
609 ' "unfiltered_iids": [10002, 10042],'
610 ' "search_limit_reached": false'
611 '}'
612 )
613 rpc = testing_helpers.Blank(
614 get_result=lambda: testing_helpers.Blank(
615 content=response_str, status_code=200))
616 rpc_tuple = (NOW, 2, rpc)
617 rpc_tuples = [] # Nothing should be added for this case.
618 filtered_iids = {} # Search results should accumulate here, per-shard.
619 search_limit_reached = {} # Booleans accumulate here, per-shard.
620 processed_invalidations_up_to = 12345
621
622 mr = testing_helpers.MakeMonorailRequest(path='/p/proj/issues/list?q=foo')
623 mr.me_user_id = 111L
624 error_responses = set()
625 frontendsearchpipeline._HandleBackendSearchResponse(
626 mr, ['proj'], rpc_tuple, rpc_tuples, 2, filtered_iids,
627 search_limit_reached, processed_invalidations_up_to, error_responses)
628 self.assertEqual([], rpc_tuples)
629 self.assertEqual({2: [10002, 10042]}, filtered_iids)
630 self.assertEqual({2: False}, search_limit_reached)
631
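The "})]'\n" prefix in the fixture above is an anti-XSSI guard that the frontend must strip before JSON-decoding the backend response. A sketch of that handling for the happy path only (status-code checks omitted; not the module's actual parser):

    import json

    XSSI_PREFIX = "})]'\n"

    def _parse_backend_response_sketch(content):
      # Drop the anti-XSSI prefix, then decode the remaining JSON payload.
      if content.startswith(XSSI_PREFIX):
        content = content[len(XSSI_PREFIX):]
      return json.loads(content)
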
632
633 def testHandleBackendSearchResponse_TriggersRetry(self):
634 response_str = None
635 rpc = testing_helpers.Blank(
636 get_result=lambda: testing_helpers.Blank(content=response_str))
637 rpc_tuple = (NOW, 2, rpc)
638 rpc_tuples = [] # A new RPC should be appended here.
639 filtered_iids = {} # No change here until retry completes.
640 search_limit_reached = {} # No change here until retry completes.
641 processed_invalidations_up_to = 12345
642 error_responses = set()
643
644 mr = testing_helpers.MakeMonorailRequest(path='/p/proj/issues/list?q=foo')
645 mr.me_user_id = 111L
646
647 self.mox.StubOutWithMock(frontendsearchpipeline, '_StartBackendSearchCall')
648 a_fake_rpc = testing_helpers.Blank(callback=None)
649 rpc = frontendsearchpipeline._StartBackendSearchCall(
650 mr, ['proj'], 2, processed_invalidations_up_to, failfast=False
651 ).AndReturn(a_fake_rpc)
652 self.mox.ReplayAll()
653
654 frontendsearchpipeline._HandleBackendSearchResponse(
655 mr, ['proj'], rpc_tuple, rpc_tuples, 2, filtered_iids,
656 search_limit_reached, processed_invalidations_up_to, error_responses)
657 self.mox.VerifyAll()
658 _, retry_shard_id, retry_rpc = rpc_tuples[0]
659 self.assertEqual(2, retry_shard_id)
660 self.assertEqual(a_fake_rpc, retry_rpc)
661 self.assertIsNotNone(retry_rpc.callback)
662 self.assertEqual({}, filtered_iids)
663 self.assertEqual({}, search_limit_reached)
664
665 def testHandleBackendNonviewableResponse_Error(self):
666 response_str = 'There was an error.'
667 rpc = testing_helpers.Blank(
668 get_result=lambda: testing_helpers.Blank(
669 content=response_str,
670 status_code=500
671 ))
672 rpc_tuple = (NOW, 2, rpc)
673 rpc_tuples = [] # Nothing should be added for this case.
674 nonviewable_iids = {} # At-risk issue IDs should accumulate here, per-shard.
675 processed_invalidations_up_to = 12345
676
677 self.mox.StubOutWithMock(
678 frontendsearchpipeline, '_StartBackendNonviewableCall')
679 frontendsearchpipeline._HandleBackendNonviewableResponse(
680 789, 111L, 2, rpc_tuple, rpc_tuples, 0, nonviewable_iids,
681 processed_invalidations_up_to)
682 self.assertEqual([], rpc_tuples)
683 self.assertNotEqual({2: {10002, 10042}}, nonviewable_iids)
684
685 def testHandleBackendNonviewableResponse_Normal(self):
686 response_str = (
687 '})]\'\n'
688 '{'
689 ' "nonviewable": [10002, 10042]'
690 '}'
691 )
692 rpc = testing_helpers.Blank(
693 get_result=lambda: testing_helpers.Blank(
694 content=response_str,
695 status_code=200
696 ))
697 rpc_tuple = (NOW, 2, rpc)
698 rpc_tuples = [] # Nothing should be added for this case.
699 nonviewable_iids = {} # At-risk issue IDs should accumulate here, per-shard.
700 processed_invalidations_up_to = 12345
701
702 frontendsearchpipeline._HandleBackendNonviewableResponse(
703 789, 111L, 2, rpc_tuple, rpc_tuples, 2, nonviewable_iids,
704 processed_invalidations_up_to)
705 self.assertEqual([], rpc_tuples)
706 self.assertEqual({2: {10002, 10042}}, nonviewable_iids)
707
708 def testHandleBackendAtRiskResponse_TriggersRetry(self):
709 response_str = None
710 rpc = testing_helpers.Blank(
711 get_result=lambda: testing_helpers.Blank(content=response_str))
712 rpc_tuple = (NOW, 2, rpc)
713 rpc_tuples = [] # A new RPC should be appended here.
714 nonviewable_iids = {} # No change here until retry completes.
715 processed_invalidations_up_to = 12345
716
717 self.mox.StubOutWithMock(
718 frontendsearchpipeline, '_StartBackendNonviewableCall')
719 a_fake_rpc = testing_helpers.Blank(callback=None)
720 rpc = frontendsearchpipeline._StartBackendNonviewableCall(
721 789, 111L, 2, processed_invalidations_up_to, failfast=False
722 ).AndReturn(a_fake_rpc)
723 self.mox.ReplayAll()
724
725 frontendsearchpipeline._HandleBackendNonviewableResponse(
726 789, 111L, 2, rpc_tuple, rpc_tuples, 2, nonviewable_iids,
727 processed_invalidations_up_to)
728 self.mox.VerifyAll()
729 _, retry_shard_id, retry_rpc = rpc_tuples[0]
730 self.assertEqual(2, retry_shard_id)
731 self.assertIsNotNone(retry_rpc.callback)
732 self.assertEqual(a_fake_rpc, retry_rpc)
733 self.assertEqual({}, nonviewable_iids)
734
735 def testSortIssues(self):
736 services = service_manager.Services(
737 cache_manager=fake.CacheManager())
738 sorting.InitializeArtValues(services)
739
740 mr = testing_helpers.MakeMonorailRequest(path='/p/proj/issues/list?q=foo')
741 mr.sort_spec = 'priority'
742 issue_1 = fake.MakeTestIssue(
743 789, 1, 'one', 'New', 111L, labels=['Priority-High'])
744 issue_2 = fake.MakeTestIssue(
745 789, 2, 'two', 'New', 111L, labels=['Priority-Low'])
746 issue_3 = fake.MakeTestIssue(
747 789, 3, 'three', 'New', 111L, labels=['Priority-Medium'])
748 issues = [issue_1, issue_2, issue_3]
749 config = tracker_bizobj.MakeDefaultProjectIssueConfig(789)
750
751 sorted_issues = frontendsearchpipeline._SortIssues(mr, issues, config, {})
752
753 self.assertEqual(
754 [issue_1, issue_3, issue_2], # Order is high, medium, low.
755 sorted_issues)
756
757
758 class FrontendSearchPipelineShardMethodsTest(unittest.TestCase):
759
760 def setUp(self):
761 self.sharded_iids = {
762 0: [10, 20, 30, 40, 50],
763 1: [21, 41, 61, 81],
764 2: [42, 52, 62, 72, 102],
765 3: [],
766 }
767
768 def testTotalLength_Empty(self):
769 """If there were no results, the length of the sharded list is zero."""
770 self.assertEqual(0, frontendsearchpipeline._TotalLength({}))
771
772 def testTotalLength_Normal(self):
773 """The length of the sharded list is the sum of the shard lengths."""
774 self.assertEqual(
775 14, frontendsearchpipeline._TotalLength(self.sharded_iids))
776
777 def testReverseShards_Empty(self):
778 """Reversing an empty sharded list is still empty."""
779 empty_sharded_iids = {}
780 frontendsearchpipeline._ReverseShards(empty_sharded_iids)
781 self.assertEqual({}, empty_sharded_iids)
782
783 def testReverseShards_Normal(self):
784 """Reversing a sharded list reverses each shard."""
785 frontendsearchpipeline._ReverseShards(self.sharded_iids)
786 self.assertEqual(
787 {0: [50, 40, 30, 20, 10],
788 1: [81, 61, 41, 21],
789 2: [102, 72, 62, 52, 42],
790 3: [],
791 },
792 self.sharded_iids)
793
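Both helpers have one-line cores implied by these tests (sketches, assuming in-place reversal as the assertions suggest):

    def _total_length_sketch(sharded_iids):
      # Sum of the per-shard list lengths: 5 + 4 + 5 + 0 = 14 in setUp().
      return sum(len(iids) for iids in sharded_iids.values())

    def _reverse_shards_sketch(sharded_iids):
      # Reverse each shard's list in place; the dict itself is unchanged.
      for iids in sharded_iids.values():
        iids.reverse()
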
794 def testTrimShardedIIDs_Empty(self):
795 """If the sharded list is empty, trimming it makes no change."""
796 empty_sharded_iids = {}
797 frontendsearchpipeline._TrimEndShardedIIDs(empty_sharded_iids, [], 12)
798 self.assertEqual({}, empty_sharded_iids)
799
800 frontendsearchpipeline._TrimEndShardedIIDs(
801 empty_sharded_iids, [100, 88, 99], 12)
802 self.assertEqual({}, empty_sharded_iids)
803
804 def testTrimShardedIIDs_NoSamples(self):
805 """If there are no samples, we don't trim off any IIDs."""
806 orig_sharded_iids = {
807 shard_id: iids[:] for shard_id, iids in self.sharded_iids.iteritems()}
808 num_trimmed = frontendsearchpipeline._TrimEndShardedIIDs(
809 self.sharded_iids, [], 12)
810 self.assertEqual(0, num_trimmed)
811 self.assertEqual(orig_sharded_iids, self.sharded_iids)
812
813 num_trimmed = frontendsearchpipeline._TrimEndShardedIIDs(
814 self.sharded_iids, [], 1)
815 self.assertEqual(0, num_trimmed)
816 self.assertEqual(orig_sharded_iids, self.sharded_iids)
817
818 def testTrimShardedIIDs_Normal(self):
819 """The first 3 samples contribute all needed IIDs, so trim off the rest."""
820 samples = [30, 41, 62, 40, 81]
821 num_trimmed = frontendsearchpipeline._TrimEndShardedIIDs(
822 self.sharded_iids, samples, 5)
823 self.assertEqual(2 + 1 + 0 + 0, num_trimmed)
824 self.assertEqual(
825 { # shard_id: IIDs before the lower-bound sample + IIDs before the first excess sample.
826 0: [10, 20] + [30],
827 1: [21] + [41, 61],
828 2: [42, 52] + [62, 72, 102],
829 3: [] + []},
830 self.sharded_iids)
831
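Worked check of the expected trim count: with samples [30, 41, 62, 40, 81] and 5 IIDs needed, shard 0 keeps [10, 20, 30] and drops [40, 50] (2 trimmed), shard 1 keeps [21, 41, 61] and drops [81] (1 trimmed), and shards 2 and 3 drop nothing, giving 2 + 1 + 0 + 0 = 3.
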
832 def testCalcSamplePositions_Empty(self):
833 sharded_iids = {0: []}
834 samples = []
835 self.assertEqual(
836 [], frontendsearchpipeline._CalcSamplePositions(sharded_iids, samples))
837
838 sharded_iids = {0: [10, 20, 30, 40]}
839 samples = []
840 self.assertEqual(
841 [], frontendsearchpipeline._CalcSamplePositions(sharded_iids, samples))
842
843 sharded_iids = {0: []}
844 # E.g., IIDs 2 and 4 might have been trimmed out in the forward phase.
845 # We still have them in the sample list for the backwards phase, but they
846 # should contribute nothing to the result.
847 samples = [2, 4]
848 self.assertEqual(
849 [], frontendsearchpipeline._CalcSamplePositions(sharded_iids, samples))
850
851 def testCalcSamplePositions_Normal(self):
852 samples = [30, 41, 62, 40, 81]
853 self.assertEqual(
854 [(30, 2), (41, 1), (62, 2), (40, 3), (81, 3)],
855 frontendsearchpipeline._CalcSamplePositions(self.sharded_iids, samples))
856
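Each result pairs a sample IID with its index inside whichever shard contains it, preserving the order of the samples list and silently skipping samples found in no shard. A sketch of logic consistent with these tests (not the module's actual code):

    def _calc_sample_positions_sketch(sharded_iids, samples):
      positions = []
      for sample in samples:
        for iids in sharded_iids.values():
          if sample in iids:
            # Record the sample's index within its own shard.
            positions.append((sample, iids.index(sample)))
            break
      return positions
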
857
858 if __name__ == '__main__':
859 unittest.main()