Chromium Code Reviews

Side by Side Diff: chrome/browser/history/history_querying_unittest.cc

Issue 16776004: Replace FTS in the history_service with a brute force text search. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Address Brett's comments. Created 7 years, 6 months ago
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "base/basictypes.h" 5 #include "base/basictypes.h"
6 #include "base/bind.h" 6 #include "base/bind.h"
7 #include "base/bind_helpers.h" 7 #include "base/bind_helpers.h"
8 #include "base/file_util.h" 8 #include "base/file_util.h"
9 #include "base/files/file_path.h" 9 #include "base/files/file_path.h"
10 #include "base/files/scoped_temp_dir.h" 10 #include "base/files/scoped_temp_dir.h"
11 #include "base/path_service.h" 11 #include "base/path_service.h"
12 #include "base/strings/utf_string_conversions.h" 12 #include "base/strings/utf_string_conversions.h"
13 #include "chrome/browser/history/history_service.h" 13 #include "chrome/browser/history/history_service.h"
14 #include "testing/gtest/include/gtest/gtest.h" 14 #include "testing/gtest/include/gtest/gtest.h"
15 15
16 using base::Time; 16 using base::Time;
17 using base::TimeDelta; 17 using base::TimeDelta;
18 18
19 // Tests the history service for querying functionality. 19 // Tests the history service for querying functionality.
20 20
21 namespace history { 21 namespace history {
22 22
23 namespace { 23 namespace {
24 24
25 struct TestEntry { 25 struct TestEntry {
26 const char* url; 26 const char* url;
27 const char* title; 27 const char* title;
28 const int days_ago; 28 const int days_ago;
29 const char* body;
30 Time time; // Filled by SetUp. 29 Time time; // Filled by SetUp.
31 } test_entries[] = { 30 } test_entries[] = {
32 // This one is visited super long ago so it will be in a different database 31 // This one is visited super long ago so it will be in a different database
33 // from the next appearance of it at the end. 32 // from the next appearance of it at the end.
34 {"http://example.com/", "Other", 180, "Other"}, 33 {"http://example.com/", "Other", 180},
35 34
36 // These are deliberately added out of chronological order. The history 35 // These are deliberately added out of chronological order. The history
37 // service should sort them by visit time when returning query results. 36 // service should sort them by visit time when returning query results.
38 // The correct index sort order is 4 2 3 1 7 6 5 0. 37 // The correct index sort order is 4 2 3 1 7 6 5 0.
39 {"http://www.google.com/1", "Title 1", 10, 38 {"http://www.google.com/1", "Title PAGEONE FOO some text", 10},
40 "PAGEONE FOO some body text"}, 39 {"http://www.google.com/3", "Title PAGETHREE BAR some hello world", 8},
41 {"http://www.google.com/3", "Title 3", 8, 40 {"http://www.google.com/2", "Title PAGETWO FOO some more blah blah blah", 9},
42 "PAGETHREE BAR some hello world for you"},
43 {"http://www.google.com/2", "Title 2", 9,
44 "PAGETWO FOO some more blah blah blah Title"},
45 41
46 // A more recent visit of the first one. 42 // A more recent visit of the first one.
47 {"http://example.com/", "Other", 6, "Other"}, 43 {"http://example.com/", "Other", 6},
48 44
49 {"http://www.google.com/6", "Title 6", 13, "I'm the second oldest"}, 45 {"http://www.google.com/6", "Title I'm the second oldest", 13},
50 {"http://www.google.com/4", "Title 4", 12, "four"}, 46 {"http://www.google.com/4", "Title four", 12},
51 {"http://www.google.com/5", "Title 5", 11, "five"}, 47 {"http://www.google.com/5", "Title five", 11},
52 }; 48 };
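Aside (not part of the patch): the "correct index sort order is 4 2 3 1 7 6 5 0" claim above follows directly from the days_ago values, with the most recently visited entry first. A minimal standalone sketch that derives that ordering, assuming only the days_ago values from test_entries:

#include <algorithm>
#include <cstdio>
#include <vector>

int main() {
  // days_ago per test_entries index 0..7.
  const int days_ago[] = {180, 10, 8, 9, 6, 13, 12, 11};
  std::vector<int> order = {0, 1, 2, 3, 4, 5, 6, 7};
  // Most recent visit (smallest days_ago) sorts first.
  std::sort(order.begin(), order.end(),
            [&](int a, int b) { return days_ago[a] < days_ago[b]; });
  for (int i : order)
    std::printf("%d ", i);  // Prints: 4 2 3 1 7 6 5 0
  std::printf("\n");
  return 0;
}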
53 49
54 // Returns true if the nth result in the given results set matches. It will 50 // Returns true if the nth result in the given results set matches. It will
55 // return false on a non-match or if there aren't enough results. 51 // return false on a non-match or if there aren't enough results.
56 bool NthResultIs(const QueryResults& results, 52 bool NthResultIs(const QueryResults& results,
57 int n, // Result index to check. 53 int n, // Result index to check.
58 int test_entry_index) { // Index of test_entries to compare. 54 int test_entry_index) { // Index of test_entries to compare.
59 if (static_cast<int>(results.size()) <= n) 55 if (static_cast<int>(results.size()) <= n)
60 return false; 56 return false;
61 57
(...skipping 22 matching lines...)
84 history_->QueryHistory( 80 history_->QueryHistory(
85 UTF8ToUTF16(text_query), options, &consumer_, 81 UTF8ToUTF16(text_query), options, &consumer_,
86 base::Bind(&HistoryQueryTest::QueryHistoryComplete, 82 base::Bind(&HistoryQueryTest::QueryHistoryComplete,
87 base::Unretained(this))); 83 base::Unretained(this)));
88 // Will go until ...Complete calls Quit. 84 // Will go until ...Complete calls Quit.
89 base::MessageLoop::current()->Run(); 85 base::MessageLoop::current()->Run();
90 results->Swap(&last_query_results_); 86 results->Swap(&last_query_results_);
91 } 87 }
92 88
93 // Test paging through results, with a fixed number of results per page. 89 // Test paging through results, with a fixed number of results per page.
94 // Defined here so code can be shared for the FTS version and the non-FTS 90 // Defined here so code can be shared for the text search and the non-text
 95 // version. 91 // search versions.
96 void TestPaging(const std::string& query_text, 92 void TestPaging(const std::string& query_text,
97 const int* expected_results, 93 const int* expected_results,
98 int results_length) { 94 int results_length) {
99 ASSERT_TRUE(history_.get()); 95 ASSERT_TRUE(history_.get());
100 96
101 QueryOptions options; 97 QueryOptions options;
102 QueryResults results; 98 QueryResults results;
103 99
104 options.max_count = 1; 100 options.max_count = 1;
105 for (int i = 0; i < results_length; i++) { 101 for (int i = 0; i < results_length; i++) {
(...skipping 12 matching lines...)
118 for (int i = 0; i < results_length / 2; i++) { 114 for (int i = 0; i < results_length / 2; i++) {
119 SCOPED_TRACE(testing::Message() << "i = " << i); 115 SCOPED_TRACE(testing::Message() << "i = " << i);
120 QueryHistory(query_text, options, &results); 116 QueryHistory(query_text, options, &results);
121 ASSERT_EQ(2U, results.size()); 117 ASSERT_EQ(2U, results.size());
122 EXPECT_TRUE(NthResultIs(results, 0, expected_results[i * 2])); 118 EXPECT_TRUE(NthResultIs(results, 0, expected_results[i * 2]));
123 EXPECT_TRUE(NthResultIs(results, 1, expected_results[i * 2 + 1])); 119 EXPECT_TRUE(NthResultIs(results, 1, expected_results[i * 2 + 1]));
124 options.end_time = results.back().visit_time(); 120 options.end_time = results.back().visit_time();
125 } 121 }
126 122
127 // Add a couple of entries with duplicate timestamps. Use |query_text| as 123 // Add a couple of entries with duplicate timestamps. Use |query_text| as
128 // the body of both entries so that they match a full-text query. 124 // the title of both entries so that they match a text query.
129 TestEntry duplicates[] = { 125 TestEntry duplicates[] = {
130 { "http://www.google.com/x", "", 1, query_text.c_str() }, 126 { "http://www.google.com/x", query_text.c_str(), 1, },
131 { "http://www.google.com/y", "", 1, query_text.c_str() } 127 { "http://www.google.com/y", query_text.c_str(), 1, }
132 }; 128 };
133 AddEntryToHistory(duplicates[0]); 129 AddEntryToHistory(duplicates[0]);
134 AddEntryToHistory(duplicates[1]); 130 AddEntryToHistory(duplicates[1]);
135 131
136 // Make sure that paging proceeds even if there are duplicate timestamps. 132 // Make sure that paging proceeds even if there are duplicate timestamps.
137 options.end_time = base::Time(); 133 options.end_time = base::Time();
138 do { 134 do {
139 QueryHistory(query_text, options, &results); 135 QueryHistory(query_text, options, &results);
140 ASSERT_NE(options.end_time, results.back().visit_time()); 136 ASSERT_NE(options.end_time, results.back().visit_time());
141 options.end_time = results.back().visit_time(); 137 options.end_time = results.back().visit_time();
142 } while (!results.reached_beginning()); 138 } while (!results.reached_beginning());
143 } 139 }
144 140
145 protected: 141 protected:
146 scoped_ptr<HistoryService> history_; 142 scoped_ptr<HistoryService> history_;
147 143
148 // Counter used to generate a unique ID for each page added to the history. 144 // Counter used to generate a unique ID for each page added to the history.
149 int32 page_id_; 145 int32 page_id_;
150 146
151 void AddEntryToHistory(const TestEntry& entry) { 147 void AddEntryToHistory(const TestEntry& entry) {
152 // We need the ID scope and page ID so that the visit tracker can find it. 148 // We need the ID scope and page ID so that the visit tracker can find it.
153 const void* id_scope = reinterpret_cast<void*>(1); 149 const void* id_scope = reinterpret_cast<void*>(1);
154 GURL url(entry.url); 150 GURL url(entry.url);
155 151
156 history_->AddPage(url, entry.time, id_scope, page_id_++, GURL(), 152 history_->AddPage(url, entry.time, id_scope, page_id_++, GURL(),
157 history::RedirectList(), content::PAGE_TRANSITION_LINK, 153 history::RedirectList(), content::PAGE_TRANSITION_LINK,
158 history::SOURCE_BROWSED, false); 154 history::SOURCE_BROWSED, false);
159 history_->SetPageTitle(url, UTF8ToUTF16(entry.title)); 155 history_->SetPageTitle(url, UTF8ToUTF16(entry.title));
160 history_->SetPageContents(url, UTF8ToUTF16(entry.body));
161 } 156 }
162 157
163 private: 158 private:
164 virtual void SetUp() { 159 virtual void SetUp() {
165 ASSERT_TRUE(temp_dir_.CreateUniqueTempDir()); 160 ASSERT_TRUE(temp_dir_.CreateUniqueTempDir());
166 history_dir_ = temp_dir_.path().AppendASCII("HistoryTest"); 161 history_dir_ = temp_dir_.path().AppendASCII("HistoryTest");
167 ASSERT_TRUE(file_util::CreateDirectory(history_dir_)); 162 ASSERT_TRUE(file_util::CreateDirectory(history_dir_));
168 163
169 history_.reset(new HistoryService); 164 history_.reset(new HistoryService);
170 if (!history_->Init(history_dir_, NULL)) { 165 if (!history_->Init(history_dir_, NULL)) {
(...skipping 135 matching lines...)
306 EXPECT_TRUE(results.reached_beginning()); 301 EXPECT_TRUE(results.reached_beginning());
307 options.max_count = results.size(); 302 options.max_count = results.size();
308 QueryHistory(std::string(), options, &results); 303 QueryHistory(std::string(), options, &results);
309 EXPECT_TRUE(results.reached_beginning()); 304 EXPECT_TRUE(results.reached_beginning());
310 305
311 options.max_count = 100; 306 options.max_count = 100;
312 QueryHistory("some", options, &results); 307 QueryHistory("some", options, &results);
313 EXPECT_TRUE(results.reached_beginning()); 308 EXPECT_TRUE(results.reached_beginning());
314 options.max_count = results.size(); 309 options.max_count = results.size();
315 QueryHistory("some", options, &results); 310 QueryHistory("some", options, &results);
316 // Since the query didn't cover the oldest visit in the database, we 311 EXPECT_TRUE(results.reached_beginning());
Scott Hess - ex-Googler 2013/06/20 19:50:22 I think the previous EXPECT_FALSE() case was tryin
rmcilroy 2013/06/20 21:48:08 I _think_ this was not reaching the oldest in this
Scott Hess - ex-Googler 2013/06/20 22:18:31 <no-idea-dog/>, but it sounds like you're on top o
317 // expect false here.
318 EXPECT_FALSE(results.reached_beginning());
319 } 312 }
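Aside (not part of the patch): the reached_beginning() thread above concerns the backwards-paging pattern these tests exercise. A sketch of that pattern, using the QueryOptions, QueryResults, and QueryHistory() helpers defined in this file; the page size and query text are arbitrary:

QueryOptions options;
options.max_count = 25;           // page size
options.end_time = base::Time();  // null time => start from the newest visit
QueryResults page;
do {
  QueryHistory("some", options, &page);
  if (page.size() == 0)
    break;                        // nothing left to page through
  // ... consume |page| ...
  // The next page must be strictly older than the oldest result returned so
  // far, so the last result's visit time becomes the cursor.
  options.end_time = page.back().visit_time();
} while (!page.reached_beginning());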
320 313
 321 // This does most of the same tests above, but searches for a FTS string that 314 // This does most of the same tests above, but performs a text search for a
322 // will match the pages in question. This will trigger a different code path. 315 // string that will match the pages in question. This will trigger a
323 TEST_F(HistoryQueryTest, FTS) { 316 // different code path.
317 TEST_F(HistoryQueryTest, TextSearch) {
324 ASSERT_TRUE(history_.get()); 318 ASSERT_TRUE(history_.get());
325 319
326 QueryOptions options; 320 QueryOptions options;
327 QueryResults results; 321 QueryResults results;
328 322
329 // Query all of them to make sure they are there and in order. Note that 323 // Query all of them to make sure they are there and in order. Note that
330 // this query will return the starred item twice since we requested all 324 // this query will return the starred item twice since we requested all
331 // starred entries and no de-duping. 325 // starred entries and no de-duping.
332 QueryHistory("some", options, &results); 326 QueryHistory("some", options, &results);
333 EXPECT_EQ(3U, results.size()); 327 EXPECT_EQ(3U, results.size());
334 EXPECT_TRUE(NthResultIs(results, 0, 2)); 328 EXPECT_TRUE(NthResultIs(results, 0, 2));
335 EXPECT_TRUE(NthResultIs(results, 1, 3)); 329 EXPECT_TRUE(NthResultIs(results, 1, 3));
336 EXPECT_TRUE(NthResultIs(results, 2, 1)); 330 EXPECT_TRUE(NthResultIs(results, 2, 1));
337 331
338 // Do a query that should only match one of them. 332 // Do a query that should only match one of them.
339 QueryHistory("PAGETWO", options, &results); 333 QueryHistory("PAGETWO", options, &results);
340 EXPECT_EQ(1U, results.size()); 334 EXPECT_EQ(1U, results.size());
341 EXPECT_TRUE(NthResultIs(results, 0, 3)); 335 EXPECT_TRUE(NthResultIs(results, 0, 3));
342 336
343 // Next query a time range. The beginning should be inclusive, the ending 337 // Next query a time range. The beginning should be inclusive, the ending
344 // should be exclusive. 338 // should be exclusive.
345 options.begin_time = test_entries[1].time; 339 options.begin_time = test_entries[1].time;
346 options.end_time = test_entries[3].time; 340 options.end_time = test_entries[3].time;
347 QueryHistory("some", options, &results); 341 QueryHistory("some", options, &results);
348 EXPECT_EQ(1U, results.size()); 342 EXPECT_EQ(1U, results.size());
349 EXPECT_TRUE(NthResultIs(results, 0, 1)); 343 EXPECT_TRUE(NthResultIs(results, 0, 1));
350 } 344 }
351 345
352 // Searches titles. 346 // Tests prefix searching for text search queries.
353 TEST_F(HistoryQueryTest, FTSTitle) { 347 TEST_F(HistoryQueryTest, TextSearchPrefix) {
354 ASSERT_TRUE(history_.get()); 348 ASSERT_TRUE(history_.get());
355 349
356 QueryOptions options; 350 QueryOptions options;
357 QueryResults results;
358
359 // First execute a body-only query, to ensure that it works and that that
360 // version of the statement is not cached for the next query.
361 options.body_only = true;
Scott Hess - ex-Googler 2013/06/20 19:50:22 Is |body_only| still present as an option?
rmcilroy 2013/06/20 21:48:08 I will remove it in the followup CL which removes
Scott Hess - ex-Googler 2013/06/20 22:18:31 OK. Mostly wanted to make sure there wasn't an al
362 QueryHistory("Title", options, &results);
363 EXPECT_EQ(1U, results.size());
364 EXPECT_TRUE(NthResultIs(results, 0, 3));
365 options.body_only = false;
366
367 // Query all time but with a limit on the number of entries. We should
368 // get the N most recent entries.
369 options.max_count = 3;
370 QueryHistory("title", options, &results);
371 EXPECT_EQ(3U, results.size());
372 EXPECT_TRUE(NthResultIs(results, 0, 2));
373 EXPECT_TRUE(NthResultIs(results, 1, 3));
374 EXPECT_TRUE(NthResultIs(results, 2, 1));
375 }
376
377 // Tests prefix searching for Full Text Search queries.
378 TEST_F(HistoryQueryTest, FTSPrefix) {
379 ASSERT_TRUE(history_.get());
380
381 QueryOptions options;
382 QueryResults results; 351 QueryResults results;
383 352
384 // Query with a prefix search. Should return matches for "PAGETWO" and 353 // Query with a prefix search. Should return matches for "PAGETWO" and
385 // "PAGETHREE". 354 // "PAGETHREE".
386 QueryHistory("PAGET", options, &results); 355 QueryHistory("PAGET", options, &results);
387 EXPECT_EQ(2U, results.size()); 356 EXPECT_EQ(2U, results.size());
388 EXPECT_TRUE(NthResultIs(results, 0, 2)); 357 EXPECT_TRUE(NthResultIs(results, 0, 2));
389 EXPECT_TRUE(NthResultIs(results, 1, 3)); 358 EXPECT_TRUE(NthResultIs(results, 1, 3));
390 } 359 }
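Aside (not part of the patch): the expectations above imply a case-insensitive word-prefix match against page titles ("PAGET" hits the "PAGETWO ..." and "PAGETHREE ..." titles). A standalone sketch of that kind of brute-force match; the real matching is done by the history backend's query parser and may differ:

#include <cctype>
#include <sstream>
#include <string>

bool TitleHasWordWithPrefix(const std::string& title,
                            const std::string& prefix) {
  auto lower = [](std::string s) {
    for (size_t i = 0; i < s.size(); ++i)
      s[i] = static_cast<char>(std::tolower(static_cast<unsigned char>(s[i])));
    return s;
  };
  const std::string needle = lower(prefix);
  std::istringstream words(lower(title));
  std::string word;
  while (words >> word) {
    // A title word matches if it starts with the query prefix.
    if (word.compare(0, needle.size(), needle) == 0)
      return true;  // e.g. "paget" matches "pagetwo" and "pagethree"
  }
  return false;
}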
391 360
392 // Tests max_count feature for Full Text Search queries. 361 // Tests max_count feature for text search queries.
393 TEST_F(HistoryQueryTest, FTSCount) { 362 TEST_F(HistoryQueryTest, TextSearchCount) {
394 ASSERT_TRUE(history_.get()); 363 ASSERT_TRUE(history_.get());
395 364
396 QueryOptions options; 365 QueryOptions options;
397 QueryResults results; 366 QueryResults results;
398 367
399 // Query all time but with a limit on the number of entries. We should 368 // Query all time but with a limit on the number of entries. We should
400 // get the N most recent entries. 369 // get the N most recent entries.
401 options.max_count = 2; 370 options.max_count = 2;
402 QueryHistory("some", options, &results); 371 QueryHistory("some", options, &results);
403 EXPECT_EQ(2U, results.size()); 372 EXPECT_EQ(2U, results.size());
404 EXPECT_TRUE(NthResultIs(results, 0, 2)); 373 EXPECT_TRUE(NthResultIs(results, 0, 2));
405 EXPECT_TRUE(NthResultIs(results, 1, 3)); 374 EXPECT_TRUE(NthResultIs(results, 1, 3));
406 375
407 // Now query a subset of the pages and limit by N items. "FOO" should match 376 // Now query a subset of the pages and limit by N items. "FOO" should match
408 // the 2nd & 3rd pages, but we should only get the 3rd one because of the one 377 // the 2nd & 3rd pages, but we should only get the 3rd one because of the one
409 // page max restriction. 378 // page max restriction.
410 options.max_count = 1; 379 options.max_count = 1;
411 QueryHistory("FOO", options, &results); 380 QueryHistory("FOO", options, &results);
412 EXPECT_EQ(1U, results.size()); 381 EXPECT_EQ(1U, results.size());
413 EXPECT_TRUE(NthResultIs(results, 0, 3)); 382 EXPECT_TRUE(NthResultIs(results, 0, 3));
414 } 383 }
415 384
416 // Tests that FTS queries can find URLs when they exist only in the archived 385 // Tests that text search queries can find URLs when they exist only in the
417 // database. This also tests that imported URLs can be found, since we use 386 // archived database. This also tests that imported URLs can be found, since
418 // AddPageWithDetails just like the importer. 387 // we use AddPageWithDetails just like the importer.
419 TEST_F(HistoryQueryTest, FTSArchived) { 388 TEST_F(HistoryQueryTest, TextSearchArchived) {
420 ASSERT_TRUE(history_.get()); 389 ASSERT_TRUE(history_.get());
421 390
422 URLRows urls_to_add; 391 URLRows urls_to_add;
423 392
424 URLRow row1(GURL("http://foo.bar/")); 393 URLRow row1(GURL("http://foo.bar/"));
425 row1.set_title(UTF8ToUTF16("archived title")); 394 row1.set_title(UTF8ToUTF16("archived title same"));
426 row1.set_last_visit(Time::Now() - TimeDelta::FromDays(365)); 395 row1.set_last_visit(Time::Now() - TimeDelta::FromDays(365));
427 urls_to_add.push_back(row1); 396 urls_to_add.push_back(row1);
428 397
429 URLRow row2(GURL("http://foo.bar/")); 398 URLRow row2(GURL("http://foo.bar/"));
430 row2.set_title(UTF8ToUTF16("nonarchived title")); 399 row2.set_title(UTF8ToUTF16("nonarchived title same"));
431 row2.set_last_visit(Time::Now()); 400 row2.set_last_visit(Time::Now());
432 urls_to_add.push_back(row2); 401 urls_to_add.push_back(row2);
433 402
434 history_->AddPagesWithDetails(urls_to_add, history::SOURCE_BROWSED); 403 history_->AddPagesWithDetails(urls_to_add, history::SOURCE_BROWSED);
435 404
436 QueryOptions options; 405 QueryOptions options;
437 QueryResults results; 406 QueryResults results;
438 407
 439 // Query all time. The title we get should be the one in the full text 408 // Query all time. The title we get should be the one in the archived database and
440 // database and not the most current title (since otherwise highlighting in 409 // not the most current title (since otherwise highlighting in
441 // the title might be wrong). 410 // the title might be wrong).
442 QueryHistory("archived", options, &results); 411 QueryHistory("archived", options, &results);
443 ASSERT_EQ(1U, results.size()); 412 ASSERT_EQ(1U, results.size());
444 EXPECT_TRUE(row1.url() == results[0].url()); 413 EXPECT_TRUE(row1.url() == results[0].url());
445 EXPECT_TRUE(row1.title() == results[0].title()); 414 EXPECT_TRUE(row1.title() == results[0].title());
415
416 // Check query is ordered correctly when split between archived and
417 // non-archived database.
418 QueryHistory("same", options, &results);
Scott Hess - ex-Googler 2013/06/20 19:50:22 Or "title".
rmcilroy 2013/06/20 21:48:08 "title" also finds the other results which contain
Scott Hess - ex-Googler 2013/06/20 22:18:31 In that case, nevermind my point.
419 ASSERT_EQ(2U, results.size());
420 EXPECT_TRUE(row2.url() == results[0].url());
421 EXPECT_TRUE(row2.title() == results[0].title());
422 EXPECT_TRUE(row1.url() == results[1].url());
423 EXPECT_TRUE(row1.title() == results[1].title());
446 } 424 }
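Aside (not part of the patch): "ordered correctly when split between archived and non-archived database" amounts to merging the two databases' result lists newest-first, so row2 (visited now) precedes row1 (visited a year ago). A standalone sketch of that merge; the Hit struct and timestamps are stand-ins, not history types:

#include <algorithm>
#include <iterator>
#include <vector>

struct Hit { int url_id; double visit_time; };  // stand-in for URLResult

// Both inputs are assumed already sorted newest-first, since each database
// returns its own results ordered by visit time.
std::vector<Hit> MergeNewestFirst(const std::vector<Hit>& main_db,
                                  const std::vector<Hit>& archived_db) {
  std::vector<Hit> merged;
  std::merge(main_db.begin(), main_db.end(),
             archived_db.begin(), archived_db.end(),
             std::back_inserter(merged),
             [](const Hit& a, const Hit& b) { return a.visit_time > b.visit_time; });
  return merged;
}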
447 425
448 /* TODO(brettw) re-enable this. It is commented out because the current history 426 /* TODO(brettw) re-enable this. It is commented out because the current history
449 code prohibits adding more than one indexed page with the same URL. When we 427 code prohibits adding more than one indexed page with the same URL. When we
450 have tiered history, there could be a dupe in the archived history which 428 have tiered history, there could be a dupe in the archived history which
 451 won't get picked up by the deleter and it can happen again. When this is the 429 won't get picked up by the deleter and it can happen again. When this is the
452 case, we should fix this test to duplicate that situation. 430 case, we should fix this test to duplicate that situation.
453 431
454 // Tests duplicate collapsing and not in Full Text Search situations. 432 // Tests duplicate collapsing and not in text search situations.
455 TEST_F(HistoryQueryTest, FTSDupes) { 433 TEST_F(HistoryQueryTest, TextSearchDupes) {
456 ASSERT_TRUE(history_.get()); 434 ASSERT_TRUE(history_.get());
457 435
458 QueryOptions options; 436 QueryOptions options;
459 QueryResults results; 437 QueryResults results;
460 438
461 QueryHistory("Other", options, &results); 439 QueryHistory("Other", options, &results);
462 EXPECT_EQ(1, results.urls().size()); 440 EXPECT_EQ(1U, results.size());
463 EXPECT_TRUE(NthResultIs(results, 0, 4)); 441 EXPECT_TRUE(NthResultIs(results, 0, 4));
464 } 442 }
465 */ 443 */
466 444
467 // Test iterating over pages of results. 445 // Test iterating over pages of results.
468 TEST_F(HistoryQueryTest, Paging) { 446 TEST_F(HistoryQueryTest, Paging) {
469 // Since results are fetched 1 and 2 at a time, entry #0 and #6 will not 447 // Since results are fetched 1 and 2 at a time, entry #0 and #6 will not
470 // be de-duplicated. 448 // be de-duplicated.
471 int expected_results[] = { 4, 2, 3, 1, 7, 6, 5, 0 }; 449 int expected_results[] = { 4, 2, 3, 1, 7, 6, 5, 0 };
472 TestPaging(std::string(), expected_results, arraysize(expected_results)); 450 TestPaging(std::string(), expected_results, arraysize(expected_results));
473 } 451 }
474 452
475 TEST_F(HistoryQueryTest, FTSPaging) { 453 TEST_F(HistoryQueryTest, TextSearchPaging) {
476 // Since results are fetched 1 and 2 at a time, entry #0 and #6 will not 454 // Since results are fetched 1 and 2 at a time, entry #0 and #6 will not
477 // be de-duplicated. Entry #4 does not contain the text "title", so it 455 // be de-duplicated. Entry #4 does not contain the text "title", so it
478 // shouldn't appear. 456 // shouldn't appear.
479 int expected_results[] = { 2, 3, 1, 7, 6, 5 }; 457 int expected_results[] = { 2, 3, 1, 7, 6, 5 };
480 TestPaging("title", expected_results, arraysize(expected_results)); 458 TestPaging("title", expected_results, arraysize(expected_results));
481 } 459 }
482 460
483 } // namespace history 461 } // namespace history