Index: chrome/browser/history/history_querying_unittest.cc |
diff --git a/chrome/browser/history/history_querying_unittest.cc b/chrome/browser/history/history_querying_unittest.cc |
index 4719b90ea0eda9a9420f9d6c523f02625a497e59..aa69fc478c3e34a0632fd1b2cafe26c1ca68b11b 100644 |
--- a/chrome/browser/history/history_querying_unittest.cc |
+++ b/chrome/browser/history/history_querying_unittest.cc |
@@ -26,29 +26,25 @@ struct TestEntry { |
const char* url; |
const char* title; |
const int days_ago; |
- const char* body; |
Time time; // Filled by SetUp. |
} test_entries[] = { |
// This one is visited super long ago so it will be in a different database |
// from the next appearance of it at the end. |
- {"http://example.com/", "Other", 180, "Other"}, |
+ {"http://example.com/", "Other", 180}, |
// These are deliberately added out of chronological order. The history |
// service should sort them by visit time when returning query results. |
// The correct index sort order is 4 2 3 1 7 6 5 0. |
- {"http://www.google.com/1", "Title 1", 10, |
- "PAGEONE FOO some body text"}, |
- {"http://www.google.com/3", "Title 3", 8, |
- "PAGETHREE BAR some hello world for you"}, |
- {"http://www.google.com/2", "Title 2", 9, |
- "PAGETWO FOO some more blah blah blah Title"}, |
+ {"http://www.google.com/1", "Title PAGEONE FOO some text", 10}, |
+ {"http://www.google.com/3", "Title PAGETHREE BAR some hello world", 8}, |
+ {"http://www.google.com/2", "Title PAGETWO FOO some more blah blah blah", 9}, |
// A more recent visit of the first one. |
- {"http://example.com/", "Other", 6, "Other"}, |
+ {"http://example.com/", "Other", 6}, |
- {"http://www.google.com/6", "Title 6", 13, "I'm the second oldest"}, |
- {"http://www.google.com/4", "Title 4", 12, "four"}, |
- {"http://www.google.com/5", "Title 5", 11, "five"}, |
+ {"http://www.google.com/6", "Title I'm the second oldest", 13}, |
+ {"http://www.google.com/4", "Title four", 12}, |
+ {"http://www.google.com/5", "Title five", 11}, |
}; |
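For reference, the "4 2 3 1 7 6 5 0" order in the comment above can be read straight off the days_ago column, most recent visit first:

    index:     0    1   2   3   4   5   6   7
    days_ago:  180  10  8   9   6   13  12  11
    newest first: 4 (6d), 2 (8d), 3 (9d), 1 (10d), 7 (11d), 6 (12d), 5 (13d), 0 (180d)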
// Returns true if the nth result in the given results set matches. It will |
@@ -91,8 +87,8 @@ class HistoryQueryTest : public testing::Test { |
} |
// Test paging through results, with a fixed number of results per page. |
- // Defined here so code can be shared for the FTS version and the non-FTS |
- // version. |
+ // Defined here so code can be shared for the text search and the non-text |
+ // search versions. |
void TestPaging(const std::string& query_text, |
const int* expected_results, |
int results_length) { |
@@ -125,10 +121,10 @@ class HistoryQueryTest : public testing::Test { |
} |
// Add a couple of entries with duplicate timestamps. Use |query_text| as |
- // the body of both entries so that they match a full-text query. |
+ // the title of both entries so that they match a text query. |
TestEntry duplicates[] = { |
- { "http://www.google.com/x", "", 1, query_text.c_str() }, |
- { "http://www.google.com/y", "", 1, query_text.c_str() } |
+ { "http://www.google.com/x", query_text.c_str(), 1, }, |
+ { "http://www.google.com/y", query_text.c_str(), 1, } |
}; |
AddEntryToHistory(duplicates[0]); |
AddEntryToHistory(duplicates[1]); |
@@ -157,7 +153,6 @@ class HistoryQueryTest : public testing::Test { |
history::RedirectList(), content::PAGE_TRANSITION_LINK, |
history::SOURCE_BROWSED, false); |
history_->SetPageTitle(url, UTF8ToUTF16(entry.title)); |
- history_->SetPageContents(url, UTF8ToUTF16(entry.body)); |
} |
private: |
@@ -313,14 +308,13 @@ TEST_F(HistoryQueryTest, ReachedBeginning) { |
EXPECT_TRUE(results.reached_beginning()); |
options.max_count = results.size(); |
QueryHistory("some", options, &results); |
- // Since the query didn't cover the oldest visit in the database, we |
- // expect false here. |
- EXPECT_FALSE(results.reached_beginning()); |
+ EXPECT_TRUE(results.reached_beginning()); |
Scott Hess - ex-Googler
2013/06/20 19:50:22
I think the previous EXPECT_FALSE() case was tryin
rmcilroy
2013/06/20 21:48:08
I _think_ this was not reaching the oldest in this
Scott Hess - ex-Googler
2013/06/20 22:18:31
<no-idea-dog/>, but it sounds like you're on top o
|
} |
-// This does most of the same tests above, but searches for a FTS string that |
-// will match the pages in question. This will trigger a different code path. |
-TEST_F(HistoryQueryTest, FTS) { |
+// This does most of the same tests as above, but performs a text search for a |
+// string that will match the pages in question. This will trigger a |
+// different code path. |
+TEST_F(HistoryQueryTest, TextSearch) { |
ASSERT_TRUE(history_.get()); |
QueryOptions options; |
@@ -349,33 +343,8 @@ TEST_F(HistoryQueryTest, FTS) { |
EXPECT_TRUE(NthResultIs(results, 0, 1)); |
} |
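The earlier assertions in this test are outside the hunk; the following is a rough sketch (not the verbatim test body) of the kind of check it performs against the new title-based entries, using the QueryOptions/QueryHistory/NthResultIs helpers from this file. The "some" query string is an assumption based on the titles above:

    QueryOptions options;
    QueryResults results;

    // "some" appears in the titles of entries 1, 2 and 3; results come back
    // newest-visit first.
    QueryHistory("some", options, &results);
    EXPECT_EQ(3U, results.size());
    EXPECT_TRUE(NthResultIs(results, 0, 2));  // 8 days ago
    EXPECT_TRUE(NthResultIs(results, 1, 3));  // 9 days ago
    EXPECT_TRUE(NthResultIs(results, 2, 1));  // 10 days ago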
-// Searches titles. |
-TEST_F(HistoryQueryTest, FTSTitle) { |
- ASSERT_TRUE(history_.get()); |
- |
- QueryOptions options; |
- QueryResults results; |
- |
- // First execute a body-only query, to ensure that it works and that that |
- // version of the statement is not cached for the next query. |
- options.body_only = true; |
Scott Hess - ex-Googler
2013/06/20 19:50:22
Is |body_only| still present as an option?
rmcilroy
2013/06/20 21:48:08
I will remove it in the followup CL which removes
Scott Hess - ex-Googler
2013/06/20 22:18:31
OK. Mostly wanted to make sure there wasn't an al
|
- QueryHistory("Title", options, &results); |
- EXPECT_EQ(1U, results.size()); |
- EXPECT_TRUE(NthResultIs(results, 0, 3)); |
- options.body_only = false; |
- |
- // Query all time but with a limit on the number of entries. We should |
- // get the N most recent entries. |
- options.max_count = 3; |
- QueryHistory("title", options, &results); |
- EXPECT_EQ(3U, results.size()); |
- EXPECT_TRUE(NthResultIs(results, 0, 2)); |
- EXPECT_TRUE(NthResultIs(results, 1, 3)); |
- EXPECT_TRUE(NthResultIs(results, 2, 1)); |
-} |
- |
-// Tests prefix searching for Full Text Search queries. |
-TEST_F(HistoryQueryTest, FTSPrefix) { |
+// Tests prefix searching for text search queries. |
+TEST_F(HistoryQueryTest, TextSearchPrefix) { |
ASSERT_TRUE(history_.get()); |
QueryOptions options; |
@@ -389,8 +358,8 @@ TEST_F(HistoryQueryTest, FTSPrefix) { |
EXPECT_TRUE(NthResultIs(results, 1, 3)); |
} |
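The prefix query itself is not visible in this hunk; presumably it looks something like the sketch below, where "PAGET" is an assumed prefix string that matches the PAGETHREE (entry 2) and PAGETWO (entry 3) titles:

    // Prefix search: "PAGET" should match "PAGETHREE..." (entry 2, 8 days ago)
    // and "PAGETWO..." (entry 3, 9 days ago), newest first.
    QueryHistory("PAGET", options, &results);
    EXPECT_EQ(2U, results.size());
    EXPECT_TRUE(NthResultIs(results, 0, 2));
    EXPECT_TRUE(NthResultIs(results, 1, 3));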
-// Tests max_count feature for Full Text Search queries. |
-TEST_F(HistoryQueryTest, FTSCount) { |
+// Tests max_count feature for text search queries. |
+TEST_F(HistoryQueryTest, TextSearchCount) { |
ASSERT_TRUE(history_.get()); |
QueryOptions options; |
@@ -413,21 +382,21 @@ TEST_F(HistoryQueryTest, FTSCount) { |
EXPECT_TRUE(NthResultIs(results, 0, 3)); |
} |
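Again only the tail of this test is shown in the hunk; a sketch of the max_count behaviour it exercises (the query strings and counts are assumptions consistent with the entries above):

    // Limit a "some" query (titles 1, 2 and 3 match) to the two newest hits.
    options.max_count = 2;
    QueryHistory("some", options, &results);
    EXPECT_EQ(2U, results.size());
    EXPECT_TRUE(NthResultIs(results, 0, 2));
    EXPECT_TRUE(NthResultIs(results, 1, 3));

    // With max_count = 1, a "FOO" query (titles 1 and 3 match) returns only
    // the newer of the two (entry 3), matching the assertion visible above.
    options.max_count = 1;
    QueryHistory("FOO", options, &results);
    EXPECT_EQ(1U, results.size());
    EXPECT_TRUE(NthResultIs(results, 0, 3));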
-// Tests that FTS queries can find URLs when they exist only in the archived |
-// database. This also tests that imported URLs can be found, since we use |
-// AddPageWithDetails just like the importer. |
-TEST_F(HistoryQueryTest, FTSArchived) { |
+// Tests that text search queries can find URLs when they exist only in the |
+// archived database. This also tests that imported URLs can be found, since |
+// we use AddPageWithDetails just like the importer. |
+TEST_F(HistoryQueryTest, TextSearchArchived) { |
ASSERT_TRUE(history_.get()); |
URLRows urls_to_add; |
URLRow row1(GURL("http://foo.bar/")); |
- row1.set_title(UTF8ToUTF16("archived title")); |
+ row1.set_title(UTF8ToUTF16("archived title same")); |
row1.set_last_visit(Time::Now() - TimeDelta::FromDays(365)); |
urls_to_add.push_back(row1); |
URLRow row2(GURL("http://foo.bar/")); |
- row2.set_title(UTF8ToUTF16("nonarchived title")); |
+ row2.set_title(UTF8ToUTF16("nonarchived title same")); |
row2.set_last_visit(Time::Now()); |
urls_to_add.push_back(row2); |
@@ -436,13 +405,22 @@ TEST_F(HistoryQueryTest, FTSArchived) { |
QueryOptions options; |
QueryResults results; |
- // Query all time. The title we get should be the one in the full text |
- // database and not the most current title (since otherwise highlighting in |
+ // Query all time. The title we get should be the one in the archived |
+ // database and not the most current title (since otherwise highlighting in |
// the title might be wrong). |
QueryHistory("archived", options, &results); |
ASSERT_EQ(1U, results.size()); |
EXPECT_TRUE(row1.url() == results[0].url()); |
EXPECT_TRUE(row1.title() == results[0].title()); |
+ |
+ // Check that results are ordered correctly when the query is split between |
+ // the archived and non-archived databases. |
+ QueryHistory("same", options, &results); |
Scott Hess - ex-Googler
2013/06/20 19:50:22
Or "title".
rmcilroy
2013/06/20 21:48:08
"title" also finds the other results which contain
Scott Hess - ex-Googler
2013/06/20 22:18:31
In that case, nevermind my point.
|
+ ASSERT_EQ(2U, results.size()); |
+ EXPECT_TRUE(row2.url() == results[0].url()); |
+ EXPECT_TRUE(row2.title() == results[0].title()); |
+ EXPECT_TRUE(row1.url() == results[1].url()); |
+ EXPECT_TRUE(row1.title() == results[1].title()); |
} |
/* TODO(brettw) re-enable this. It is commented out because the current history |
@@ -451,15 +429,15 @@ TEST_F(HistoryQueryTest, FTSArchived) { |
won't get picked up by the deletor and it can happen again. When this is the |
case, we should fix this test to duplicate that situation. |
-// Tests duplicate collapsing and not in Full Text Search situations. |
-TEST_F(HistoryQueryTest, FTSDupes) { |
+// Tests duplicate collapsing and not in text search situations. |
+TEST_F(HistoryQueryTest, TextSearchDupes) { |
ASSERT_TRUE(history_.get()); |
QueryOptions options; |
QueryResults results; |
QueryHistory("Other", options, &results); |
- EXPECT_EQ(1, results.urls().size()); |
+ EXPECT_EQ(1U, results.size()); |
EXPECT_TRUE(NthResultIs(results, 0, 4)); |
} |
*/ |
@@ -472,7 +450,7 @@ TEST_F(HistoryQueryTest, Paging) { |
TestPaging(std::string(), expected_results, arraysize(expected_results)); |
} |
-TEST_F(HistoryQueryTest, FTSPaging) { |
+TEST_F(HistoryQueryTest, TextSearchPaging) { |
// Since results are fetched 1 and 2 at a time, entry #0 and #6 will not |
// be de-duplicated. Entry #4 does not contain the text "title", so it |
// shouldn't appear. |