Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(1191)

Unified Diff: chrome/browser/history/history_querying_unittest.cc

Issue 235863023: Eliminate the archived history database and clean up related code. (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src
Patch Set: Rename a few outstanding instances of "archived" to "expired". Created 6 years, 6 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View side-by-side diff with in-line comments
Download patch
Index: chrome/browser/history/history_querying_unittest.cc
diff --git a/chrome/browser/history/history_querying_unittest.cc b/chrome/browser/history/history_querying_unittest.cc
index 123eb206b8608f69ce891e4fb52b6e0a8b4be789..af6577584aaf73829ca4c706994a61f219e95d8e 100644
--- a/chrome/browser/history/history_querying_unittest.cc
+++ b/chrome/browser/history/history_querying_unittest.cc
@@ -383,66 +383,6 @@ TEST_F(HistoryQueryTest, TextSearchCount) {
EXPECT_TRUE(NthResultIs(results, 0, 3));
}
-// Tests that text search queries can find URLs when they exist only in the
-// archived database. This also tests that imported URLs can be found, since
-// we use AddPageWithDetails just like the importer.
-TEST_F(HistoryQueryTest, TextSearchArchived) {
- ASSERT_TRUE(history_.get());
-
- URLRows urls_to_add;
-
- URLRow row1(GURL("http://foo.bar/"));
- row1.set_title(base::UTF8ToUTF16("archived title same"));
- row1.set_last_visit(Time::Now() - TimeDelta::FromDays(365));
- urls_to_add.push_back(row1);
-
- URLRow row2(GURL("http://foo.bar/"));
- row2.set_title(base::UTF8ToUTF16("nonarchived title same"));
- row2.set_last_visit(Time::Now());
- urls_to_add.push_back(row2);
-
- history_->AddPagesWithDetails(urls_to_add, history::SOURCE_BROWSED);
-
- QueryOptions options;
- QueryResults results;
-
- // Query all time. The title we get should be the one in the archived
- // database and not the most current title (since otherwise highlighting
- // in the title might be wrong).
- QueryHistory("archived", options, &results);
- ASSERT_EQ(1U, results.size());
- EXPECT_TRUE(row1.url() == results[0].url());
- EXPECT_TRUE(row1.title() == results[0].title());
-
- // Check that the query results are ordered correctly when split between
- // the archived and non-archived databases.
- QueryHistory("same", options, &results);
- ASSERT_EQ(2U, results.size());
- EXPECT_TRUE(row2.url() == results[0].url());
- EXPECT_TRUE(row2.title() == results[0].title());
- EXPECT_TRUE(row1.url() == results[1].url());
- EXPECT_TRUE(row1.title() == results[1].title());
-}
-
-/* TODO(brettw) re-enable this. It is commented out because the current history
- code prohibits adding more than one indexed page with the same URL. When we
- have tiered history, there could be a dupe in the archived history which
- won't get picked up by the deletor and it can happen again. When this is the
- case, we should fix this test to duplicate that situation.
-
-// Tests duplicate collapsing and not in text search situations.
-TEST_F(HistoryQueryTest, TextSearchDupes) {
- ASSERT_TRUE(history_.get());
-
- QueryOptions options;
- QueryResults results;
-
- QueryHistory("Other", options, &results);
- EXPECT_EQ(1U, results.size());
- EXPECT_TRUE(NthResultIs(results, 0, 4));
-}
-*/
-
// Tests IDN text search by both ASCII and UTF.
TEST_F(HistoryQueryTest, TextSearchIDN) {
ASSERT_TRUE(history_.get());

Powered by Google App Engine
This is Rietveld 408576698