| OLD | NEW |
| 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "base/basictypes.h" | 5 #include "base/basictypes.h" |
| 6 #include "base/bind.h" | 6 #include "base/bind.h" |
| 7 #include "base/bind_helpers.h" | 7 #include "base/bind_helpers.h" |
| 8 #include "base/file_util.h" | 8 #include "base/file_util.h" |
| 9 #include "base/files/file_path.h" | 9 #include "base/files/file_path.h" |
| 10 #include "base/files/scoped_temp_dir.h" | 10 #include "base/files/scoped_temp_dir.h" |
| (...skipping 365 matching lines...) |
| 376 | 376 |
| 377 // Now query a subset of the pages and limit by N items. "FOO" should match | 377 // Now query a subset of the pages and limit by N items. "FOO" should match |
| 378 // the 2nd & 3rd pages, but we should only get the 3rd one because of the one | 378 // the 2nd & 3rd pages, but we should only get the 3rd one because of the one |
| 379 // page max restriction. | 379 // page max restriction. |
| 380 options.max_count = 1; | 380 options.max_count = 1; |
| 381 QueryHistory("FOO", options, &results); | 381 QueryHistory("FOO", options, &results); |
| 382 EXPECT_EQ(1U, results.size()); | 382 EXPECT_EQ(1U, results.size()); |
| 383 EXPECT_TRUE(NthResultIs(results, 0, 3)); | 383 EXPECT_TRUE(NthResultIs(results, 0, 3)); |
| 384 } | 384 } |
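Reviewer note: a minimal standalone sketch of what this block asserts, assuming (per the comment above) that matches come back newest-first and that a nonzero max_count keeps only the first N. This is an illustration under those assumptions, not Chromium's actual QueryHistory() implementation.

#include <algorithm>
#include <cstddef>
#include <string>
#include <vector>

// Sketch only: order matches newest-first, then apply the max_count cap.
// This mirrors why "FOO" matching two pages yields just one result above.
struct Match { std::string url; double visit_time; };

std::vector<Match> CapResults(std::vector<Match> matches, size_t max_count) {
  std::sort(matches.begin(), matches.end(),
            [](const Match& a, const Match& b) {
              return a.visit_time > b.visit_time;  // newest first
            });
  if (max_count != 0 && matches.size() > max_count)
    matches.resize(max_count);  // max_count == 0 means "no limit"
  return matches;
}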
| 385 | 385 |
| 386 // Tests that text search queries can find URLs when they exist only in the | |
| 387 // archived database. This also tests that imported URLs can be found, since | |
| 388 // we use AddPageWithDetails just like the importer. | |
| 389 TEST_F(HistoryQueryTest, TextSearchArchived) { | |
| 390 ASSERT_TRUE(history_.get()); | |
| 391 | |
| 392 URLRows urls_to_add; | |
| 393 | |
| 394 URLRow row1(GURL("http://foo.bar/")); | |
| 395 row1.set_title(base::UTF8ToUTF16("archived title same")); | |
| 396 row1.set_last_visit(Time::Now() - TimeDelta::FromDays(365)); | |
| 397 urls_to_add.push_back(row1); | |
| 398 | |
| 399 URLRow row2(GURL("http://foo.bar/")); | |
| 400 row2.set_title(base::UTF8ToUTF16("nonarchived title same")); | |
| 401 row2.set_last_visit(Time::Now()); | |
| 402 urls_to_add.push_back(row2); | |
| 403 | |
| 404 history_->AddPagesWithDetails(urls_to_add, history::SOURCE_BROWSED); | |
| 405 | |
| 406 QueryOptions options; | |
| 407 QueryResults results; | |
| 408 | |
| 409 // Query all time. The title we get should be the one in the archived | |
| 410 // database, not the most current title (since otherwise highlighting in | |
| 411 // the title might be wrong). | |
| 412 QueryHistory("archived", options, &results); | |
| 413 ASSERT_EQ(1U, results.size()); | |
| 414 EXPECT_TRUE(row1.url() == results[0].url()); | |
| 415 EXPECT_TRUE(row1.title() == results[0].title()); | |
| 416 | |
| 417 // Check query is ordered correctly when split between archived and | |
| 418 // non-archived database. | |
| 419 QueryHistory("same", options, &results); | |
| 420 ASSERT_EQ(2U, results.size()); | |
| 421 EXPECT_TRUE(row2.url() == results[0].url()); | |
| 422 EXPECT_TRUE(row2.title() == results[0].title()); | |
| 423 EXPECT_TRUE(row1.url() == results[1].url()); | |
| 424 EXPECT_TRUE(row1.title() == results[1].title()); | |
| 425 } | |
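Reviewer note: a standalone sketch (an assumption about the mechanism, not the actual HistoryBackend code) of the ordering the "same" query asserts: rows from the main and archived databases are merged and sorted newest-first, so the recent row2 precedes the year-old row1.

#include <algorithm>
#include <string>
#include <vector>

// Sketch only: merge main-database and archived-database rows into a
// single newest-first list, mirroring the expected result order above.
struct Row { std::string url; std::string title; double last_visit; };

std::vector<Row> MergeNewestFirst(std::vector<Row> main_rows,
                                  const std::vector<Row>& archived_rows) {
  main_rows.insert(main_rows.end(), archived_rows.begin(),
                   archived_rows.end());
  std::sort(main_rows.begin(), main_rows.end(),
            [](const Row& a, const Row& b) {
              return a.last_visit > b.last_visit;
            });
  return main_rows;
}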
| 426 | |
| 427 /* TODO(brettw) re-enable this. It is commented out because the current history | |
| 428 code prohibits adding more than one indexed page with the same URL. When we | |
| 429 have tiered history, there could be a dupe in the archived history which | |
| 430 won't get picked up by the deleter, and this can happen again. When that is | |
| 431 the case, we should update this test to reproduce that situation. | |
| 432 | |
| 433 // Tests duplicate collapsing in text search situations. | |
| 434 TEST_F(HistoryQueryTest, TextSearchDupes) { | |
| 435 ASSERT_TRUE(history_.get()); | |
| 436 | |
| 437 QueryOptions options; | |
| 438 QueryResults results; | |
| 439 | |
| 440 QueryHistory("Other", options, &results); | |
| 441 EXPECT_EQ(1U, results.size()); | |
| 442 EXPECT_TRUE(NthResultIs(results, 0, 4)); | |
| 443 } | |
| 444 */ | |
| 445 | |
| 446 // Tests IDN text search by both ASCII and UTF. | 386 // Tests IDN text search by both ASCII and UTF. |
| 447 TEST_F(HistoryQueryTest, TextSearchIDN) { | 387 TEST_F(HistoryQueryTest, TextSearchIDN) { |
| 448 ASSERT_TRUE(history_.get()); | 388 ASSERT_TRUE(history_.get()); |
| 449 | 389 |
| 450 QueryOptions options; | 390 QueryOptions options; |
| 451 QueryResults results; | 391 QueryResults results; |
| 452 | 392 |
| 453 TestEntry entry = { "http://xn--d1abbgf6aiiy.xn--p1ai/", "Nothing", 0, }; | 393 TestEntry entry = { "http://xn--d1abbgf6aiiy.xn--p1ai/", "Nothing", 0, }; |
| 454 AddEntryToHistory(entry); | 394 AddEntryToHistory(entry); |
| 455 | 395 |
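Reviewer note: the host here appears to be the Punycode (IDNA) form of the Cyrillic "президент.рф", which is how one URL can be searched "by both ASCII and UTF": an ASCII query can hit the stored punycode directly, while a Unicode query needs an IDNA conversion step first. A sketch of the ASCII side only, assuming plain substring matching:

#include <string>

// Sketch only: an ASCII query matches the stored punycode host verbatim.
// The Unicode path would require IDNA encoding/decoding (e.g. via ICU),
// which is omitted here.
bool AsciiQueryMatches(const std::string& stored_url,
                       const std::string& ascii_query) {
  return stored_url.find(ascii_query) != std::string::npos;
}
// AsciiQueryMatches("http://xn--d1abbgf6aiiy.xn--p1ai/", "xn--p1ai") -> true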
| (...skipping 23 matching lines...) |
| 479 | 419 |
| 480 TEST_F(HistoryQueryTest, TextSearchPaging) { | 420 TEST_F(HistoryQueryTest, TextSearchPaging) { |
| 481 // Since results are fetched 1 and 2 at a time, entry #0 and #6 will not | 421 // Since results are fetched 1 and 2 at a time, entry #0 and #6 will not |
| 482 // be de-duplicated. Entry #4 does not contain the text "title", so it | 422 // be de-duplicated. Entry #4 does not contain the text "title", so it |
| 483 // shouldn't appear. | 423 // shouldn't appear. |
| 484 int expected_results[] = { 2, 3, 1, 7, 6, 5 }; | 424 int expected_results[] = { 2, 3, 1, 7, 6, 5 }; |
| 485 TestPaging("title", expected_results, arraysize(expected_results)); | 425 TestPaging("title", expected_results, arraysize(expected_results)); |
| 486 } | 426 } |
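Reviewer note: a standalone sketch (an assumption about the mechanism, not the real TestPaging helper) of why fetching in small batches leaves cross-batch duplicates alive, as the comment above describes for entries #0 and #6:

#include <cstddef>
#include <set>
#include <string>
#include <vector>

// Sketch only: de-duplication by URL is scoped to the current page, so a
// duplicate that lands in a later fetch is not collapsed.
std::vector<std::string> FetchPage(const std::vector<std::string>& matches,
                                   size_t offset, size_t page_size) {
  std::set<std::string> seen_this_page;
  std::vector<std::string> page;
  for (size_t i = offset; i < matches.size() && page.size() < page_size; ++i) {
    if (seen_this_page.insert(matches[i]).second)
      page.push_back(matches[i]);
  }
  return page;
}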
| 487 | 427 |
| 488 } // namespace history | 428 } // namespace history |