// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "base/files/file_path.h"
#include "base/files/scoped_temp_dir.h"
#include "base/path_service.h"
#include "base/strings/string_util.h"
#include "base/strings/utf_string_conversions.h"
#include "chrome/browser/history/url_database.h"
#include "sql/connection.h"
#include "testing/gtest/include/gtest/gtest.h"

using base::Time;
using base::TimeDelta;

namespace history {

namespace {

bool IsURLRowEqual(const URLRow& a,
                   const URLRow& b) {
  // TODO(brettw) when the database stores an actual Time value rather than
  // a time_t, do a real comparison. Instead, we have to do a rougher
  // comparison since the conversion reduces the precision.
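  // Note that the URLs themselves are not compared; the tests below fetch the
  // actual row by URL first and then compare it against the expected row.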
  return a.title() == b.title() &&
      a.visit_count() == b.visit_count() &&
      a.typed_count() == b.typed_count() &&
      a.last_visit() - b.last_visit() <= TimeDelta::FromSeconds(1) &&
      a.hidden() == b.hidden();
}

}  // namespace

class URLDatabaseTest : public testing::Test,
                        public URLDatabase {
 public:
  URLDatabaseTest() {
  }

 protected:
  // Provided for URL/VisitDatabase.
  virtual sql::Connection& GetDB() OVERRIDE {
    return db_;
  }

 private:
  // Test setup.
  virtual void SetUp() {
    ASSERT_TRUE(temp_dir_.CreateUniqueTempDir());
    base::FilePath db_file = temp_dir_.path().AppendASCII("URLTest.db");

    EXPECT_TRUE(db_.Open(db_file));

    // Initialize the tables for this test.
    CreateURLTable(false);
    CreateMainURLIndex();
    InitKeywordSearchTermsTable();
    CreateKeywordSearchTermsIndices();
  }
  virtual void TearDown() {
    db_.Close();
  }

  base::ScopedTempDir temp_dir_;
  sql::Connection db_;
};

// Test add, update, upsert, and query for the URL table in the
// HistoryDatabase.
TEST_F(URLDatabaseTest, AddAndUpdateURL) {
  // First, add two URLs.
  const GURL url1("http://www.google.com/");
  URLRow url_info1(url1);
  url_info1.set_title(base::UTF8ToUTF16("Google"));
  url_info1.set_visit_count(4);
  url_info1.set_typed_count(2);
  url_info1.set_last_visit(Time::Now() - TimeDelta::FromDays(1));
  url_info1.set_hidden(false);
  URLID id1_initially = AddURL(url_info1);
  EXPECT_TRUE(id1_initially);

  const GURL url2("http://mail.google.com/");
  URLRow url_info2(url2);
  url_info2.set_title(base::UTF8ToUTF16("Google Mail"));
  url_info2.set_visit_count(3);
  url_info2.set_typed_count(0);
  url_info2.set_last_visit(Time::Now() - TimeDelta::FromDays(2));
  url_info2.set_hidden(true);
  EXPECT_TRUE(AddURL(url_info2));

  // Query both of them.
  URLRow info;
  EXPECT_TRUE(GetRowForURL(url1, &info));
  EXPECT_TRUE(IsURLRowEqual(url_info1, info));
  URLID id2 = GetRowForURL(url2, &info);
  EXPECT_TRUE(id2);
  EXPECT_TRUE(IsURLRowEqual(url_info2, info));

  // Update the second.
  url_info2.set_title(base::UTF8ToUTF16("Google Mail Too"));
  url_info2.set_visit_count(4);
  url_info2.set_typed_count(91011);
  url_info2.set_hidden(false);
  EXPECT_TRUE(UpdateURLRow(id2, url_info2));

  // Make sure it got updated.
  URLRow info2;
  EXPECT_TRUE(GetRowForURL(url2, &info2));
  EXPECT_TRUE(IsURLRowEqual(url_info2, info2));

  // Try updating a nonexistent row. This should fail and have no effect.
  const GURL url3("http://youtube.com/");
  URLRow url_info3(url3);
  url_info3.set_id(42);
  EXPECT_FALSE(UpdateURLRow(url_info3.id(), url_info3));
  EXPECT_EQ(0, GetRowForURL(url3, &info));

  // Update an existing URL and insert a new one using the upsert operation.
  url_info1.set_id(id1_initially);
  url_info1.set_title(base::UTF8ToUTF16("Google Again!"));
  url_info1.set_visit_count(5);
  url_info1.set_typed_count(3);
  url_info1.set_last_visit(Time::Now());
  url_info1.set_hidden(true);
  EXPECT_TRUE(InsertOrUpdateURLRowByID(url_info1));

  const GURL url4("http://maps.google.com/");
  URLRow url_info4(url4);
  url_info4.set_id(43);
  url_info4.set_title(base::UTF8ToUTF16("Google Maps"));
  url_info4.set_visit_count(7);
  url_info4.set_typed_count(6);
  url_info4.set_last_visit(Time::Now() - TimeDelta::FromDays(3));
  url_info4.set_hidden(false);
  EXPECT_TRUE(InsertOrUpdateURLRowByID(url_info4));

  // Query both of these as well.
  URLID id1 = GetRowForURL(url1, &info);
  EXPECT_EQ(id1_initially, id1);
  EXPECT_TRUE(IsURLRowEqual(url_info1, info));
  URLID id4 = GetRowForURL(url4, &info);
  EXPECT_EQ(43, id4);
  EXPECT_TRUE(IsURLRowEqual(url_info4, info));

  // Query a nonexistent URL.
  EXPECT_EQ(0, GetRowForURL(GURL("http://news.google.com/"), &info));

  // Delete all urls in the domain.
  // TODO(acw): test the new url based delete domain
  // EXPECT_TRUE(db.DeleteDomain(kDomainID));

  // Make sure the urls have been properly removed.
  // TODO(acw): commented out because remove no longer works.
  // EXPECT_TRUE(db.GetURLInfo(url1, NULL) == NULL);
  // EXPECT_TRUE(db.GetURLInfo(url2, NULL) == NULL);
}

// Tests adding, querying and deleting keyword visits.
TEST_F(URLDatabaseTest, KeywordSearchTermVisit) {
  URLRow url_info1(GURL("http://www.google.com/"));
  url_info1.set_title(base::UTF8ToUTF16("Google"));
  url_info1.set_visit_count(4);
  url_info1.set_typed_count(2);
  url_info1.set_last_visit(Time::Now() - TimeDelta::FromDays(1));
  url_info1.set_hidden(false);
  URLID url_id = AddURL(url_info1);
  ASSERT_NE(0, url_id);

  // Add a keyword visit.
  KeywordID keyword_id = 100;
  base::string16 keyword = base::UTF8ToUTF16("visit");
  ASSERT_TRUE(SetKeywordSearchTermsForURL(url_id, keyword_id, keyword));

  // Make sure we get it back.
  std::vector<KeywordSearchTermVisit> matches;
  GetMostRecentKeywordSearchTerms(keyword_id, keyword, 10, &matches);
  ASSERT_EQ(1U, matches.size());
  ASSERT_EQ(keyword, matches[0].term);

  KeywordSearchTermRow keyword_search_term_row;
  ASSERT_TRUE(GetKeywordSearchTermRow(url_id, &keyword_search_term_row));
  EXPECT_EQ(keyword_id, keyword_search_term_row.keyword_id);
  EXPECT_EQ(url_id, keyword_search_term_row.url_id);
  EXPECT_EQ(keyword, keyword_search_term_row.term);

  // Delete the keyword visit.
  DeleteAllSearchTermsForKeyword(keyword_id);

  // Make sure we don't get it back when querying.
  matches.clear();
  GetMostRecentKeywordSearchTerms(keyword_id, keyword, 10, &matches);
  ASSERT_EQ(0U, matches.size());

  ASSERT_FALSE(GetKeywordSearchTermRow(url_id, &keyword_search_term_row));
}

// Make sure deleting a URL also deletes a keyword visit.
TEST_F(URLDatabaseTest, DeleteURLDeletesKeywordSearchTermVisit) {
  URLRow url_info1(GURL("http://www.google.com/"));
  url_info1.set_title(base::UTF8ToUTF16("Google"));
  url_info1.set_visit_count(4);
  url_info1.set_typed_count(2);
  url_info1.set_last_visit(Time::Now() - TimeDelta::FromDays(1));
  url_info1.set_hidden(false);
  URLID url_id = AddURL(url_info1);
  ASSERT_NE(0, url_id);

  // Add a keyword visit.
  ASSERT_TRUE(
      SetKeywordSearchTermsForURL(url_id, 1, base::UTF8ToUTF16("visit")));

  // Delete the url.
  ASSERT_TRUE(DeleteURLRow(url_id));

  // Make sure the keyword visit was deleted.
  std::vector<KeywordSearchTermVisit> matches;
  GetMostRecentKeywordSearchTerms(1, base::UTF8ToUTF16("visit"), 10, &matches);
  ASSERT_EQ(0U, matches.size());
}

TEST_F(URLDatabaseTest, EnumeratorForSignificant) {
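  // |good_urls| collects the URLs that the significant-URL enumerator is
  // expected to return.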
  std::set<std::string> good_urls;
  // Add URLs which do and don't meet the criteria.
  URLRow url_no_match(GURL("http://www.url_no_match.com/"));
  EXPECT_TRUE(AddURL(url_no_match));

  std::string url_string2("http://www.url_match_visit_count.com/");
  good_urls.insert(url_string2);
  URLRow url_match_visit_count(GURL(url_string2));
  url_match_visit_count.set_visit_count(kLowQualityMatchVisitLimit);
  EXPECT_TRUE(AddURL(url_match_visit_count));

  good_urls.insert("http://www.url_match_typed_count.com/");
  URLRow url_match_typed_count(GURL("http://www.url_match_typed_count.com/"));
  url_match_typed_count.set_typed_count(kLowQualityMatchTypedLimit);
  EXPECT_TRUE(AddURL(url_match_typed_count));

  good_urls.insert("http://www.url_match_last_visit.com/");
  URLRow url_match_last_visit(GURL("http://www.url_match_last_visit.com/"));
  url_match_last_visit.set_last_visit(Time::Now() - TimeDelta::FromDays(1));
  EXPECT_TRUE(AddURL(url_match_last_visit));

  URLRow url_no_match_last_visit(GURL(
      "http://www.url_no_match_last_visit.com/"));
  url_no_match_last_visit.set_last_visit(Time::Now() -
      TimeDelta::FromDays(kLowQualityMatchAgeLimitInDays + 1));
  EXPECT_TRUE(AddURL(url_no_match_last_visit));
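
  // Enumerate the significant URLs. Every row returned must be one of the
  // three "good" URLs added above, and exactly three rows should come back.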
  URLDatabase::URLEnumerator history_enum;
  EXPECT_TRUE(InitURLEnumeratorForSignificant(&history_enum));
  URLRow row;
  int row_count = 0;
  for (; history_enum.GetNextURL(&row); ++row_count)
    EXPECT_EQ(1U, good_urls.count(row.url().spec()));
  EXPECT_EQ(3, row_count);
}

// Tests GetKeywordSearchTermRows and DeleteKeywordSearchTerm.
TEST_F(URLDatabaseTest, GetAndDeleteKeywordSearchTermByTerm) {
  URLRow url_info1(GURL("http://www.google.com/"));
  url_info1.set_title(base::UTF8ToUTF16("Google"));
  url_info1.set_visit_count(4);
  url_info1.set_typed_count(2);
  url_info1.set_last_visit(Time::Now() - TimeDelta::FromDays(1));
  url_info1.set_hidden(false);
  URLID url_id1 = AddURL(url_info1);
  ASSERT_NE(0, url_id1);

  // Add a keyword visit.
  KeywordID keyword_id = 100;
  base::string16 keyword = base::UTF8ToUTF16("visit");
  ASSERT_TRUE(SetKeywordSearchTermsForURL(url_id1, keyword_id, keyword));

  URLRow url_info2(GURL("https://www.google.com/"));
  url_info2.set_title(base::UTF8ToUTF16("Google"));
  url_info2.set_visit_count(4);
  url_info2.set_typed_count(2);
  url_info2.set_last_visit(Time::Now() - TimeDelta::FromDays(1));
  url_info2.set_hidden(false);
  URLID url_id2 = AddURL(url_info2);
  ASSERT_NE(0, url_id2);
  // Add the same keyword for url_info2.
  ASSERT_TRUE(SetKeywordSearchTermsForURL(url_id2, keyword_id, keyword));

  // Add another URL with a different search term for the same keyword.
  URLRow url_info3(GURL("https://www.google.com/search"));
  url_info3.set_title(base::UTF8ToUTF16("Google"));
  url_info3.set_visit_count(4);
  url_info3.set_typed_count(2);
  url_info3.set_last_visit(Time::Now() - TimeDelta::FromDays(1));
  url_info3.set_hidden(false);
  URLID url_id3 = AddURL(url_info3);
  ASSERT_NE(0, url_id3);
  base::string16 keyword2 = base::UTF8ToUTF16("Search");

  ASSERT_TRUE(SetKeywordSearchTermsForURL(url_id3, keyword_id, keyword2));

  // We should get 2 rows for |keyword|.
  std::vector<KeywordSearchTermRow> rows;
  ASSERT_TRUE(GetKeywordSearchTermRows(keyword, &rows));
  ASSERT_EQ(2u, rows.size());
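  // The two rows may come back in either order, so branch on which URL ID is
  // first.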
  if (rows[0].url_id == url_id1) {
    EXPECT_EQ(keyword, rows[0].term);
    EXPECT_EQ(keyword, rows[1].term);
    EXPECT_EQ(url_id2, rows[1].url_id);
  } else {
    EXPECT_EQ(keyword, rows[0].term);
    EXPECT_EQ(url_id1, rows[1].url_id);
    EXPECT_EQ(keyword, rows[1].term);
    EXPECT_EQ(url_id2, rows[0].url_id);
  }

  // We should get 1 row for |keyword2|.
  rows.clear();
  ASSERT_TRUE(GetKeywordSearchTermRows(keyword2, &rows));
  ASSERT_EQ(1u, rows.size());
  EXPECT_EQ(keyword2, rows[0].term);
  EXPECT_EQ(url_id3, rows[0].url_id);

  // Delete all rows that have |keyword|.
  ASSERT_TRUE(DeleteKeywordSearchTerm(keyword));
  rows.clear();
  // We should still find |keyword2|.
  ASSERT_TRUE(GetKeywordSearchTermRows(keyword2, &rows));
  ASSERT_EQ(1u, rows.size());
  EXPECT_EQ(keyword2, rows[0].term);
  EXPECT_EQ(url_id3, rows[0].url_id);
  rows.clear();
  // No rows should remain for |keyword|.
  ASSERT_TRUE(GetKeywordSearchTermRows(keyword, &rows));
  EXPECT_TRUE(rows.empty());
}

}  // namespace history