OLD | NEW |
---|---|
(Empty) | |
1 // Copyright (c) 2013 The Chromium Authors. All rights reserved. | |
2 // Use of this source code is governed by a BSD-style license that can be | |
3 // found in the LICENSE file. | |
4 | |
#include "chrome/browser/autofill/autocheckout/whitelist_manager.h"

#include <sstream>
#include <string>
#include <vector>

#include "base/command_line.h"
#include "base/logging.h"
#include "base/memory/scoped_ptr.h"
#include "base/string_split.h"
#include "base/supports_user_data.h"
#include "chrome/browser/autofill/autocheckout/whitelist_url.h"
#include "chrome/common/chrome_switches.h"
#include "content/public/browser/browser_context.h"
#include "googleurl/src/gurl.h"
#include "net/http/http_status_code.h"
#include "net/url_request/url_fetcher.h"
#include "net/url_request/url_request_context_getter.h"
19 | |
namespace {

// Back off applied after each whitelist download attempt:
// 86400 seconds = 24 hours (one day).
const int kWhitelistDownloadBackoffDelay = 86400;

// Key under which the WhitelistManager is stored as user data on a
// content::BrowserContext.
const char kWhiteListKeyName[] = "autocheckout_whitelist_manager";

}  // namespace
27 | |
28 | |
29 namespace autocheckout { | |
30 | |
31 // static | |
32 WhitelistManager* WhitelistManager::GetForBrowserContext( | |
33 content::BrowserContext* context) { | |
34 WhitelistManager* wm = static_cast<WhitelistManager*>( | |
35 context->GetUserData(kWhiteListKeyName)); | |
36 if (!wm) { | |
37 wm = new WhitelistManager(context->GetRequestContext()); | |
38 context->SetUserData(kWhiteListKeyName, wm); | |
39 } | |
40 return wm; | |
41 } | |
42 | |
43 // static | |
44 void WhitelistManager::RemoveFromBrowserContext( | |
45 content::BrowserContext* context) { | |
46 context->RemoveUserData(kWhiteListKeyName); | |
47 } | |
48 | |
49 WhitelistManager::WhitelistManager( | |
50 net::URLRequestContextGetter* context_getter) | |
51 : context_getter_(context_getter), | |
52 next_query_request_(base::Time::Now()), | |
53 experimental_form_filling_enabled_( | |
54 CommandLine::ForCurrentProcess()->HasSwitch( | |
55 switches::kEnableExperimentalFormFilling)) { | |
56 } | |
57 | |
58 bool WhitelistManager::DownloadWhitelist() { | |
59 if (!experimental_form_filling_enabled_) { | |
60 // The feature is not enabled: do not do the request. | |
ahutter
2013/01/19 02:07:16
did you mean ";" instead of ":"
benquan
2013/01/23 23:50:53
Done.
| |
61 return false; | |
62 } | |
63 base::Time now = base::Time::Now(); | |
64 if (next_query_request_ > now) { | |
65 // We are in back-off mode: do not do the request. | |
ahutter
2013/01/19 02:07:16
ditto.
benquan
2013/01/23 23:50:53
Done.
| |
66 return false; | |
67 } | |
68 next_query_request_ = now + | |
69 base::TimeDelta::FromSeconds(kWhitelistDownloadBackoffDelay); | |
70 | |
71 request_.reset(net::URLFetcher::Create( | |
72 0, GetAutocheckoutWhitelistUrl(), net::URLFetcher::GET, this)); | |
73 request_->SetRequestContext(context_getter_); | |
74 request_->Start(); | |
75 return true; | |
76 } | |
77 | |
78 void WhitelistManager::OnURLFetchComplete( | |
79 const net::URLFetcher* source) { | |
80 scoped_ptr<net::URLFetcher> old_request = request_.Pass(); | |
81 DCHECK_EQ(source, old_request.get()); | |
82 | |
83 DVLOG(1) << "Got response from " << source->GetOriginalURL(); | |
84 | |
85 if (source->GetResponseCode() != net::HTTP_OK) | |
86 return; | |
87 | |
88 std::string data; | |
89 source->GetResponseAsString(&data); | |
90 BuildWhitelist(data); | |
91 } | |
92 | |
93 bool WhitelistManager::IsAutocheckoutEnabled(const GURL& url) { | |
94 if (!experimental_form_filling_enabled_) { | |
95 // The feature is not enabled: return false. | |
96 return false; | |
97 } | |
98 if (url.is_empty()) | |
99 return false; | |
100 | |
101 for (std::vector<std::string>::iterator it = url_prefixes_.begin(); | |
102 it != url_prefixes_.end(); ++it) { | |
103 // This is only for ~20 sites initially, liner search is sufficient. | |
104 // TODO(benquan): Looking for optimization options when we support | |
105 // more sites. | |
106 if (url.spec().compare(0, it->size(), *it) == 0) { | |
ahutter
2013/01/19 02:07:16
no curlies
benquan
2013/01/23 23:50:53
Done.
| |
107 return true; | |
108 } | |
109 } | |
110 return false; | |
111 } | |
112 | |
113 void WhitelistManager::BuildWhitelist(const std::string& data) { | |
114 // TODO(benquan) find a better way to parse csv data. | |
ahutter
2013/01/19 02:07:16
missing ":"
benquan
2013/01/23 23:50:53
Done.
| |
115 std::vector<std::string> new_url_prefixes; | |
116 | |
117 std::stringstream dataStream(data); | |
118 std::string line; | |
119 while (std::getline(dataStream, line)) { | |
120 if (!line.empty()) { | |
121 std::vector<std::string> fields; | |
122 base::SplitString(line, ',', &fields); | |
123 // The whilist file is a simple CSV file, and the first column is the url | |
124 // prefix. | |
125 if (!fields[0].empty()) | |
126 new_url_prefixes.push_back(fields[0]); | |
127 } | |
128 } | |
129 url_prefixes_ = new_url_prefixes; | |
130 } | |
131 | |
132 } // namespace autocheckout | |
133 | |
OLD | NEW |