Chromium Code Reviews
Index: chrome/browser/autofill/autocheckout/whitelist_manager.cc
diff --git a/chrome/browser/autofill/autocheckout/whitelist_manager.cc b/chrome/browser/autofill/autocheckout/whitelist_manager.cc
new file mode 100644
index 0000000000000000000000000000000000000000..5e90c7934a59f2a2d595d4baa0db17a0e0a3edde
--- /dev/null
+++ b/chrome/browser/autofill/autocheckout/whitelist_manager.cc
@@ -0,0 +1,133 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "chrome/browser/autofill/autocheckout/whitelist_manager.h"
+
+#include <sstream>
+
| +#include "base/command_line.h" |
| +#include "base/logging.h" |
| +#include "base/memory/scoped_ptr.h" |
| +#include "base/string_split.h" |
| +#include "base/supports_user_data.h" |
| +#include "chrome/browser/autofill/autocheckout/whitelist_url.h" |
| +#include "chrome/common/chrome_switches.h" |
| +#include "content/public/browser/browser_context.h" |
| +#include "googleurl/src/gurl.h" |
| +#include "net/http/http_status_code.h" |
| +#include "net/url_request/url_fetcher.h" |
| +#include "net/url_request/url_request_context_getter.h" |
| + |
| +namespace { |
| + |
+// Back-off period, in seconds, after each whitelist download attempt.
+const int kWhitelistDownloadBackoffDelay = 86400;
ahutter 2013/01/19 02:07:16: Can you add a readable time here? Like 1 day or something.
benquan 2013/01/23 23:50:53: Added a comment here.
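One hedged way to satisfy the request above is to spell the delay out as arithmetic so the unit is obvious; this is a sketch only, not necessarily the wording that landed in a later patch set:

  // Sketch: same value, but the "one day" intent is visible in the code.
  const int kWhitelistDownloadBackoffDelay = 24 * 60 * 60;  // 1 day, in seconds.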
+
+const char kWhiteListKeyName[] = "autocheckout_whitelist_manager";
ahutter 2013/01/19 02:07:16: new line.
benquan 2013/01/23 23:50:53: Done.
+}  // namespace
+
+
+namespace autocheckout {
+
+// static
+WhitelistManager* WhitelistManager::GetForBrowserContext(
+    content::BrowserContext* context) {
+  WhitelistManager* wm = static_cast<WhitelistManager*>(
+      context->GetUserData(kWhiteListKeyName));
+  if (!wm) {
+    wm = new WhitelistManager(context->GetRequestContext());
+    context->SetUserData(kWhiteListKeyName, wm);
+  }
+  return wm;
+}
+
+// static
+void WhitelistManager::RemoveFromBrowserContext(
+    content::BrowserContext* context) {
+  context->RemoveUserData(kWhiteListKeyName);
+}
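A usage sketch for the lookup pattern above: the manager is created lazily and stored in the BrowserContext's user-data map, so callers only need the context. The helper name below is hypothetical and not part of this patch:

  // Hypothetical call site: get (or create) the per-context manager and try
  // to start a whitelist download.
  void MaybeDownloadWhitelist(content::BrowserContext* browser_context) {
    autocheckout::WhitelistManager* manager =
        autocheckout::WhitelistManager::GetForBrowserContext(browser_context);
    manager->DownloadWhitelist();  // Returns false if the feature flag is off
                                   // or we are still inside the back-off window.
  }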
+
+WhitelistManager::WhitelistManager(
+    net::URLRequestContextGetter* context_getter)
+    : context_getter_(context_getter),
+      next_query_request_(base::Time::Now()),
+      experimental_form_filling_enabled_(
+          CommandLine::ForCurrentProcess()->HasSwitch(
+              switches::kEnableExperimentalFormFilling)) {
+}
+
+bool WhitelistManager::DownloadWhitelist() {
+  if (!experimental_form_filling_enabled_) {
+    // The feature is not enabled: do not do the request.
ahutter 2013/01/19 02:07:16: did you mean ";" instead of ":"
benquan 2013/01/23 23:50:53: Done.
+    return false;
+  }
+  base::Time now = base::Time::Now();
+  if (next_query_request_ > now) {
+    // We are in back-off mode: do not do the request.
ahutter 2013/01/19 02:07:16: ditto.
benquan 2013/01/23 23:50:53: Done.
+    return false;
+  }
+  next_query_request_ = now +
+      base::TimeDelta::FromSeconds(kWhitelistDownloadBackoffDelay);
+
+  request_.reset(net::URLFetcher::Create(
+      0, GetAutocheckoutWhitelistUrl(), net::URLFetcher::GET, this));
+  request_->SetRequestContext(context_getter_);
+  request_->Start();
+  return true;
+}
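A small behavioral sketch of the throttling above, assuming `manager` came from GetForBrowserContext() and the experimental flag is enabled (illustrative only, not part of the patch):

  bool started = manager->DownloadWhitelist();    // true: fetch issued and
                                                  // next_query_request_ pushed
                                                  // 86400 s into the future.
  bool throttled = manager->DownloadWhitelist();  // false: still in back-off,
                                                  // no second request is made.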
+
+void WhitelistManager::OnURLFetchComplete(
+    const net::URLFetcher* source) {
+  scoped_ptr<net::URLFetcher> old_request = request_.Pass();
+  DCHECK_EQ(source, old_request.get());
+
+  DVLOG(1) << "Got response from " << source->GetOriginalURL();
+
+  if (source->GetResponseCode() != net::HTTP_OK)
+    return;
+
+  std::string data;
+  source->GetResponseAsString(&data);
+  BuildWhitelist(data);
+}
+
+bool WhitelistManager::IsAutocheckoutEnabled(const GURL& url) {
+  if (!experimental_form_filling_enabled_) {
+    // The feature is not enabled: return false.
+    return false;
+  }
+  if (url.is_empty())
+    return false;
+
+  for (std::vector<std::string>::iterator it = url_prefixes_.begin();
+       it != url_prefixes_.end(); ++it) {
+    // This is only for ~20 sites initially; linear search is sufficient.
+    // TODO(benquan): Look into optimization options when we support
+    // more sites.
+    if (url.spec().compare(0, it->size(), *it) == 0) {
ahutter 2013/01/19 02:07:16: no curlies
benquan 2013/01/23 23:50:53: Done.
+      return true;
+    }
+  }
+  return false;
+}
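On the TODO above: if the whitelist grows, one possible direction is to keep the prefixes sorted and binary-search for the single candidate. This is purely a sketch; MatchesSortedPrefix is a hypothetical helper, needs <algorithm>, and assumes no whitelist entry is itself a prefix of another entry:

  // Hypothetical helper, not part of this patch: with sorted prefixes (and no
  // entry being a prefix of another), the only possible match for |spec| is
  // the largest entry lexicographically <= |spec|.
  bool MatchesSortedPrefix(const std::vector<std::string>& sorted_prefixes,
                           const std::string& spec) {
    std::vector<std::string>::const_iterator it =
        std::upper_bound(sorted_prefixes.begin(), sorted_prefixes.end(), spec);
    if (it == sorted_prefixes.begin())
      return false;
    --it;
    return spec.compare(0, it->size(), *it) == 0;
  }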
+
+void WhitelistManager::BuildWhitelist(const std::string& data) {
+  // TODO(benquan) find a better way to parse csv data.
ahutter 2013/01/19 02:07:16: missing ":"
benquan 2013/01/23 23:50:53: Done.
+  std::vector<std::string> new_url_prefixes;
+
+  std::stringstream dataStream(data);
+  std::string line;
+  while (std::getline(dataStream, line)) {
+    if (!line.empty()) {
+      std::vector<std::string> fields;
+      base::SplitString(line, ',', &fields);
+      // The whitelist file is a simple CSV file, and the first column is the
+      // URL prefix.
+      if (!fields[0].empty())
+        new_url_prefixes.push_back(fields[0]);
+    }
+  }
+  url_prefixes_ = new_url_prefixes;
+}
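For the parser above, a hedged example of what the payload might look like (the URLs are made up; only the first CSV column is kept):

  https://checkout.merchant-a.example/,Merchant A
  https://www.merchant-b.example/buy/,Merchant B

Given that input, BuildWhitelist() leaves url_prefixes_ holding the two URL prefixes and drops the second column.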
+
+}  // namespace autocheckout
+