| Index: chrome/browser/safe_browsing/protocol_manager.cc
|
| diff --git a/chrome/browser/safe_browsing/protocol_manager.cc b/chrome/browser/safe_browsing/protocol_manager.cc
|
| index e825e16fb166b30014f1d9811d7e317fbee9e423..8ba91ca44b2bb1509456abc37ea96c9f7e4d121c 100644
|
| --- a/chrome/browser/safe_browsing/protocol_manager.cc
|
| +++ b/chrome/browser/safe_browsing/protocol_manager.cc
|
| @@ -21,6 +21,7 @@
|
| #include "google_apis/google_api_keys.h"
|
| #include "net/base/escape.h"
|
| #include "net/base/load_flags.h"
|
| +#include "net/base/net_errors.h"
|
| #include "net/url_request/url_fetcher.h"
|
| #include "net/url_request/url_request_context_getter.h"
|
| #include "net/url_request/url_request_status.h"
|
| @@ -28,8 +29,36 @@
|
| using base::Time;
|
| using base::TimeDelta;
|
|
|
| +namespace {
|
| +
|
| +// UpdateResult indicates what happened with the primary and/or backup update
|
| +// requests. The ordering of the values must stay the same for UMA consistency,
|
| +// and matches the ordering of ProtocolManager::BackupUpdateReason.
|
| +enum UpdateResult {
|
| + UPDATE_RESULT_FAIL,
|
| + UPDATE_RESULT_SUCCESS,
|
| + UPDATE_RESULT_BACKUP_CONNECT_FAIL,
|
| + UPDATE_RESULT_BACKUP_CONNECT_SUCCESS,
|
| + UPDATE_RESULT_BACKUP_HTTP_FAIL,
|
| + UPDATE_RESULT_BACKUP_HTTP_SUCCESS,
|
| + UPDATE_RESULT_BACKUP_NETWORK_FAIL,
|
| + UPDATE_RESULT_BACKUP_NETWORK_SUCCESS,
|
| + UPDATE_RESULT_MAX,
|
| + UPDATE_RESULT_BACKUP_START = UPDATE_RESULT_BACKUP_CONNECT_FAIL,
|
| +};
|
| +
|
| +void RecordUpdateResult(UpdateResult result) {
|
| + DCHECK(result >= 0 && result < UPDATE_RESULT_MAX);
|
| + UMA_HISTOGRAM_ENUMERATION("SB2.UpdateResult", result, UPDATE_RESULT_MAX);
|
| +}
|
| +
|
| +} // namespace
|
| +
|
| +// Minimum time, in seconds, from start up before we must issue an update query.
|
| +static const int kSbTimerStartIntervalSecMin = 60;
|
| +
|
| // Maximum time, in seconds, from start up before we must issue an update query.
|
| -static const int kSbTimerStartIntervalSec = 5 * 60;
|
| +static const int kSbTimerStartIntervalSecMax = 300;
|
|
|
| // The maximum time, in seconds, to wait for a response to an update request.
|
| static const int kSbMaxUpdateWaitSec = 30;
|
| @@ -80,7 +109,8 @@ SafeBrowsingProtocolManager::SafeBrowsingProtocolManager(
|
| update_back_off_mult_(1),
|
| gethash_back_off_mult_(1),
|
| next_update_interval_(base::TimeDelta::FromSeconds(
|
| - base::RandInt(60, kSbTimerStartIntervalSec))),
|
| + base::RandInt(kSbTimerStartIntervalSecMin,
|
| + kSbTimerStartIntervalSecMax))),
|
| update_state_(FIRST_REQUEST),
|
| chunk_pending_to_write_(false),
|
| version_(config.version),
|
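Note on the hunk above: the first update is still scheduled at a random point between one and five minutes after startup; the patch only replaces the single kSbTimerStartIntervalSec constant (5 * 60) with an explicit min/max pair of 60 and 300 and feeds both into base::RandInt, so the behavior is unchanged. A standalone sketch of the same scheduling decision, with <random> standing in for base::RandInt (uniform over a closed range):

    #include <chrono>
    #include <random>

    // Constants copied from the patch.
    constexpr int kSbTimerStartIntervalSecMin = 60;
    constexpr int kSbTimerStartIntervalSecMax = 300;

    // Stand-in for base::RandInt(min, max): uniform over [min, max] inclusive.
    std::chrono::seconds PickFirstUpdateDelay() {
      static std::mt19937 rng{std::random_device{}()};
      std::uniform_int_distribution<int> dist(kSbTimerStartIntervalSecMin,
                                              kSbTimerStartIntervalSecMax);
      return std::chrono::seconds(dist(rng));
    }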
| @@ -88,10 +118,18 @@ SafeBrowsingProtocolManager::SafeBrowsingProtocolManager(
|
| client_name_(config.client_name),
|
| request_context_getter_(request_context_getter),
|
| url_prefix_(config.url_prefix),
|
| + backup_update_reason_(BACKUP_UPDATE_REASON_MAX),
|
| disable_auto_update_(config.disable_auto_update),
|
| url_fetcher_id_(0) {
|
| DCHECK(!url_prefix_.empty());
|
|
|
| + backup_url_prefixes_[BACKUP_UPDATE_REASON_CONNECT] =
|
| + config.backup_connect_error_url_prefix;
|
| + backup_url_prefixes_[BACKUP_UPDATE_REASON_HTTP] =
|
| + config.backup_http_error_url_prefix;
|
| + backup_url_prefixes_[BACKUP_UPDATE_REASON_NETWORK] =
|
| + config.backup_network_error_url_prefix;
|
| +
|
| // Set the backoff multiplier fuzz to a random value between 0 and 1.
|
| back_off_fuzz_ = static_cast<float>(base::RandDouble());
|
| if (version_.empty())
|
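For context on the three config fields consumed above: each backup reason gets its own URL prefix, and an empty prefix simply disables that fallback (IssueBackupUpdateRequest further down returns false for it). A simplified sketch of the wiring; the Config struct is a hypothetical stand-in for SafeBrowsingProtocolConfig, and BackupUpdateReason is assumed to number connect, HTTP, network from 0, matching the order used by the patch (its real definition is in the header, not this diff):

    #include <array>
    #include <string>

    // Assumed ordering; the real enum lives in the protocol manager header.
    enum BackupUpdateReason {
      BACKUP_UPDATE_REASON_CONNECT,
      BACKUP_UPDATE_REASON_HTTP,
      BACKUP_UPDATE_REASON_NETWORK,
      BACKUP_UPDATE_REASON_MAX,
    };

    // Hypothetical slice of the config; the field names follow the patch.
    struct Config {
      std::string backup_connect_error_url_prefix;
      std::string backup_http_error_url_prefix;
      std::string backup_network_error_url_prefix;
    };

    // One backup URL prefix per reason; an empty entry means "no backup here".
    std::array<std::string, BACKUP_UPDATE_REASON_MAX> BuildBackupPrefixes(
        const Config& config) {
      std::array<std::string, BACKUP_UPDATE_REASON_MAX> prefixes;
      prefixes[BACKUP_UPDATE_REASON_CONNECT] = config.backup_connect_error_url_prefix;
      prefixes[BACKUP_UPDATE_REASON_HTTP] = config.backup_http_error_url_prefix;
      prefixes[BACKUP_UPDATE_REASON_NETWORK] = config.backup_network_error_url_prefix;
      return prefixes;
    }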
| @@ -154,7 +192,7 @@ void SafeBrowsingProtocolManager::GetFullHash(
|
|
|
| void SafeBrowsingProtocolManager::GetNextUpdate() {
|
| DCHECK(CalledOnValidThread());
|
| - if (!request_.get())
|
| + if (!request_.get() && request_type_ == NO_REQUEST)
|
| IssueUpdateRequest();
|
| }
|
|
|
| @@ -225,7 +263,8 @@ void SafeBrowsingProtocolManager::OnURLFetchComplete(
|
| // Update or chunk response.
|
| fetcher.reset(request_.release());
|
|
|
| - if (request_type_ == UPDATE_REQUEST) {
|
| + if (request_type_ == UPDATE_REQUEST ||
|
| + request_type_ == BACKUP_UPDATE_REQUEST) {
|
| if (!fetcher.get()) {
|
| // We've timed out waiting for an update response, so we've cancelled
|
| // the update request and scheduled a new one. Ignore this response.
|
| @@ -236,7 +275,8 @@ void SafeBrowsingProtocolManager::OnURLFetchComplete(
|
| timeout_timer_.Stop();
|
| }
|
|
|
| - if (source->GetStatus().is_success() && source->GetResponseCode() == 200) {
|
| + net::URLRequestStatus status = source->GetStatus();
|
| + if (status.is_success() && source->GetResponseCode() == 200) {
|
| // We have data from the SafeBrowsing service.
|
| std::string data;
|
| source->GetResponseAsString(&data);
|
| @@ -246,6 +286,10 @@ void SafeBrowsingProtocolManager::OnURLFetchComplete(
|
| VLOG(1) << "SafeBrowsing request for: " << source->GetURL()
|
| << " failed parse.";
|
| chunk_request_urls_.clear();
|
| + if (request_type_ == UPDATE_REQUEST &&
|
| + IssueBackupUpdateRequest(BACKUP_UPDATE_REASON_HTTP)) {
|
| + return;
|
| + }
|
| UpdateFinished(false);
|
| }
|
|
|
| @@ -258,28 +302,52 @@ void SafeBrowsingProtocolManager::OnURLFetchComplete(
|
| }
|
| break;
|
| case UPDATE_REQUEST:
|
| + case BACKUP_UPDATE_REQUEST:
|
| if (chunk_request_urls_.empty() && parsed_ok) {
|
| // We are up to date since the servers gave us nothing new, so we
|
| // are done with this update cycle.
|
| UpdateFinished(true);
|
| }
|
| break;
|
| + case NO_REQUEST:
|
| + // This can happen if HandleServiceResponse fails above.
|
| + break;
|
| default:
|
| NOTREACHED();
|
| break;
|
| }
|
| } else {
|
| - // The SafeBrowsing service error, or very bad response code: back off.
|
| - if (request_type_ == CHUNK_REQUEST)
|
| - chunk_request_urls_.clear();
|
| - UpdateFinished(false);
|
| - if (source->GetStatus().status() == net::URLRequestStatus::FAILED) {
|
| + if (status.status() == net::URLRequestStatus::FAILED) {
|
| VLOG(1) << "SafeBrowsing request for: " << source->GetURL()
|
| << " failed with error: " << source->GetStatus().error();
|
| } else {
|
| VLOG(1) << "SafeBrowsing request for: " << source->GetURL()
|
| << " failed with error: " << source->GetResponseCode();
|
| }
|
| + if (request_type_ == CHUNK_REQUEST) {
|
| + // The SafeBrowsing service error, or very bad response code: back off.
|
| + chunk_request_urls_.clear();
|
| + } else if (request_type_ == UPDATE_REQUEST) {
|
| + BackupUpdateReason backup_update_reason = BACKUP_UPDATE_REASON_MAX;
|
| + if (status.is_success()) {
|
| + backup_update_reason = BACKUP_UPDATE_REASON_HTTP;
|
| + } else {
|
| + switch (status.error()) {
|
| + case net::ERR_INTERNET_DISCONNECTED:
|
| + case net::ERR_NETWORK_CHANGED:
|
| + backup_update_reason = BACKUP_UPDATE_REASON_NETWORK;
|
| + break;
|
| + default:
|
| + backup_update_reason = BACKUP_UPDATE_REASON_CONNECT;
|
| + break;
|
| + }
|
| + }
|
| + if (backup_update_reason != BACKUP_UPDATE_REASON_MAX &&
|
| + IssueBackupUpdateRequest(backup_update_reason)) {
|
| + return;
|
| + }
|
| + }
|
| + UpdateFinished(false);
|
| }
|
| }
|
|
|
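To summarize the failure handling added to OnURLFetchComplete above: when the fetch itself failed at the network level, the net error picks the backup reason (ERR_INTERNET_DISCONNECTED and ERR_NETWORK_CHANGED go to the network backup, any other error to the connect backup); a fetch that completed with a bad response code, or whose body failed to parse, goes to the HTTP backup. A compact standalone sketch of that classification; the error constants are placeholders for the real codes in net/base/net_errors.h, and BackupUpdateReason is repeated only so the block compiles on its own:

    // Assumed ordering; the real enum lives in the protocol manager header.
    enum BackupUpdateReason {
      BACKUP_UPDATE_REASON_CONNECT,
      BACKUP_UPDATE_REASON_HTTP,
      BACKUP_UPDATE_REASON_NETWORK,
      BACKUP_UPDATE_REASON_MAX,
    };

    // Placeholder values; the real constants come from net/base/net_errors.h.
    constexpr int kErrInternetDisconnected = -1;
    constexpr int kErrNetworkChanged = -2;

    // completed: the fetch finished (status.is_success()), so only the response
    // code or body was bad; net_error matters only when it did not finish.
    BackupUpdateReason ClassifyUpdateFailure(bool completed, int net_error) {
      if (completed)
        return BACKUP_UPDATE_REASON_HTTP;
      switch (net_error) {
        case kErrInternetDisconnected:
        case kErrNetworkChanged:
          return BACKUP_UPDATE_REASON_NETWORK;
        default:
          return BACKUP_UPDATE_REASON_CONNECT;
      }
    }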
| @@ -294,7 +362,8 @@ bool SafeBrowsingProtocolManager::HandleServiceResponse(const GURL& url,
|
| SafeBrowsingProtocolParser parser;
|
|
|
| switch (request_type_) {
|
| - case UPDATE_REQUEST: {
|
| + case UPDATE_REQUEST:
|
| + case BACKUP_UPDATE_REQUEST: {
|
| int next_update_sec = -1;
|
| bool reset = false;
|
| scoped_ptr<std::vector<SBChunkDelete> > chunk_deletes(
|
| @@ -467,6 +536,35 @@ void SafeBrowsingProtocolManager::IssueUpdateRequest() {
|
| base::Unretained(this)));
|
| }
|
|
|
| +// The backup request can run immediately since the chunks have already been
|
| +// retrieved from the DB.
|
| +bool SafeBrowsingProtocolManager::IssueBackupUpdateRequest(
|
| + BackupUpdateReason backup_update_reason) {
|
| + DCHECK(CalledOnValidThread());
|
| + DCHECK_EQ(request_type_, UPDATE_REQUEST);
|
| + DCHECK(backup_update_reason >= 0 &&
|
| + backup_update_reason < BACKUP_UPDATE_REASON_MAX);
|
| + if (backup_url_prefixes_[backup_update_reason].empty())
|
| + return false;
|
| + request_type_ = BACKUP_UPDATE_REQUEST;
|
| + backup_update_reason_ = backup_update_reason;
|
| +
|
| + GURL backup_update_url = BackupUpdateUrl(backup_update_reason);
|
| + request_.reset(net::URLFetcher::Create(
|
| + url_fetcher_id_++, backup_update_url, net::URLFetcher::POST, this));
|
| + request_->SetLoadFlags(net::LOAD_DISABLE_CACHE);
|
| + request_->SetRequestContext(request_context_getter_);
|
| + request_->SetUploadData("text/plain", update_list_data_);
|
| + request_->Start();
|
| +
|
| + // Begin the update request timeout.
|
| + timeout_timer_.Start(FROM_HERE, TimeDelta::FromSeconds(kSbMaxUpdateWaitSec),
|
| + this,
|
| + &SafeBrowsingProtocolManager::UpdateResponseTimeout);
|
| +
|
| + return true;
|
| +}
|
| +
|
| void SafeBrowsingProtocolManager::IssueChunkRequest() {
|
| DCHECK(CalledOnValidThread());
|
| // We are only allowed to have one request outstanding at any time. Also,
|
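A property of IssueBackupUpdateRequest worth calling out: it is only entered while request_type_ is UPDATE_REQUEST and it immediately switches the state to BACKUP_UPDATE_REQUEST, so each update cycle gets at most one backup attempt; if the backup itself fails, times out, or no prefix is configured for the reason, the code falls through to UpdateFinished(false). A reduced sketch of that guard (the enum name and the folding of the callers' request_type_ check into the function are simplifications for the sketch):

    #include <string>

    // Request states referenced by the patch; the enclosing enum's real name
    // and ordering are not shown in this diff.
    enum RequestType {
      NO_REQUEST,
      UPDATE_REQUEST,
      BACKUP_UPDATE_REQUEST,
      CHUNK_REQUEST,
    };

    // Returns true if a backup update was started; false means the caller should
    // finish the cycle as a failure instead.
    bool TryStartBackupUpdate(RequestType& request_type,
                              const std::string& backup_prefix_for_reason) {
      if (request_type != UPDATE_REQUEST)
        return false;  // Already the backup (or no update in flight): no retry.
      if (backup_prefix_for_reason.empty())
        return false;  // No backup URL configured for this failure reason.
      request_type = BACKUP_UPDATE_REQUEST;
      // ...create the URLFetcher against BackupUpdateUrl(reason), re-send the
      // cached update_list_data_ payload, and restart the timeout timer...
      return true;
    }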
| @@ -491,6 +589,7 @@ void SafeBrowsingProtocolManager::OnGetChunksComplete(
|
| const std::vector<SBListChunkRanges>& lists, bool database_error) {
|
| DCHECK(CalledOnValidThread());
|
| DCHECK_EQ(request_type_, UPDATE_REQUEST);
|
| + DCHECK(update_list_data_.empty());
|
| if (database_error) {
|
| // The update was not successful, but don't back off.
|
| UpdateFinished(false, false);
|
| @@ -498,11 +597,10 @@ void SafeBrowsingProtocolManager::OnGetChunksComplete(
|
| }
|
|
|
| // Format our stored chunks:
|
| - std::string list_data;
|
| bool found_malware = false;
|
| bool found_phishing = false;
|
| for (size_t i = 0; i < lists.size(); ++i) {
|
| - list_data.append(FormatList(lists[i]));
|
| + update_list_data_.append(FormatList(lists[i]));
|
| if (lists[i].name == safe_browsing_util::kPhishingList)
|
| found_phishing = true;
|
|
|
| @@ -513,11 +611,11 @@ void SafeBrowsingProtocolManager::OnGetChunksComplete(
|
| // If we have an empty database, let the server know we want data for these
|
| // lists.
|
| if (!found_phishing)
|
| - list_data.append(FormatList(
|
| + update_list_data_.append(FormatList(
|
| SBListChunkRanges(safe_browsing_util::kPhishingList)));
|
|
|
| if (!found_malware)
|
| - list_data.append(FormatList(
|
| + update_list_data_.append(FormatList(
|
| SBListChunkRanges(safe_browsing_util::kMalwareList)));
|
|
|
| GURL update_url = UpdateUrl();
|
| @@ -525,7 +623,7 @@ void SafeBrowsingProtocolManager::OnGetChunksComplete(
|
| url_fetcher_id_++, update_url, net::URLFetcher::POST, this));
|
| request_->SetLoadFlags(net::LOAD_DISABLE_CACHE);
|
| request_->SetRequestContext(request_context_getter_);
|
| - request_->SetUploadData("text/plain", list_data);
|
| + request_->SetUploadData("text/plain", update_list_data_);
|
| request_->Start();
|
|
|
| // Begin the update request timeout.
|
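The switch from the local list_data to the update_list_data_ member in the hunks above is what lets the backup path avoid another database round trip: the formatted chunk ranges are built once in OnGetChunksComplete, re-POSTed verbatim by IssueBackupUpdateRequest, and cleared in UpdateFinished. A minimal sketch of that payload lifecycle (the class is hypothetical; only the member name and the three touch points come from the patch):

    #include <string>

    // Hypothetical reduction of the manager to its update payload handling.
    class UpdatePayloadCache {
     public:
      // OnGetChunksComplete: build the request body once from the DB results.
      void Set(const std::string& formatted_chunk_lists) {
        update_list_data_ = formatted_chunk_lists;
      }

      // Primary and backup updates POST the same cached body.
      const std::string& body() const { return update_list_data_; }

      // UpdateFinished: the cycle is over, drop the payload.
      void Clear() { update_list_data_.clear(); }

     private:
      std::string update_list_data_;
    };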
| @@ -538,8 +636,13 @@ void SafeBrowsingProtocolManager::OnGetChunksComplete(
|
| // will run. Close the current update session and schedule another update.
|
| void SafeBrowsingProtocolManager::UpdateResponseTimeout() {
|
| DCHECK(CalledOnValidThread());
|
| - DCHECK_EQ(request_type_, UPDATE_REQUEST);
|
| + DCHECK(request_type_ == UPDATE_REQUEST ||
|
| + request_type_ == BACKUP_UPDATE_REQUEST);
|
| request_.reset();
|
| + if (request_type_ == UPDATE_REQUEST &&
|
| + IssueBackupUpdateRequest(BACKUP_UPDATE_REASON_CONNECT)) {
|
| + return;
|
| + }
|
| UpdateFinished(false);
|
| }
|
|
|
| @@ -589,6 +692,20 @@ void SafeBrowsingProtocolManager::UpdateFinished(bool success, bool back_off) {
|
| DCHECK(CalledOnValidThread());
|
| UMA_HISTOGRAM_COUNTS("SB2.UpdateSize", update_size_);
|
| update_size_ = 0;
|
| + bool update_success = success || request_type_ == CHUNK_REQUEST;
|
| + if (backup_update_reason_ == BACKUP_UPDATE_REASON_MAX) {
|
| + RecordUpdateResult(
|
| + update_success ? UPDATE_RESULT_SUCCESS : UPDATE_RESULT_FAIL);
|
| + } else {
|
| + UpdateResult update_result = static_cast<UpdateResult>(
|
| + UPDATE_RESULT_BACKUP_START +
|
| + (static_cast<int>(backup_update_reason_) * 2) +
|
| + update_success);
|
| + RecordUpdateResult(update_result);
|
| + }
|
| + backup_update_reason_ = BACKUP_UPDATE_REASON_MAX;
|
| + request_type_ = NO_REQUEST;
|
| + update_list_data_.clear();
|
| delegate_->UpdateFinished(success);
|
| ScheduleNextUpdate(back_off);
|
| }
|
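The bucket arithmetic in UpdateFinished above relies on the UpdateResult layout declared at the top of the file: each backup reason owns a contiguous (FAIL, SUCCESS) pair starting at UPDATE_RESULT_BACKUP_START, so the recorded bucket is UPDATE_RESULT_BACKUP_START + 2 * reason + success. A standalone check of that mapping, assuming BackupUpdateReason numbers connect, HTTP, network from 0 in that order (its definition is in the header, not this diff):

    #include <cassert>

    // Copied from the patch so the arithmetic can be exercised in isolation.
    enum UpdateResult {
      UPDATE_RESULT_FAIL,
      UPDATE_RESULT_SUCCESS,
      UPDATE_RESULT_BACKUP_CONNECT_FAIL,
      UPDATE_RESULT_BACKUP_CONNECT_SUCCESS,
      UPDATE_RESULT_BACKUP_HTTP_FAIL,
      UPDATE_RESULT_BACKUP_HTTP_SUCCESS,
      UPDATE_RESULT_BACKUP_NETWORK_FAIL,
      UPDATE_RESULT_BACKUP_NETWORK_SUCCESS,
      UPDATE_RESULT_MAX,
      UPDATE_RESULT_BACKUP_START = UPDATE_RESULT_BACKUP_CONNECT_FAIL,
    };

    // Assumed ordering of the reasons (declared in the header, not in this diff).
    enum BackupUpdateReason {
      BACKUP_UPDATE_REASON_CONNECT,
      BACKUP_UPDATE_REASON_HTTP,
      BACKUP_UPDATE_REASON_NETWORK,
      BACKUP_UPDATE_REASON_MAX,
    };

    UpdateResult BackupResultBucket(BackupUpdateReason reason, bool success) {
      return static_cast<UpdateResult>(
          UPDATE_RESULT_BACKUP_START + 2 * reason + (success ? 1 : 0));
    }

    int main() {
      // An HTTP-triggered backup that ends up succeeding:
      assert(BackupResultBucket(BACKUP_UPDATE_REASON_HTTP, true) ==
             UPDATE_RESULT_BACKUP_HTTP_SUCCESS);
      // A network-triggered backup that also fails:
      assert(BackupResultBucket(BACKUP_UPDATE_REASON_NETWORK, false) ==
             UPDATE_RESULT_BACKUP_NETWORK_FAIL);
      return 0;
    }

Resetting backup_update_reason_ to BACKUP_UPDATE_REASON_MAX at the end of each cycle doubles as the "no backup was used" sentinel, which is why the primary-only path records plain UPDATE_RESULT_SUCCESS or UPDATE_RESULT_FAIL instead.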
| @@ -599,6 +716,17 @@ GURL SafeBrowsingProtocolManager::UpdateUrl() const {
|
| return GURL(url);
|
| }
|
|
|
| +GURL SafeBrowsingProtocolManager::BackupUpdateUrl(
|
| + BackupUpdateReason backup_update_reason) const {
|
| + DCHECK(backup_update_reason >= 0 &&
|
| + backup_update_reason < BACKUP_UPDATE_REASON_MAX);
|
| + DCHECK(!backup_url_prefixes_[backup_update_reason].empty());
|
| + std::string url = SafeBrowsingProtocolManagerHelper::ComposeUrl(
|
| + backup_url_prefixes_[backup_update_reason], "downloads", client_name_,
|
| + version_, additional_query_);
|
| + return GURL(url);
|
| +}
|
| +
|
| GURL SafeBrowsingProtocolManager::GetHashUrl() const {
|
| std::string url = SafeBrowsingProtocolManagerHelper::ComposeUrl(
|
| url_prefix_, "gethash", client_name_, version_, additional_query_);
|
|
|