Index: chrome/browser/safe_browsing/protocol_manager.cc
diff --git a/chrome/browser/safe_browsing/protocol_manager.cc b/chrome/browser/safe_browsing/protocol_manager.cc
index 6c1377906cc8257a06e0ad1c647ac77dd019c12a..2277d67fdf910d7773d1cbe71bafd41da9b83848 100644
--- a/chrome/browser/safe_browsing/protocol_manager.cc
+++ b/chrome/browser/safe_browsing/protocol_manager.cc
@@ -54,6 +54,10 @@ void RecordUpdateResult(UpdateResult result) {
}  // namespace
+// The maximum staleness for a cached entry.
+// TODO(mattm): remove this when switching to Safebrowsing API 2.3.
+static const int kMaxStalenessMinutes = 45;
+
// Minimum time, in seconds, from start up before we must issue an update query.
static const int kSbTimerStartIntervalSecMin = 60;
@@ -172,7 +176,7 @@ void SafeBrowsingProtocolManager::GetFullHash(
// required to return empty results (i.e. treat the page as safe).
if (gethash_error_count_ && Time::Now() <= next_gethash_time_) {
std::vector<SBFullHashResult> full_hashes;
-    callback.Run(full_hashes, false);
+    callback.Run(full_hashes, base::TimeDelta());
return;
}
GURL gethash_url = GetHashUrl();
@@ -218,7 +222,7 @@ void SafeBrowsingProtocolManager::OnURLFetchComplete(
fetcher.reset(it->first);
const FullHashDetails& details = it->second;
std::vector<SBFullHashResult> full_hashes;
-  bool can_cache = false;
+  base::TimeDelta cache_lifetime;
if (source->GetStatus().is_success() &&
(source->GetResponseCode() == 200 ||
source->GetResponseCode() == 204)) {
@@ -228,7 +232,9 @@ void SafeBrowsingProtocolManager::OnURLFetchComplete(
RecordGetHashResult(details.is_download, GET_HASH_STATUS_200);
else
RecordGetHashResult(details.is_download, GET_HASH_STATUS_204);
-      can_cache = true;
+      // In SB 2.3, cache lifetime will be part of the protocol message. For
+      // now, use the old value of 45 minutes.
+      cache_lifetime = base::TimeDelta::FromMinutes(kMaxStalenessMinutes);
gethash_error_count_ = 0;
gethash_back_off_mult_ = 1;
SafeBrowsingProtocolParser parser;
@@ -240,14 +246,18 @@ void SafeBrowsingProtocolManager::OnURLFetchComplete(
&full_hashes);
if (!parsed_ok) {
full_hashes.clear();
-      // TODO(cbentzel): Should can_cache be set to false here?
+      RecordGetHashResult(details.is_download, GET_HASH_PARSE_ERROR);
+      // TODO(cbentzel): Should cache_lifetime be set to 0 here? See
+      // http://crbug.com/360232.
}
} else {
HandleGetHashError(Time::Now());
if (source->GetStatus().status() == net::URLRequestStatus::FAILED) {
+      RecordGetHashResult(details.is_download, GET_HASH_NETWORK_ERROR);
VLOG(1) << "SafeBrowsing GetHash request for: " << source->GetURL()
<< " failed with error: " << source->GetStatus().error();
} else {
+      RecordGetHashResult(details.is_download, GET_HASH_HTTP_ERROR);
VLOG(1) << "SafeBrowsing GetHash request for: " << source->GetURL()
<< " failed with error: " << source->GetResponseCode();
}
@@ -256,7 +266,7 @@ void SafeBrowsingProtocolManager::OnURLFetchComplete(
// Invoke the callback with full_hashes, even if there was a parse error or
// an error response code (in which case full_hashes will be empty). The
// caller can't be blocked indefinitely.
-  details.callback.Run(full_hashes, can_cache);
+  details.callback.Run(full_hashes, cache_lifetime);
hash_requests_.erase(it);
} else {