OLD | NEW |
---|---|
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "chrome/browser/safe_browsing/protocol_manager.h" | 5 #include "chrome/browser/safe_browsing/protocol_manager.h" |
6 | 6 |
7 #include <utility> | 7 #include <utility> |
8 | 8 |
9 #include "base/base64.h" | 9 #include "base/base64.h" |
10 #include "base/environment.h" | 10 #include "base/environment.h" |
(...skipping 115 matching lines...) | |
126 // TODO(cbentzel): Remove ScopedTracker below once crbug.com/483689 is fixed. | 126 // TODO(cbentzel): Remove ScopedTracker below once crbug.com/483689 is fixed. |
127 tracked_objects::ScopedTracker tracking_profile( | 127 tracked_objects::ScopedTracker tracking_profile( |
128 FROM_HERE_WITH_EXPLICIT_FUNCTION( | 128 FROM_HERE_WITH_EXPLICIT_FUNCTION( |
129 "483689 SafeBrowsingProtocolManager::Create")); | 129 "483689 SafeBrowsingProtocolManager::Create")); |
130 if (!factory_) | 130 if (!factory_) |
131 factory_ = new SBProtocolManagerFactoryImpl(); | 131 factory_ = new SBProtocolManagerFactoryImpl(); |
132 return factory_->CreateProtocolManager(delegate, request_context_getter, | 132 return factory_->CreateProtocolManager(delegate, request_context_getter, |
133 config); | 133 config); |
134 } | 134 } |
135 | 135 |
136 // static | |
137 // Backoff interval is MIN(((2^(n-1))*15 minutes) * (RAND + 1), 24 hours) where | |
Nathan Parker
2016/01/12 04:14:38
Is there a public doc we can ref with this spec? I don't know of one.
kcarattini
2016/01/12 05:18:06
I don't know of one either.
| |
138 // n is the number of consecutive errors and RAND is a random factor in [0, 1). | |
139 base::TimeDelta SafeBrowsingProtocolManager::GetNextV4BackOffInterval( | |
140 size_t* error_count, | |
141 size_t* multiplier) { | |
142 DCHECK(multiplier && error_count); | |
143 (*error_count)++; | |
144 if (*error_count > 1 && *error_count < 9) { | |
145 // With error count 9 and above we will hit the 24 hour max interval. | |
146 // Cap the multiplier here to prevent integer overflow errors. | |
147 *multiplier *= 2; | |
148 } | |
149 base::TimeDelta next = base::TimeDelta::FromMinutes( | |
150 *multiplier * (1 + base::RandDouble()) * 15); | |
151 | |
152 base::TimeDelta day = base::TimeDelta::FromHours(24); | |
153 | |
154 if (next < day) | |
155 return next; | |
156 else | |
157 return day; | |
158 } | |
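For reference, a standalone sketch (plain C++, not part of this CL) of the schedule the formula above yields, taking RAND = 0 to show each interval's lower bound:

    #include <algorithm>
    #include <cstdio>

    int main() {
      size_t multiplier = 1;
      for (size_t error_count = 1; error_count <= 10; ++error_count) {
        // Same doubling rule as GetNextV4BackOffInterval().
        if (error_count > 1 && error_count < 9)
          multiplier *= 2;
        // Interval = MIN(multiplier * 15 min * (RAND + 1), 24 h); RAND = 0 here.
        size_t minutes = std::min(multiplier * 15, size_t{24 * 60});
        printf("error %2zu: >= %4zu min\n", error_count, minutes);
      }
      return 0;
    }

With RAND = 0 this prints 15, 30, 60, ..., 960 minutes for errors 1 through 7, then holds at the 1440-minute (24-hour) cap from error 8 onward; with a nonzero RAND the cap can engage as early as error 7.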
159 | |
136 SafeBrowsingProtocolManager::SafeBrowsingProtocolManager( | 160 SafeBrowsingProtocolManager::SafeBrowsingProtocolManager( |
137 SafeBrowsingProtocolManagerDelegate* delegate, | 161 SafeBrowsingProtocolManagerDelegate* delegate, |
138 net::URLRequestContextGetter* request_context_getter, | 162 net::URLRequestContextGetter* request_context_getter, |
139 const SafeBrowsingProtocolConfig& config) | 163 const SafeBrowsingProtocolConfig& config) |
140 : delegate_(delegate), | 164 : delegate_(delegate), |
141 request_type_(NO_REQUEST), | 165 request_type_(NO_REQUEST), |
142 update_error_count_(0), | 166 update_error_count_(0), |
143 gethash_error_count_(0), | 167 gethash_error_count_(0), |
168 gethash_v4_error_count_(0), | |
144 update_back_off_mult_(1), | 169 update_back_off_mult_(1), |
145 gethash_back_off_mult_(1), | 170 gethash_back_off_mult_(1), |
171 gethash_v4_back_off_mult_(1), | |
146 next_update_interval_(base::TimeDelta::FromSeconds( | 172 next_update_interval_(base::TimeDelta::FromSeconds( |
147 base::RandInt(kSbTimerStartIntervalSecMin, | 173 base::RandInt(kSbTimerStartIntervalSecMin, |
148 kSbTimerStartIntervalSecMax))), | 174 kSbTimerStartIntervalSecMax))), |
149 chunk_pending_to_write_(false), | 175 chunk_pending_to_write_(false), |
176 next_gethash_v4_time_(Time::Now()), | |
Nathan Parker
2016/01/12 04:14:38
How about just zero? Then if the clock drifts backward the first request still goes through.
kcarattini
2016/01/12 05:18:06
Done.
| |
150 version_(config.version), | 177 version_(config.version), |
151 update_size_(0), | 178 update_size_(0), |
152 client_name_(config.client_name), | 179 client_name_(config.client_name), |
153 request_context_getter_(request_context_getter), | 180 request_context_getter_(request_context_getter), |
154 url_prefix_(config.url_prefix), | 181 url_prefix_(config.url_prefix), |
155 backup_update_reason_(BACKUP_UPDATE_REASON_MAX), | 182 backup_update_reason_(BACKUP_UPDATE_REASON_MAX), |
156 disable_auto_update_(config.disable_auto_update), | 183 disable_auto_update_(config.disable_auto_update), |
157 url_fetcher_id_(0), | 184 url_fetcher_id_(0), |
158 app_in_foreground_(true) { | 185 app_in_foreground_(true) { |
159 DCHECK(!url_prefix_.empty()); | 186 DCHECK(!url_prefix_.empty()); |
(...skipping 113 matching lines...) | |
273 | 300 |
274 if (!response.ParseFromString(data)) | 301 if (!response.ParseFromString(data)) |
275 return false; | 302 return false; |
276 | 303 |
277 if (response.has_negative_cache_duration()) { | 304 if (response.has_negative_cache_duration()) { |
278 // Seconds resolution is good enough so we ignore the nanos field. | 305 // Seconds resolution is good enough so we ignore the nanos field. |
279 *negative_cache_duration = base::TimeDelta::FromSeconds( | 306 *negative_cache_duration = base::TimeDelta::FromSeconds( |
280 response.negative_cache_duration().seconds()); | 307 response.negative_cache_duration().seconds()); |
281 } | 308 } |
282 | 309 |
310 if (response.has_minimum_wait_duration()) { | |
311 // Seconds resolution is good enough so we ignore the nanos field. | |
312 next_gethash_v4_time_ = Time::Now() + base::TimeDelta::FromSeconds( | |
313 response.minimum_wait_duration().seconds()); | |
314 } | |
315 | |
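As an illustration of the seconds-only truncation above, a minimal standalone sketch (hypothetical values; a plain struct stands in for the protobuf Duration message):

    #include <cstdint>
    #include <cstdio>

    // Stand-in for the protobuf Duration message used above.
    struct Duration { int64_t seconds; int32_t nanos; };

    int main() {
      // Hypothetical server reply: wait 300.5 s before the next request.
      Duration minimum_wait_duration = {300, 500000000};
      // Only whole seconds are honored, so the gate opens 300 s from now.
      printf("next request allowed in %lld s (nanos ignored)\n",
             static_cast<long long>(minimum_wait_duration.seconds));
      return 0;
    }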
283 // Loop over the threat matches and fill in full_hashes. | 316 // Loop over the threat matches and fill in full_hashes. |
284 for (const ThreatMatch& match : response.matches()) { | 317 for (const ThreatMatch& match : response.matches()) { |
285 // Make sure the platform and threat entry type match. | 318 // Make sure the platform and threat entry type match. |
286 if (!(match.has_threat_entry_type() && | 319 if (!(match.has_threat_entry_type() && |
287 match.threat_entry_type() == URL_EXPRESSION && | 320 match.threat_entry_type() == URL_EXPRESSION && |
288 match.has_threat())) { | 321 match.has_threat())) { |
289 continue; | 322 continue; |
290 } | 323 } |
291 | 324 |
292 // Fill in the full hash. | 325 // Fill in the full hash. |
(...skipping 24 matching lines...) | |
317 } | 350 } |
318 return true; | 351 return true; |
319 } | 352 } |
320 | 353 |
321 void SafeBrowsingProtocolManager::GetV4FullHashes( | 354 void SafeBrowsingProtocolManager::GetV4FullHashes( |
322 const std::vector<SBPrefix>& prefixes, | 355 const std::vector<SBPrefix>& prefixes, |
323 const std::vector<PlatformType>& platforms, | 356 const std::vector<PlatformType>& platforms, |
324 ThreatType threat_type, | 357 ThreatType threat_type, |
325 FullHashCallback callback) { | 358 FullHashCallback callback) { |
326 DCHECK(CalledOnValidThread()); | 359 DCHECK(CalledOnValidThread()); |
327 // TODO(kcarattini): Implement backoff behavior. | 360 // Both the server-supplied minimum wait duration and any backoff interval |
361 // are folded into next_gethash_v4_time_. If that time has not yet passed, |
362 // we must not issue the request and are required to return empty results |
363 // (i.e. treat the page as safe). |
364 if (Time::Now() <= next_gethash_v4_time_) { | |
365 // TODO(kcarattini): Add UMA recording. | |
366 std::vector<SBFullHashResult> full_hashes; | |
367 callback.Run(full_hashes, base::TimeDelta()); | |
368 return; | |
369 } | |
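To make the contract of this early return concrete: during the wait/backoff window the callback fires immediately with an empty result set, which callers read as "no match". A caller sketch (hypothetical function name; assumes this file's types and includes):

    // Matches the FullHashCallback shape used in this file.
    void OnV4FullHashResults(const std::vector<SBFullHashResult>& full_hashes,
                             const base::TimeDelta& negative_cache_duration) {
      if (full_hashes.empty()) {
        // Either no prefix matched or the client is inside the wait/backoff
        // window; the URL is treated as safe.
      }
    }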
328 | 370 |
329 std::string req_base64 = GetV4HashRequest(prefixes, platforms, threat_type); | 371 std::string req_base64 = GetV4HashRequest(prefixes, platforms, threat_type); |
330 GURL gethash_url = GetV4HashUrl(req_base64); | 372 GURL gethash_url = GetV4HashUrl(req_base64); |
331 | 373 |
332 net::URLFetcher* fetcher = | 374 net::URLFetcher* fetcher = |
333 net::URLFetcher::Create(url_fetcher_id_++, gethash_url, | 375 net::URLFetcher::Create(url_fetcher_id_++, gethash_url, |
334 net::URLFetcher::GET, this) | 376 net::URLFetcher::GET, this) |
335 .release(); | 377 .release(); |
336 v4_hash_requests_[fetcher] = FullHashDetails(callback, | 378 v4_hash_requests_[fetcher] = FullHashDetails(callback, |
337 false /* is_download */); | 379 false /* is_download */); |
(...skipping 86 matching lines...) | |
424 | 466 |
425 hash_requests_.erase(it); | 467 hash_requests_.erase(it); |
426 } else if (v4_it != v4_hash_requests_.end()) { | 468 } else if (v4_it != v4_hash_requests_.end()) { |
427 // V4 FindFullHashes response. | 469 // V4 FindFullHashes response. |
428 fetcher.reset(v4_it->first); | 470 fetcher.reset(v4_it->first); |
429 const FullHashDetails& details = v4_it->second; | 471 const FullHashDetails& details = v4_it->second; |
430 std::vector<SBFullHashResult> full_hashes; | 472 std::vector<SBFullHashResult> full_hashes; |
431 base::TimeDelta negative_cache_duration; | 473 base::TimeDelta negative_cache_duration; |
432 if (status.is_success() && response_code == net::HTTP_OK) { | 474 if (status.is_success() && response_code == net::HTTP_OK) { |
433 // TODO(kcarattini): Add UMA reporting. | 475 // TODO(kcarattini): Add UMA reporting. |
434 // TODO(kcarattini): Implement backoff and minimum waiting duration | 476 gethash_v4_error_count_ = 0; |
Nathan Parker
2016/01/12 04:14:38
Might be tidier to put this in a ResetGetHashV4Errors() method.
kcarattini
2016/01/12 05:18:06
Done.
| |
435 // compliance. | 477 gethash_v4_back_off_mult_ = 1; |
436 std::string data; | 478 std::string data; |
437 source->GetResponseAsString(&data); | 479 source->GetResponseAsString(&data); |
438 if (!ParseV4HashResponse(data, &full_hashes, &negative_cache_duration)) { | 480 if (!ParseV4HashResponse(data, &full_hashes, &negative_cache_duration)) { |
439 full_hashes.clear(); | 481 full_hashes.clear(); |
440 // TODO(kcarattini): Add UMA reporting. | 482 // TODO(kcarattini): Add UMA reporting. |
441 } | 483 } |
442 } else { | 484 } else { |
443 // TODO(kcarattini): Handle error by setting backoff interval. | 485 HandleGetHashV4Error(Time::Now()); |
444 // TODO(kcarattini): Add UMA reporting. | 486 // TODO(kcarattini): Add UMA reporting. |
445 if (status.status() == net::URLRequestStatus::FAILED) { | 487 if (status.status() == net::URLRequestStatus::FAILED) { |
446 DVLOG(1) << "SafeBrowsing GetEncodedFullHashes request for: " << | 488 DVLOG(1) << "SafeBrowsing GetEncodedFullHashes request for: " << |
447 source->GetURL() << " failed with error: " << status.error(); | 489 source->GetURL() << " failed with error: " << status.error(); |
448 } else { | 490 } else { |
449 DVLOG(1) << "SafeBrowsing GetEncodedFullHashes request for: " << | 491 DVLOG(1) << "SafeBrowsing GetEncodedFullHashes request for: " << |
450 source->GetURL() << " failed with error: " << response_code; | 492 source->GetURL() << " failed with error: " << response_code; |
451 } | 493 } |
452 } | 494 } |
453 | 495 |
(...skipping 403 matching lines...) | |
857 } | 899 } |
858 } | 900 } |
859 | 901 |
860 void SafeBrowsingProtocolManager::HandleGetHashError(const Time& now) { | 902 void SafeBrowsingProtocolManager::HandleGetHashError(const Time& now) { |
861 DCHECK(CalledOnValidThread()); | 903 DCHECK(CalledOnValidThread()); |
862 base::TimeDelta next = | 904 base::TimeDelta next = |
863 GetNextBackOffInterval(&gethash_error_count_, &gethash_back_off_mult_); | 905 GetNextBackOffInterval(&gethash_error_count_, &gethash_back_off_mult_); |
864 next_gethash_time_ = now + next; | 906 next_gethash_time_ = now + next; |
865 } | 907 } |
866 | 908 |
909 void SafeBrowsingProtocolManager::HandleGetHashV4Error(const Time& now) { | |
910 DCHECK(CalledOnValidThread()); | |
911 base::TimeDelta next = GetNextV4BackOffInterval( | |
912 &gethash_v4_error_count_, &gethash_v4_back_off_mult_); | |
913 next_gethash_v4_time_ = now + next; | |
914 } | |
915 | |
867 void SafeBrowsingProtocolManager::UpdateFinished(bool success) { | 916 void SafeBrowsingProtocolManager::UpdateFinished(bool success) { |
868 UpdateFinished(success, !success); | 917 UpdateFinished(success, !success); |
869 } | 918 } |
870 | 919 |
871 void SafeBrowsingProtocolManager::UpdateFinished(bool success, bool back_off) { | 920 void SafeBrowsingProtocolManager::UpdateFinished(bool success, bool back_off) { |
872 DCHECK(CalledOnValidThread()); | 921 DCHECK(CalledOnValidThread()); |
873 UMA_HISTOGRAM_COUNTS("SB2.UpdateSize", update_size_); | 922 UMA_HISTOGRAM_COUNTS("SB2.UpdateSize", update_size_); |
874 update_size_ = 0; | 923 update_size_ = 0; |
875 bool update_success = success || request_type_ == CHUNK_REQUEST; | 924 bool update_success = success || request_type_ == CHUNK_REQUEST; |
876 if (backup_update_reason_ == BACKUP_UPDATE_REASON_MAX) { | 925 if (backup_update_reason_ == BACKUP_UPDATE_REASON_MAX) { |
(...skipping 79 matching lines...) | |
956 SafeBrowsingProtocolManager::FullHashDetails::FullHashDetails( | 1005 SafeBrowsingProtocolManager::FullHashDetails::FullHashDetails( |
957 FullHashCallback callback, | 1006 FullHashCallback callback, |
958 bool is_download) | 1007 bool is_download) |
959 : callback(callback), is_download(is_download) {} | 1008 : callback(callback), is_download(is_download) {} |
960 | 1009 |
961 SafeBrowsingProtocolManager::FullHashDetails::~FullHashDetails() {} | 1010 SafeBrowsingProtocolManager::FullHashDetails::~FullHashDetails() {} |
962 | 1011 |
963 SafeBrowsingProtocolManagerDelegate::~SafeBrowsingProtocolManagerDelegate() {} | 1012 SafeBrowsingProtocolManagerDelegate::~SafeBrowsingProtocolManagerDelegate() {} |
964 | 1013 |
965 } // namespace safe_browsing | 1014 } // namespace safe_browsing |