Chromium Code Reviews

Side by Side Diff: chrome/browser/safe_browsing/protocol_manager.cc

Issue 1555953002: Adds backoff and Min Wait Duration compliance to Protocolmanager pver4 handlers. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@osb-pm-2
Patch Set: Rebase (created 4 years, 11 months ago)
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "chrome/browser/safe_browsing/protocol_manager.h"

#include <utility>

#include "base/base64.h"
#include "base/environment.h"
(...skipping 116 matching lines...)
  // TODO(cbentzel): Remove ScopedTracker below once crbug.com/483689 is fixed.
  tracked_objects::ScopedTracker tracking_profile(
      FROM_HERE_WITH_EXPLICIT_FUNCTION(
          "483689 SafeBrowsingProtocolManager::Create"));
  if (!factory_)
    factory_ = new SBProtocolManagerFactoryImpl();
  return factory_->CreateProtocolManager(delegate, request_context_getter,
                                         config);
}

// static
// Backoff interval is MIN(((2^(n-1))*15 minutes) * (RAND + 1), 24 hours) where
// n is the number of consecutive errors.
base::TimeDelta SafeBrowsingProtocolManager::GetNextV4BackOffInterval(
    size_t* error_count,
    size_t* multiplier) {
  DCHECK(multiplier && error_count);
  (*error_count)++;
  if (*error_count > 1 && *error_count < 9) {
    // With error count 9 and above we will hit the 24 hour max interval.
    // Cap the multiplier here to prevent integer overflow errors.
    *multiplier *= 2;
  }
  base::TimeDelta next = base::TimeDelta::FromMinutes(
      *multiplier * (1 + base::RandDouble()) * 15);

  base::TimeDelta day = base::TimeDelta::FromHours(24);

  if (next < day)
    return next;
  else
    return day;
}
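To make the growth of that interval concrete, here is a minimal standalone sketch of the same doubling-and-cap logic, written against the C++ standard library rather than Chromium's base/ (the function and variable names are illustrative only, not part of this patch):

// Sketch of the V4 backoff computation: double the multiplier for consecutive
// errors 2 through 8, scale a 15-minute base by a random factor in [1, 2),
// and cap the result at 24 hours.
#include <algorithm>
#include <chrono>
#include <cstdio>
#include <random>

std::chrono::seconds NextV4BackoffSketch(size_t* error_count,
                                         size_t* multiplier,
                                         double rand01) {
  ++(*error_count);
  if (*error_count > 1 && *error_count < 9)
    *multiplier *= 2;  // Bounded so the multiplier cannot overflow.
  std::chrono::duration<double> next =
      std::chrono::minutes(15) * static_cast<double>(*multiplier) *
      (1.0 + rand01);
  return std::min(std::chrono::duration_cast<std::chrono::seconds>(next),
                  std::chrono::seconds(24 * 60 * 60));
}

int main() {
  size_t errors = 0, mult = 1;
  std::mt19937 rng(42);
  std::uniform_real_distribution<double> rand01(0.0, 1.0);
  // Successive errors roughly double the wait (15-30 min, 30-60 min, ...)
  // until the 24 hour cap takes over.
  for (int i = 0; i < 10; ++i) {
    auto wait = NextV4BackoffSketch(&errors, &mult, rand01(rng));
    std::printf("error %zu: wait %lld s\n", errors,
                static_cast<long long>(wait.count()));
  }
  return 0;
}

Once the multiplier reaches the upper end of its range, the random factor no longer matters and every retry is pushed out the full 24 hours.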

void SafeBrowsingProtocolManager::ResetGetHashV4Errors() {
  gethash_v4_error_count_ = 0;
  gethash_v4_back_off_mult_ = 1;
}

SafeBrowsingProtocolManager::SafeBrowsingProtocolManager(
    SafeBrowsingProtocolManagerDelegate* delegate,
    net::URLRequestContextGetter* request_context_getter,
    const SafeBrowsingProtocolConfig& config)
    : delegate_(delegate),
      request_type_(NO_REQUEST),
      update_error_count_(0),
      gethash_error_count_(0),
      gethash_v4_error_count_(0),
      update_back_off_mult_(1),
      gethash_back_off_mult_(1),
      gethash_v4_back_off_mult_(1),
      next_update_interval_(base::TimeDelta::FromSeconds(
          base::RandInt(kSbTimerStartIntervalSecMin,
                        kSbTimerStartIntervalSecMax))),
      chunk_pending_to_write_(false),
      next_gethash_v4_time_(Time::FromDoubleT(0)),
      version_(config.version),
      update_size_(0),
      client_name_(config.client_name),
      request_context_getter_(request_context_getter),
      url_prefix_(config.url_prefix),
      backup_update_reason_(BACKUP_UPDATE_REASON_MAX),
      disable_auto_update_(config.disable_auto_update),
      url_fetcher_id_(0),
      app_in_foreground_(true) {
  DCHECK(!url_prefix_.empty());
(...skipping 115 matching lines...)
    // TODO(kcarattini): Add UMA.
    return false;
  }

  if (response.has_negative_cache_duration()) {
    // Seconds resolution is good enough so we ignore the nanos field.
    *negative_cache_duration = base::TimeDelta::FromSeconds(
        response.negative_cache_duration().seconds());
  }

  if (response.has_minimum_wait_duration()) {
    // Seconds resolution is good enough so we ignore the nanos field.
    next_gethash_v4_time_ = Time::Now() + base::TimeDelta::FromSeconds(
        response.minimum_wait_duration().seconds());
  }

  // Loop over the threat matches and fill in full_hashes.
  for (const ThreatMatch& match : response.matches()) {
    // Make sure the platform and threat entry type match.
    if (!(match.has_threat_entry_type() &&
          match.threat_entry_type() == URL_EXPRESSION &&
          match.has_threat())) {
      continue;
    }

    // Fill in the full hash.
(...skipping 27 matching lines...)
  }
  return true;
}
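The minimum_wait_duration handling above is the server-driven half of the throttling: the response's duration is reduced to whole seconds and added to the current time to produce the earliest moment the next V4 GetHash request may go out. A small standalone illustration (the DurationProto struct below is only a stand-in for the real protobuf message):

// Standalone illustration of converting a server-supplied minimum wait
// duration into the next allowed request time. Only the seconds field is
// honored; nanos are ignored, matching the parser's seconds-only resolution.
#include <chrono>
#include <cstdio>

struct DurationProto {  // Stand-in for the protobuf Duration message.
  long long seconds;
  int nanos;
};

using Clock = std::chrono::system_clock;

Clock::time_point NextAllowedRequestTime(const DurationProto& min_wait,
                                         Clock::time_point now) {
  return now + std::chrono::seconds(min_wait.seconds);
}

int main() {
  DurationProto min_wait{300, 250000000};  // Server asks for 300.25 s.
  Clock::time_point now = Clock::now();
  auto delta = NextAllowedRequestTime(min_wait, now) - now;
  std::printf("next request allowed in %lld s\n",
              static_cast<long long>(
                  std::chrono::duration_cast<std::chrono::seconds>(delta)
                      .count()));
  return 0;
}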

void SafeBrowsingProtocolManager::GetV4FullHashes(
    const std::vector<SBPrefix>& prefixes,
    const std::vector<PlatformType>& platforms,
    ThreatType threat_type,
    FullHashCallback callback) {
  DCHECK(CalledOnValidThread());
  // We need to wait the minimum waiting duration, and if we are in backoff,
  // we need to check if we're past the next allowed time. If we are, we can
  // proceed with the request. If not, we are required to return empty results
  // (i.e. treat the page as safe).
  if (Time::Now() <= next_gethash_v4_time_) {
    // TODO(kcarattini): Add UMA recording.
    std::vector<SBFullHashResult> full_hashes;
    callback.Run(full_hashes, base::TimeDelta());
    return;
  }

  std::string req_base64 = GetV4HashRequest(prefixes, platforms, threat_type);
  GURL gethash_url = GetV4HashUrl(req_base64);

  net::URLFetcher* fetcher =
      net::URLFetcher::Create(url_fetcher_id_++, gethash_url,
                              net::URLFetcher::GET, this)
          .release();
  v4_hash_requests_[fetcher] = FullHashDetails(callback,
                                               false /* is_download */);
(...skipping 84 matching lines...)
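The check at the top of GetV4FullHashes above is the client-side gate for both the minimum wait duration and backoff: while the current time is at or before next_gethash_v4_time_, no request is issued and the caller immediately receives empty results. A hedged standalone sketch of that behavior (the class and method names are illustrative, not Chromium APIs):

// Sketch of the request gate: lookups are suppressed until the next allowed
// time, and suppressed lookups report "no matches" so pages are treated as
// safe rather than blocking the caller.
#include <chrono>
#include <functional>
#include <string>
#include <vector>

using Clock = std::chrono::system_clock;
using FullHashResults = std::vector<std::string>;
using FullHashCallback = std::function<void(const FullHashResults&)>;

class V4LookupGate {
 public:
  // Returns true if a network request may be started; otherwise the callback
  // has already been run with empty results.
  bool MaybeStartLookup(const FullHashCallback& callback) {
    if (Clock::now() <= next_allowed_time_) {
      callback(FullHashResults());  // Throttled: report no matches.
      return false;
    }
    // Not throttled: the real code builds and sends the FindFullHashes
    // request here.
    return true;
  }

  // Push the next allowed request time out, e.g. after an error response or
  // a server-provided minimum wait duration.
  void Defer(std::chrono::seconds delay) {
    next_allowed_time_ = Clock::now() + delay;
  }

 private:
  Clock::time_point next_allowed_time_{};  // Epoch: first lookup is allowed.
};

int main() {
  V4LookupGate gate;
  gate.Defer(std::chrono::minutes(20));
  bool started = gate.MaybeStartLookup([](const FullHashResults& results) {
    // Empty results: treat the URL as safe for now.
  });
  return started ? 0 : 1;
}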
    details.callback.Run(full_hashes, cache_lifetime);

    hash_requests_.erase(it);
  } else if (v4_it != v4_hash_requests_.end()) {
    // V4 FindFullHashes response.
    const FullHashDetails& details = v4_it->second;
    std::vector<SBFullHashResult> full_hashes;
    base::TimeDelta negative_cache_duration;
    if (status.is_success() && response_code == net::HTTP_OK) {
      // TODO(kcarattini): Add UMA reporting.
      ResetGetHashV4Errors();
      std::string data;
      source->GetResponseAsString(&data);
      if (!ParseV4HashResponse(data, &full_hashes, &negative_cache_duration)) {
        full_hashes.clear();
        // TODO(kcarattini): Add UMA reporting.
      }
    } else {
      HandleGetHashV4Error(Time::Now());
      // TODO(kcarattini): Add UMA reporting.
      DVLOG(1) << "SafeBrowsing GetEncodedFullHashes request for: " <<
          source->GetURL() << " failed with error: " << status.error() <<
          " and response code: " << response_code;
    }

    // Invoke the callback with full_hashes, even if there was a parse error or
    // an error response code (in which case full_hashes will be empty). The
    // caller can't be blocked indefinitely.
    details.callback.Run(full_hashes, negative_cache_duration);
(...skipping 401 matching lines...)
  }
}

void SafeBrowsingProtocolManager::HandleGetHashError(const Time& now) {
  DCHECK(CalledOnValidThread());
  base::TimeDelta next =
      GetNextBackOffInterval(&gethash_error_count_, &gethash_back_off_mult_);
  next_gethash_time_ = now + next;
}

void SafeBrowsingProtocolManager::HandleGetHashV4Error(const Time& now) {
  DCHECK(CalledOnValidThread());
  base::TimeDelta next = GetNextV4BackOffInterval(
      &gethash_v4_error_count_, &gethash_v4_back_off_mult_);
  next_gethash_v4_time_ = now + next;
}
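Taken together with ResetGetHashV4Errors(), this gives the V4 GetHash path a simple error/success cycle: each failed fetch pushes next_gethash_v4_time_ further out via the backoff formula, and the first successful response clears the error count and multiplier. A minimal standalone model of that cycle (the names below are stand-ins for the Chromium members, not the real API):

// Standalone model of the V4 GetHash backoff state machine: errors defer the
// next allowed request, a success resets the error state.
#include <chrono>
#include <cstdio>

using Clock = std::chrono::system_clock;

class V4GetHashBackoffModel {
 public:
  // Error path, analogous to HandleGetHashV4Error(now); the delay would come
  // from the doubling backoff formula shown earlier in this file.
  void OnError(Clock::time_point now, std::chrono::seconds delay) {
    ++error_count_;
    next_allowed_ = now + delay;
  }

  // Success path, analogous to ResetGetHashV4Errors().
  void OnSuccess() {
    error_count_ = 0;
    multiplier_ = 1;
  }

  bool RequestAllowed(Clock::time_point now) const {
    return now > next_allowed_;
  }

 private:
  int error_count_ = 0;
  int multiplier_ = 1;
  Clock::time_point next_allowed_{};  // Epoch: the first request is allowed.
};

int main() {
  V4GetHashBackoffModel backoff;
  Clock::time_point now = Clock::now();
  backoff.OnError(now, std::chrono::minutes(20));  // First failure: ~15-30 min.
  std::printf("allowed immediately? %d\n", backoff.RequestAllowed(now));
  std::printf("allowed after 30 min? %d\n",
              backoff.RequestAllowed(now + std::chrono::minutes(30)));
  backoff.OnSuccess();  // A later success clears the error state.
  return 0;
}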

void SafeBrowsingProtocolManager::UpdateFinished(bool success) {
  UpdateFinished(success, !success);
}

void SafeBrowsingProtocolManager::UpdateFinished(bool success, bool back_off) {
  DCHECK(CalledOnValidThread());
  UMA_HISTOGRAM_COUNTS("SB2.UpdateSize", update_size_);
  update_size_ = 0;
  bool update_success = success || request_type_ == CHUNK_REQUEST;
  if (backup_update_reason_ == BACKUP_UPDATE_REASON_MAX) {
(...skipping 79 matching lines...)
SafeBrowsingProtocolManager::FullHashDetails::FullHashDetails(
    FullHashCallback callback,
    bool is_download)
    : callback(callback), is_download(is_download) {}

SafeBrowsingProtocolManager::FullHashDetails::~FullHashDetails() {}

SafeBrowsingProtocolManagerDelegate::~SafeBrowsingProtocolManagerDelegate() {}

}  // namespace safe_browsing