OLD | NEW |
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "chrome/browser/safe_browsing/protocol_manager.h" | 5 #include "chrome/browser/safe_browsing/protocol_manager.h" |
6 | 6 |
7 #include "base/environment.h" | 7 #include "base/environment.h" |
8 #include "base/logging.h" | 8 #include "base/logging.h" |
9 #include "base/memory/scoped_vector.h" | 9 #include "base/memory/scoped_vector.h" |
10 #include "base/metrics/histogram_macros.h" | 10 #include "base/metrics/histogram_macros.h" |
(...skipping 15 matching lines...) |
26 #include "net/base/net_errors.h" | 26 #include "net/base/net_errors.h" |
27 #include "net/http/http_response_headers.h" | 27 #include "net/http/http_response_headers.h" |
28 #include "net/http/http_status_code.h" | 28 #include "net/http/http_status_code.h" |
29 #include "net/url_request/url_fetcher.h" | 29 #include "net/url_request/url_fetcher.h" |
30 #include "net/url_request/url_request_context_getter.h" | 30 #include "net/url_request/url_request_context_getter.h" |
31 #include "net/url_request/url_request_status.h" | 31 #include "net/url_request/url_request_status.h" |
32 | 32 |
33 using base::Time; | 33 using base::Time; |
34 using base::TimeDelta; | 34 using base::TimeDelta; |
35 | 35 |
36 namespace { | 36 namespace safe_browsing { |
37 | 37 |
38 // UpdateResult indicates what happened with the primary and/or backup update | 38 // UpdateResult indicates what happened with the primary and/or backup update |
39 // requests. The ordering of the values must stay the same for UMA consistency, | 39 // requests. The ordering of the values must stay the same for UMA consistency, |
40 // and is also ordered in this way to match ProtocolManager::BackupUpdateReason. | 40 // and is also ordered in this way to match ProtocolManager::BackupUpdateReason. |
41 enum UpdateResult { | 41 enum UpdateResult { |
42 UPDATE_RESULT_FAIL, | 42 UPDATE_RESULT_FAIL, |
43 UPDATE_RESULT_SUCCESS, | 43 UPDATE_RESULT_SUCCESS, |
44 UPDATE_RESULT_BACKUP_CONNECT_FAIL, | 44 UPDATE_RESULT_BACKUP_CONNECT_FAIL, |
45 UPDATE_RESULT_BACKUP_CONNECT_SUCCESS, | 45 UPDATE_RESULT_BACKUP_CONNECT_SUCCESS, |
46 UPDATE_RESULT_BACKUP_HTTP_FAIL, | 46 UPDATE_RESULT_BACKUP_HTTP_FAIL, |
(...skipping 18 matching lines...) |
65 base::TimeDelta GetNextUpdateIntervalFromFinch() { | 65 base::TimeDelta GetNextUpdateIntervalFromFinch() { |
66 std::string num_str = variations::GetVariationParamValue( | 66 std::string num_str = variations::GetVariationParamValue( |
67 kSBUpdateFrequencyFinchExperiment, kSBUpdateFrequencyFinchParam); | 67 kSBUpdateFrequencyFinchExperiment, kSBUpdateFrequencyFinchParam); |
68 int finch_next_update_interval_minutes = 0; | 68 int finch_next_update_interval_minutes = 0; |
69 if (!base::StringToInt(num_str, &finch_next_update_interval_minutes)) { | 69 if (!base::StringToInt(num_str, &finch_next_update_interval_minutes)) { |
70 finch_next_update_interval_minutes = 0; // Defaults to 0. | 70 finch_next_update_interval_minutes = 0; // Defaults to 0. |
71 } | 71 } |
72 return base::TimeDelta::FromMinutes(finch_next_update_interval_minutes); | 72 return base::TimeDelta::FromMinutes(finch_next_update_interval_minutes); |
73 } | 73 } |
74 | 74 |
75 } // namespace | |
76 | |
77 // Minimum time, in seconds, from start up before we must issue an update query. | 75 // Minimum time, in seconds, from start up before we must issue an update query. |
78 static const int kSbTimerStartIntervalSecMin = 60; | 76 static const int kSbTimerStartIntervalSecMin = 60; |
79 | 77 |
80 // Maximum time, in seconds, from start up before we must issue an update query. | 78 // Maximum time, in seconds, from start up before we must issue an update query. |
81 static const int kSbTimerStartIntervalSecMax = 300; | 79 static const int kSbTimerStartIntervalSecMax = 300; |
82 | 80 |
83 // The maximum time, in seconds, to wait for a response to an update request. | 81 // The maximum time, in seconds, to wait for a response to an update request. |
84 static const int kSbMaxUpdateWaitSec = 30; | 82 static const int kSbMaxUpdateWaitSec = 30; |
85 | 83 |
86 // Maximum back off multiplier. | 84 // Maximum back off multiplier. |
87 static const size_t kSbMaxBackOff = 8; | 85 static const size_t kSbMaxBackOff = 8; |
88 | 86 |
89 const char kUmaHashResponseMetricName[] = "SB2.GetHashResponseOrErrorCode"; | 87 const char kUmaHashResponseMetricName[] = "SB2.GetHashResponseOrErrorCode"; |
90 | 88 |
91 // The default SBProtocolManagerFactory. | 89 // The default SBProtocolManagerFactory. |
92 class SBProtocolManagerFactoryImpl : public SBProtocolManagerFactory { | 90 class SBProtocolManagerFactoryImpl : public SBProtocolManagerFactory { |
93 public: | 91 public: |
94 SBProtocolManagerFactoryImpl() { } | 92 SBProtocolManagerFactoryImpl() {} |
95 ~SBProtocolManagerFactoryImpl() override {} | 93 ~SBProtocolManagerFactoryImpl() override {} |
96 SafeBrowsingProtocolManager* CreateProtocolManager( | 94 SafeBrowsingProtocolManager* CreateProtocolManager( |
97 SafeBrowsingProtocolManagerDelegate* delegate, | 95 SafeBrowsingProtocolManagerDelegate* delegate, |
98 net::URLRequestContextGetter* request_context_getter, | 96 net::URLRequestContextGetter* request_context_getter, |
99 const SafeBrowsingProtocolConfig& config) override { | 97 const SafeBrowsingProtocolConfig& config) override { |
100 return new SafeBrowsingProtocolManager( | 98 return new SafeBrowsingProtocolManager(delegate, request_context_getter, |
101 delegate, request_context_getter, config); | 99 config); |
102 } | 100 } |
| 101 |
103 private: | 102 private: |
104 DISALLOW_COPY_AND_ASSIGN(SBProtocolManagerFactoryImpl); | 103 DISALLOW_COPY_AND_ASSIGN(SBProtocolManagerFactoryImpl); |
105 }; | 104 }; |
106 | 105 |
107 // SafeBrowsingProtocolManager implementation ---------------------------------- | 106 // SafeBrowsingProtocolManager implementation ---------------------------------- |
108 | 107 |
109 // static | 108 // static |
110 SBProtocolManagerFactory* SafeBrowsingProtocolManager::factory_ = NULL; | 109 SBProtocolManagerFactory* SafeBrowsingProtocolManager::factory_ = NULL; |
111 | 110 |
112 // static | 111 // static |
113 SafeBrowsingProtocolManager* SafeBrowsingProtocolManager::Create( | 112 SafeBrowsingProtocolManager* SafeBrowsingProtocolManager::Create( |
114 SafeBrowsingProtocolManagerDelegate* delegate, | 113 SafeBrowsingProtocolManagerDelegate* delegate, |
115 net::URLRequestContextGetter* request_context_getter, | 114 net::URLRequestContextGetter* request_context_getter, |
116 const SafeBrowsingProtocolConfig& config) { | 115 const SafeBrowsingProtocolConfig& config) { |
117 // TODO(cbentzel): Remove ScopedTracker below once crbug.com/483689 is fixed. | 116 // TODO(cbentzel): Remove ScopedTracker below once crbug.com/483689 is fixed. |
118 tracked_objects::ScopedTracker tracking_profile( | 117 tracked_objects::ScopedTracker tracking_profile( |
119 FROM_HERE_WITH_EXPLICIT_FUNCTION( | 118 FROM_HERE_WITH_EXPLICIT_FUNCTION( |
120 "483689 SafeBrowsingProtocolManager::Create")); | 119 "483689 SafeBrowsingProtocolManager::Create")); |
121 if (!factory_) | 120 if (!factory_) |
122 factory_ = new SBProtocolManagerFactoryImpl(); | 121 factory_ = new SBProtocolManagerFactoryImpl(); |
123 return factory_->CreateProtocolManager( | 122 return factory_->CreateProtocolManager(delegate, request_context_getter, |
124 delegate, request_context_getter, config); | 123 config); |
125 } | 124 } |
126 | 125 |
127 SafeBrowsingProtocolManager::SafeBrowsingProtocolManager( | 126 SafeBrowsingProtocolManager::SafeBrowsingProtocolManager( |
128 SafeBrowsingProtocolManagerDelegate* delegate, | 127 SafeBrowsingProtocolManagerDelegate* delegate, |
129 net::URLRequestContextGetter* request_context_getter, | 128 net::URLRequestContextGetter* request_context_getter, |
130 const SafeBrowsingProtocolConfig& config) | 129 const SafeBrowsingProtocolConfig& config) |
131 : delegate_(delegate), | 130 : delegate_(delegate), |
132 request_type_(NO_REQUEST), | 131 request_type_(NO_REQUEST), |
133 update_error_count_(0), | 132 update_error_count_(0), |
134 gethash_error_count_(0), | 133 gethash_error_count_(0), |
(...skipping 21 matching lines...) |
156 backup_url_prefixes_[BACKUP_UPDATE_REASON_NETWORK] = | 155 backup_url_prefixes_[BACKUP_UPDATE_REASON_NETWORK] = |
157 config.backup_network_error_url_prefix; | 156 config.backup_network_error_url_prefix; |
158 | 157 |
159 // Set the backoff multiplier fuzz to a random value between 0 and 1. | 158 // Set the backoff multiplier fuzz to a random value between 0 and 1. |
160 back_off_fuzz_ = static_cast<float>(base::RandDouble()); | 159 back_off_fuzz_ = static_cast<float>(base::RandDouble()); |
161 if (version_.empty()) | 160 if (version_.empty()) |
162 version_ = SafeBrowsingProtocolManagerHelper::Version(); | 161 version_ = SafeBrowsingProtocolManagerHelper::Version(); |
163 } | 162 } |
164 | 163 |
165 // static | 164 // static |
166 void SafeBrowsingProtocolManager::RecordGetHashResult( | 165 void SafeBrowsingProtocolManager::RecordGetHashResult(bool is_download, |
167 bool is_download, ResultType result_type) { | 166 ResultType result_type) { |
168 if (is_download) { | 167 if (is_download) { |
169 UMA_HISTOGRAM_ENUMERATION("SB2.GetHashResultDownload", result_type, | 168 UMA_HISTOGRAM_ENUMERATION("SB2.GetHashResultDownload", result_type, |
170 GET_HASH_RESULT_MAX); | 169 GET_HASH_RESULT_MAX); |
171 } else { | 170 } else { |
172 UMA_HISTOGRAM_ENUMERATION("SB2.GetHashResult", result_type, | 171 UMA_HISTOGRAM_ENUMERATION("SB2.GetHashResult", result_type, |
173 GET_HASH_RESULT_MAX); | 172 GET_HASH_RESULT_MAX); |
174 } | 173 } |
175 } | 174 } |
176 | 175 |
177 void SafeBrowsingProtocolManager::RecordHttpResponseOrErrorCode( | 176 void SafeBrowsingProtocolManager::RecordHttpResponseOrErrorCode( |
178 const char* metric_name, const net::URLRequestStatus& status, | 177 const char* metric_name, |
| 178 const net::URLRequestStatus& status, |
179 int response_code) { | 179 int response_code) { |
180 UMA_HISTOGRAM_SPARSE_SLOWLY( | 180 UMA_HISTOGRAM_SPARSE_SLOWLY( |
181 metric_name, status.is_success() ? response_code : status.error()); | 181 metric_name, status.is_success() ? response_code : status.error()); |
182 } | 182 } |
183 | 183 |
184 bool SafeBrowsingProtocolManager::IsUpdateScheduled() const { | 184 bool SafeBrowsingProtocolManager::IsUpdateScheduled() const { |
185 return update_timer_.IsRunning(); | 185 return update_timer_.IsRunning(); |
186 } | 186 } |
187 | 187 |
188 SafeBrowsingProtocolManager::~SafeBrowsingProtocolManager() { | 188 SafeBrowsingProtocolManager::~SafeBrowsingProtocolManager() { |
(...skipping 17 matching lines...) |
206 // required to return empty results (i.e. treat the page as safe). | 206 // required to return empty results (i.e. treat the page as safe). |
207 if (gethash_error_count_ && Time::Now() <= next_gethash_time_) { | 207 if (gethash_error_count_ && Time::Now() <= next_gethash_time_) { |
208 RecordGetHashResult(is_download, GET_HASH_BACKOFF_ERROR); | 208 RecordGetHashResult(is_download, GET_HASH_BACKOFF_ERROR); |
209 std::vector<SBFullHashResult> full_hashes; | 209 std::vector<SBFullHashResult> full_hashes; |
210 callback.Run(full_hashes, base::TimeDelta()); | 210 callback.Run(full_hashes, base::TimeDelta()); |
211 return; | 211 return; |
212 } | 212 } |
213 GURL gethash_url = GetHashUrl(is_extended_reporting); | 213 GURL gethash_url = GetHashUrl(is_extended_reporting); |
214 net::URLFetcher* fetcher = | 214 net::URLFetcher* fetcher = |
215 net::URLFetcher::Create(url_fetcher_id_++, gethash_url, | 215 net::URLFetcher::Create(url_fetcher_id_++, gethash_url, |
216 net::URLFetcher::POST, this).release(); | 216 net::URLFetcher::POST, this) |
| 217 .release(); |
217 hash_requests_[fetcher] = FullHashDetails(callback, is_download); | 218 hash_requests_[fetcher] = FullHashDetails(callback, is_download); |
218 | 219 |
219 const std::string get_hash = safe_browsing::FormatGetHash(prefixes); | 220 const std::string get_hash = FormatGetHash(prefixes); |
220 | 221 |
221 fetcher->SetLoadFlags(net::LOAD_DISABLE_CACHE); | 222 fetcher->SetLoadFlags(net::LOAD_DISABLE_CACHE); |
222 fetcher->SetRequestContext(request_context_getter_.get()); | 223 fetcher->SetRequestContext(request_context_getter_.get()); |
223 fetcher->SetUploadData("text/plain", get_hash); | 224 fetcher->SetUploadData("text/plain", get_hash); |
224 fetcher->Start(); | 225 fetcher->Start(); |
225 } | 226 } |
226 | 227 |
227 void SafeBrowsingProtocolManager::GetNextUpdate() { | 228 void SafeBrowsingProtocolManager::GetNextUpdate() { |
228 DCHECK(CalledOnValidThread()); | 229 DCHECK(CalledOnValidThread()); |
229 if (request_.get() || request_type_ != NO_REQUEST) | 230 if (request_.get() || request_type_ != NO_REQUEST) |
(...skipping 13 matching lines...) |
243 // do will report all the chunks we have. If that chunk is still | 244 // do will report all the chunks we have. If that chunk is still |
244 // required, the SafeBrowsing servers will tell us to get it again. | 245 // required, the SafeBrowsing servers will tell us to get it again. |
245 void SafeBrowsingProtocolManager::OnURLFetchComplete( | 246 void SafeBrowsingProtocolManager::OnURLFetchComplete( |
246 const net::URLFetcher* source) { | 247 const net::URLFetcher* source) { |
247 DCHECK(CalledOnValidThread()); | 248 DCHECK(CalledOnValidThread()); |
248 scoped_ptr<const net::URLFetcher> fetcher; | 249 scoped_ptr<const net::URLFetcher> fetcher; |
249 | 250 |
250 HashRequests::iterator it = hash_requests_.find(source); | 251 HashRequests::iterator it = hash_requests_.find(source); |
251 int response_code = source->GetResponseCode(); | 252 int response_code = source->GetResponseCode(); |
252 net::URLRequestStatus status = source->GetStatus(); | 253 net::URLRequestStatus status = source->GetStatus(); |
253 RecordHttpResponseOrErrorCode( | 254 RecordHttpResponseOrErrorCode(kUmaHashResponseMetricName, status, |
254 kUmaHashResponseMetricName, status, response_code); | 255 response_code); |
255 if (it != hash_requests_.end()) { | 256 if (it != hash_requests_.end()) { |
256 // GetHash response. | 257 // GetHash response. |
257 fetcher.reset(it->first); | 258 fetcher.reset(it->first); |
258 const FullHashDetails& details = it->second; | 259 const FullHashDetails& details = it->second; |
259 std::vector<SBFullHashResult> full_hashes; | 260 std::vector<SBFullHashResult> full_hashes; |
260 base::TimeDelta cache_lifetime; | 261 base::TimeDelta cache_lifetime; |
261 if (status.is_success() && | 262 if (status.is_success() && (response_code == net::HTTP_OK || |
262 (response_code == net::HTTP_OK || | 263 response_code == net::HTTP_NO_CONTENT)) { |
263 response_code == net::HTTP_NO_CONTENT)) { | |
264 // For tracking our GetHash false positive (net::HTTP_NO_CONTENT) rate, | 264 // For tracking our GetHash false positive (net::HTTP_NO_CONTENT) rate, |
265 // compared to real (net::HTTP_OK) responses. | 265 // compared to real (net::HTTP_OK) responses. |
266 if (response_code == net::HTTP_OK) | 266 if (response_code == net::HTTP_OK) |
267 RecordGetHashResult(details.is_download, GET_HASH_STATUS_200); | 267 RecordGetHashResult(details.is_download, GET_HASH_STATUS_200); |
268 else | 268 else |
269 RecordGetHashResult(details.is_download, GET_HASH_STATUS_204); | 269 RecordGetHashResult(details.is_download, GET_HASH_STATUS_204); |
270 | 270 |
271 gethash_error_count_ = 0; | 271 gethash_error_count_ = 0; |
272 gethash_back_off_mult_ = 1; | 272 gethash_back_off_mult_ = 1; |
273 std::string data; | 273 std::string data; |
274 source->GetResponseAsString(&data); | 274 source->GetResponseAsString(&data); |
275 if (!safe_browsing::ParseGetHash( | 275 if (!ParseGetHash(data.data(), data.length(), &cache_lifetime, |
276 data.data(), data.length(), &cache_lifetime, &full_hashes)) { | 276 &full_hashes)) { |
277 full_hashes.clear(); | 277 full_hashes.clear(); |
278 RecordGetHashResult(details.is_download, GET_HASH_PARSE_ERROR); | 278 RecordGetHashResult(details.is_download, GET_HASH_PARSE_ERROR); |
279 // TODO(cbentzel): Should cache_lifetime be set to 0 here? (See | 279 // TODO(cbentzel): Should cache_lifetime be set to 0 here? (See |
280 // http://crbug.com/360232.) | 280 // http://crbug.com/360232.) |
281 } | 281 } |
282 } else { | 282 } else { |
283 HandleGetHashError(Time::Now()); | 283 HandleGetHashError(Time::Now()); |
284 if (status.status() == net::URLRequestStatus::FAILED) { | 284 if (status.status() == net::URLRequestStatus::FAILED) { |
285 RecordGetHashResult(details.is_download, GET_HASH_NETWORK_ERROR); | 285 RecordGetHashResult(details.is_download, GET_HASH_NETWORK_ERROR); |
286 DVLOG(1) << "SafeBrowsing GetHash request for: " << source->GetURL() | 286 DVLOG(1) << "SafeBrowsing GetHash request for: " << source->GetURL() |
(...skipping 27 matching lines...) |
314 timeout_timer_.Stop(); | 314 timeout_timer_.Stop(); |
315 } | 315 } |
316 | 316 |
317 if (status.is_success() && response_code == net::HTTP_OK) { | 317 if (status.is_success() && response_code == net::HTTP_OK) { |
318 // We have data from the SafeBrowsing service. | 318 // We have data from the SafeBrowsing service. |
319 std::string data; | 319 std::string data; |
320 source->GetResponseAsString(&data); | 320 source->GetResponseAsString(&data); |
321 | 321 |
322 // TODO(shess): Cleanup the flow of this code so that |parsed_ok| can be | 322 // TODO(shess): Cleanup the flow of this code so that |parsed_ok| can be |
323 // removed or omitted. | 323 // removed or omitted. |
324 const bool parsed_ok = HandleServiceResponse( | 324 const bool parsed_ok = |
325 source->GetURL(), data.data(), data.length()); | 325 HandleServiceResponse(source->GetURL(), data.data(), data.length()); |
326 if (!parsed_ok) { | 326 if (!parsed_ok) { |
327 DVLOG(1) << "SafeBrowsing request for: " << source->GetURL() | 327 DVLOG(1) << "SafeBrowsing request for: " << source->GetURL() |
328 << " failed parse."; | 328 << " failed parse."; |
329 chunk_request_urls_.clear(); | 329 chunk_request_urls_.clear(); |
330 if (request_type_ == UPDATE_REQUEST && | 330 if (request_type_ == UPDATE_REQUEST && |
331 IssueBackupUpdateRequest(BACKUP_UPDATE_REASON_HTTP)) { | 331 IssueBackupUpdateRequest(BACKUP_UPDATE_REASON_HTTP)) { |
332 return; | 332 return; |
333 } | 333 } |
334 UpdateFinished(false); | 334 UpdateFinished(false); |
335 } | 335 } |
(...skipping 53 matching lines...) |
389 } | 389 } |
390 } | 390 } |
391 UpdateFinished(false); | 391 UpdateFinished(false); |
392 } | 392 } |
393 } | 393 } |
394 | 394 |
395 // Get the next chunk if available. | 395 // Get the next chunk if available. |
396 IssueChunkRequest(); | 396 IssueChunkRequest(); |
397 } | 397 } |
398 | 398 |
399 bool SafeBrowsingProtocolManager::HandleServiceResponse( | 399 bool SafeBrowsingProtocolManager::HandleServiceResponse(const GURL& url, |
400 const GURL& url, const char* data, size_t length) { | 400 const char* data, |
| 401 size_t length) { |
401 DCHECK(CalledOnValidThread()); | 402 DCHECK(CalledOnValidThread()); |
402 | 403 |
403 switch (request_type_) { | 404 switch (request_type_) { |
404 case UPDATE_REQUEST: | 405 case UPDATE_REQUEST: |
405 case BACKUP_UPDATE_REQUEST: { | 406 case BACKUP_UPDATE_REQUEST: { |
406 size_t next_update_sec = 0; | 407 size_t next_update_sec = 0; |
407 bool reset = false; | 408 bool reset = false; |
408 scoped_ptr<std::vector<SBChunkDelete> > chunk_deletes( | 409 scoped_ptr<std::vector<SBChunkDelete>> chunk_deletes( |
409 new std::vector<SBChunkDelete>); | 410 new std::vector<SBChunkDelete>); |
410 std::vector<ChunkUrl> chunk_urls; | 411 std::vector<ChunkUrl> chunk_urls; |
411 if (!safe_browsing::ParseUpdate(data, length, &next_update_sec, &reset, | 412 if (!ParseUpdate(data, length, &next_update_sec, &reset, |
412 chunk_deletes.get(), &chunk_urls)) { | 413 chunk_deletes.get(), &chunk_urls)) { |
413 return false; | 414 return false; |
414 } | 415 } |
415 | 416 |
416 // New time for the next update. | 417 // New time for the next update. |
417 base::TimeDelta finch_next_update_interval = | 418 base::TimeDelta finch_next_update_interval = |
418 GetNextUpdateIntervalFromFinch(); | 419 GetNextUpdateIntervalFromFinch(); |
419 if (finch_next_update_interval > base::TimeDelta()) { | 420 if (finch_next_update_interval > base::TimeDelta()) { |
420 next_update_interval_ = finch_next_update_interval; | 421 next_update_interval_ = finch_next_update_interval; |
421 } else { | 422 } else { |
422 base::TimeDelta next_update_interval = | 423 base::TimeDelta next_update_interval = |
423 base::TimeDelta::FromSeconds(next_update_sec); | 424 base::TimeDelta::FromSeconds(next_update_sec); |
424 if (next_update_interval > base::TimeDelta()) { | 425 if (next_update_interval > base::TimeDelta()) { |
425 next_update_interval_ = next_update_interval; | 426 next_update_interval_ = next_update_interval; |
426 } | 427 } |
427 } | 428 } |
428 last_update_ = Time::Now(); | 429 last_update_ = Time::Now(); |
429 | 430 |
430 // New chunks to download. | 431 // New chunks to download. |
(...skipping 14 matching lines...) |
445 if (!chunk_deletes->empty()) | 446 if (!chunk_deletes->empty()) |
446 delegate_->DeleteChunks(chunk_deletes.Pass()); | 447 delegate_->DeleteChunks(chunk_deletes.Pass()); |
447 | 448 |
448 break; | 449 break; |
449 } | 450 } |
450 case CHUNK_REQUEST: { | 451 case CHUNK_REQUEST: { |
451 UMA_HISTOGRAM_TIMES("SB2.ChunkRequest", | 452 UMA_HISTOGRAM_TIMES("SB2.ChunkRequest", |
452 base::Time::Now() - chunk_request_start_); | 453 base::Time::Now() - chunk_request_start_); |
453 | 454 |
454 const ChunkUrl chunk_url = chunk_request_urls_.front(); | 455 const ChunkUrl chunk_url = chunk_request_urls_.front(); |
455 scoped_ptr<ScopedVector<SBChunkData> > | 456 scoped_ptr<ScopedVector<SBChunkData>> chunks( |
456 chunks(new ScopedVector<SBChunkData>); | 457 new ScopedVector<SBChunkData>); |
457 UMA_HISTOGRAM_COUNTS("SB2.ChunkSize", length); | 458 UMA_HISTOGRAM_COUNTS("SB2.ChunkSize", length); |
458 update_size_ += length; | 459 update_size_ += length; |
459 if (!safe_browsing::ParseChunk(data, length, chunks.get())) | 460 if (!ParseChunk(data, length, chunks.get())) |
460 return false; | 461 return false; |
461 | 462 |
462 // Chunks to add to storage. Pass ownership of |chunks|. | 463 // Chunks to add to storage. Pass ownership of |chunks|. |
463 if (!chunks->empty()) { | 464 if (!chunks->empty()) { |
464 chunk_pending_to_write_ = true; | 465 chunk_pending_to_write_ = true; |
465 delegate_->AddChunks( | 466 delegate_->AddChunks( |
466 chunk_url.list_name, chunks.Pass(), | 467 chunk_url.list_name, chunks.Pass(), |
467 base::Bind(&SafeBrowsingProtocolManager::OnAddChunksComplete, | 468 base::Bind(&SafeBrowsingProtocolManager::OnAddChunksComplete, |
468 base::Unretained(this))); | 469 base::Unretained(this))); |
469 } | 470 } |
(...skipping 55 matching lines...) |
525 next = GetNextBackOffInterval(&update_error_count_, &update_back_off_mult_); | 526 next = GetNextBackOffInterval(&update_error_count_, &update_back_off_mult_); |
526 } else { | 527 } else { |
527 // Successful response means error reset. | 528 // Successful response means error reset. |
528 update_error_count_ = 0; | 529 update_error_count_ = 0; |
529 update_back_off_mult_ = 1; | 530 update_back_off_mult_ = 1; |
530 } | 531 } |
531 return next; | 532 return next; |
532 } | 533 } |
533 | 534 |
534 base::TimeDelta SafeBrowsingProtocolManager::GetNextBackOffInterval( | 535 base::TimeDelta SafeBrowsingProtocolManager::GetNextBackOffInterval( |
535 size_t* error_count, size_t* multiplier) const { | 536 size_t* error_count, |
| 537 size_t* multiplier) const { |
536 DCHECK(CalledOnValidThread()); | 538 DCHECK(CalledOnValidThread()); |
537 DCHECK(multiplier && error_count); | 539 DCHECK(multiplier && error_count); |
538 (*error_count)++; | 540 (*error_count)++; |
539 if (*error_count > 1 && *error_count < 6) { | 541 if (*error_count > 1 && *error_count < 6) { |
540 base::TimeDelta next = base::TimeDelta::FromMinutes( | 542 base::TimeDelta next = |
541 *multiplier * (1 + back_off_fuzz_) * 30); | 543 base::TimeDelta::FromMinutes(*multiplier * (1 + back_off_fuzz_) * 30); |
542 *multiplier *= 2; | 544 *multiplier *= 2; |
543 if (*multiplier > kSbMaxBackOff) | 545 if (*multiplier > kSbMaxBackOff) |
544 *multiplier = kSbMaxBackOff; | 546 *multiplier = kSbMaxBackOff; |
545 return next; | 547 return next; |
546 } | 548 } |
547 if (*error_count >= 6) | 549 if (*error_count >= 6) |
548 return base::TimeDelta::FromHours(8); | 550 return base::TimeDelta::FromHours(8); |
549 return base::TimeDelta::FromMinutes(1); | 551 return base::TimeDelta::FromMinutes(1); |
550 } | 552 } |
551 | 553 |
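Note on the backoff schedule implemented above: the first consecutive error retries after 1 minute; errors 2 through 5 wait multiplier * (1 + back_off_fuzz_) * 30 minutes, with the multiplier doubling each time and capped at kSbMaxBackOff (8); from the sixth error onward the wait is a flat 8 hours. A minimal standalone sketch of that arithmetic, in plain C++ with no Chromium types (illustrative only, not the real implementation):

// backoff_sketch.cc -- mirrors the arithmetic of GetNextBackOffInterval().
#include <algorithm>
#include <cstdio>

int main() {
  const int kMaxBackOff = 8;  // mirrors kSbMaxBackOff
  const double fuzz = 0.5;    // back_off_fuzz_ is a random value in [0, 1)
  int error_count = 0;
  int multiplier = 1;
  for (int i = 0; i < 7; ++i) {
    ++error_count;
    double wait_minutes;
    if (error_count > 1 && error_count < 6) {
      wait_minutes = multiplier * (1 + fuzz) * 30;
      multiplier = std::min(multiplier * 2, kMaxBackOff);
    } else if (error_count >= 6) {
      wait_minutes = 8 * 60;  // flat 8 hours once errors keep repeating
    } else {
      wait_minutes = 1;  // first error: retry after one minute
    }
    std::printf("error %d -> wait %.0f min\n", error_count, wait_minutes);
  }
  return 0;
}

With fuzz = 0.5 this prints 1, 45, 90, 180, 360, 480, 480 minutes for errors 1 through 7, matching the shape of the schedule above.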
(...skipping 72 matching lines...) |
624 if (database_error) { | 626 if (database_error) { |
625 // The update was not successful, but don't back off. | 627 // The update was not successful, but don't back off. |
626 UpdateFinished(false, false); | 628 UpdateFinished(false, false); |
627 return; | 629 return; |
628 } | 630 } |
629 | 631 |
630 // Format our stored chunks: | 632 // Format our stored chunks: |
631 bool found_malware = false; | 633 bool found_malware = false; |
632 bool found_phishing = false; | 634 bool found_phishing = false; |
633 for (size_t i = 0; i < lists.size(); ++i) { | 635 for (size_t i = 0; i < lists.size(); ++i) { |
634 update_list_data_.append(safe_browsing::FormatList(lists[i])); | 636 update_list_data_.append(FormatList(lists[i])); |
635 if (lists[i].name == safe_browsing::kPhishingList) | 637 if (lists[i].name == kPhishingList) |
636 found_phishing = true; | 638 found_phishing = true; |
637 | 639 |
638 if (lists[i].name == safe_browsing::kMalwareList) | 640 if (lists[i].name == kMalwareList) |
639 found_malware = true; | 641 found_malware = true; |
640 } | 642 } |
641 | 643 |
642 // If we have an empty database, let the server know we want data for these | 644 // If we have an empty database, let the server know we want data for these |
643 // lists. | 645 // lists. |
644 // TODO(shess): These cases never happen because the database fills in the | 646 // TODO(shess): These cases never happen because the database fills in the |
645 // lists in GetChunks(). Refactor the unit tests so that this code can be | 647 // lists in GetChunks(). Refactor the unit tests so that this code can be |
646 // removed. | 648 // removed. |
647 if (!found_phishing) { | 649 if (!found_phishing) { |
648 update_list_data_.append(safe_browsing::FormatList( | 650 update_list_data_.append(FormatList(SBListChunkRanges(kPhishingList))); |
649 SBListChunkRanges(safe_browsing::kPhishingList))); | |
650 } | 651 } |
651 if (!found_malware) { | 652 if (!found_malware) { |
652 update_list_data_.append(safe_browsing::FormatList( | 653 update_list_data_.append(FormatList(SBListChunkRanges(kMalwareList))); |
653 SBListChunkRanges(safe_browsing::kMalwareList))); | |
654 } | 654 } |
655 | 655 |
656 // Large requests are (probably) a sign of database corruption. | 656 // Large requests are (probably) a sign of database corruption. |
657 // Record stats to inform decisions about whether to automate | 657 // Record stats to inform decisions about whether to automate |
658 // deletion of such databases. http://crbug.com/120219 | 658 // deletion of such databases. http://crbug.com/120219 |
659 UMA_HISTOGRAM_COUNTS("SB2.UpdateRequestSize", update_list_data_.size()); | 659 UMA_HISTOGRAM_COUNTS("SB2.UpdateRequestSize", update_list_data_.size()); |
660 | 660 |
661 GURL update_url = UpdateUrl(is_extended_reporting); | 661 GURL update_url = UpdateUrl(is_extended_reporting); |
662 request_ = net::URLFetcher::Create(url_fetcher_id_++, update_url, | 662 request_ = net::URLFetcher::Create(url_fetcher_id_++, update_url, |
663 net::URLFetcher::POST, this); | 663 net::URLFetcher::POST, this); |
(...skipping 29 matching lines...) |
693 if (chunk_request_urls_.empty()) { | 693 if (chunk_request_urls_.empty()) { |
694 UMA_HISTOGRAM_LONG_TIMES("SB2.Update", Time::Now() - last_update_); | 694 UMA_HISTOGRAM_LONG_TIMES("SB2.Update", Time::Now() - last_update_); |
695 UpdateFinished(true); | 695 UpdateFinished(true); |
696 } else { | 696 } else { |
697 IssueChunkRequest(); | 697 IssueChunkRequest(); |
698 } | 698 } |
699 } | 699 } |
700 | 700 |
701 void SafeBrowsingProtocolManager::HandleGetHashError(const Time& now) { | 701 void SafeBrowsingProtocolManager::HandleGetHashError(const Time& now) { |
702 DCHECK(CalledOnValidThread()); | 702 DCHECK(CalledOnValidThread()); |
703 base::TimeDelta next = GetNextBackOffInterval( | 703 base::TimeDelta next = |
704 &gethash_error_count_, &gethash_back_off_mult_); | 704 GetNextBackOffInterval(&gethash_error_count_, &gethash_back_off_mult_); |
705 next_gethash_time_ = now + next; | 705 next_gethash_time_ = now + next; |
706 } | 706 } |
707 | 707 |
708 void SafeBrowsingProtocolManager::UpdateFinished(bool success) { | 708 void SafeBrowsingProtocolManager::UpdateFinished(bool success) { |
709 UpdateFinished(success, !success); | 709 UpdateFinished(success, !success); |
710 } | 710 } |
711 | 711 |
712 void SafeBrowsingProtocolManager::UpdateFinished(bool success, bool back_off) { | 712 void SafeBrowsingProtocolManager::UpdateFinished(bool success, bool back_off) { |
713 DCHECK(CalledOnValidThread()); | 713 DCHECK(CalledOnValidThread()); |
714 UMA_HISTOGRAM_COUNTS("SB2.UpdateSize", update_size_); | 714 UMA_HISTOGRAM_COUNTS("SB2.UpdateSize", update_size_); |
715 update_size_ = 0; | 715 update_size_ = 0; |
716 bool update_success = success || request_type_ == CHUNK_REQUEST; | 716 bool update_success = success || request_type_ == CHUNK_REQUEST; |
717 if (backup_update_reason_ == BACKUP_UPDATE_REASON_MAX) { | 717 if (backup_update_reason_ == BACKUP_UPDATE_REASON_MAX) { |
718 RecordUpdateResult( | 718 RecordUpdateResult(update_success ? UPDATE_RESULT_SUCCESS |
719 update_success ? UPDATE_RESULT_SUCCESS : UPDATE_RESULT_FAIL); | 719 : UPDATE_RESULT_FAIL); |
720 } else { | 720 } else { |
721 UpdateResult update_result = static_cast<UpdateResult>( | 721 UpdateResult update_result = static_cast<UpdateResult>( |
722 UPDATE_RESULT_BACKUP_START + | 722 UPDATE_RESULT_BACKUP_START + |
723 (static_cast<int>(backup_update_reason_) * 2) + | 723 (static_cast<int>(backup_update_reason_) * 2) + update_success); |
724 update_success); | |
725 RecordUpdateResult(update_result); | 724 RecordUpdateResult(update_result); |
726 } | 725 } |
727 backup_update_reason_ = BACKUP_UPDATE_REASON_MAX; | 726 backup_update_reason_ = BACKUP_UPDATE_REASON_MAX; |
728 request_type_ = NO_REQUEST; | 727 request_type_ = NO_REQUEST; |
729 update_list_data_.clear(); | 728 update_list_data_.clear(); |
730 delegate_->UpdateFinished(success); | 729 delegate_->UpdateFinished(success); |
731 ScheduleNextUpdate(back_off); | 730 ScheduleNextUpdate(back_off); |
732 } | 731 } |
733 | 732 |
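The bucket arithmetic in UpdateFinished() above depends on UpdateResult being laid out in fail/success pairs, one pair per BackupUpdateReason, which is why the enum comment near the top of the file insists on the ordering. When no backup was attempted the result is plain FAIL or SUCCESS; otherwise the bucket is UPDATE_RESULT_BACKUP_START + 2 * reason + success. A hedged standalone sketch (the enumerators past BACKUP_HTTP_FAIL and the BackupUpdateReason values are assumptions, not taken from the visible part of this file):

// update_result_bucket_sketch.cc -- illustrates the UMA bucket arithmetic.
#include <cstdio>

enum UpdateResult {
  UPDATE_RESULT_FAIL,                 // 0
  UPDATE_RESULT_SUCCESS,              // 1
  UPDATE_RESULT_BACKUP_CONNECT_FAIL,  // 2: first backup bucket
  UPDATE_RESULT_BACKUP_CONNECT_SUCCESS,
  UPDATE_RESULT_BACKUP_HTTP_FAIL,
  // ...the remaining backup fail/success pairs are elided here.
};

// Assumed to alias the first backup bucket, as UPDATE_RESULT_BACKUP_START
// presumably does in the real file.
const int kBackupStart = UPDATE_RESULT_BACKUP_CONNECT_FAIL;

// Assumed ordering of ProtocolManager::BackupUpdateReason.
enum BackupUpdateReason {
  BACKUP_UPDATE_REASON_CONNECT,  // 0
  BACKUP_UPDATE_REASON_HTTP,     // 1
  BACKUP_UPDATE_REASON_NETWORK,  // 2
};

int main() {
  // A backup update issued after an HTTP failure of the primary request,
  // where the backup then succeeded: bucket = 2 + 2 * 1 + 1 = 5, the
  // success slot of the HTTP-reason pair.
  int bucket = kBackupStart + 2 * BACKUP_UPDATE_REASON_HTTP + /*success=*/1;
  std::printf("bucket = %d\n", bucket);
  return 0;
}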
734 GURL SafeBrowsingProtocolManager::UpdateUrl(bool is_extended_reporting) const { | 733 GURL SafeBrowsingProtocolManager::UpdateUrl(bool is_extended_reporting) const { |
(...skipping 17 matching lines...) |
752 GURL SafeBrowsingProtocolManager::GetHashUrl(bool is_extended_reporting) const { | 751 GURL SafeBrowsingProtocolManager::GetHashUrl(bool is_extended_reporting) const { |
753 std::string url = SafeBrowsingProtocolManagerHelper::ComposeUrl( | 752 std::string url = SafeBrowsingProtocolManagerHelper::ComposeUrl( |
754 url_prefix_, "gethash", client_name_, version_, additional_query_, | 753 url_prefix_, "gethash", client_name_, version_, additional_query_, |
755 is_extended_reporting); | 754 is_extended_reporting); |
756 return GURL(url); | 755 return GURL(url); |
757 } | 756 } |
758 | 757 |
759 GURL SafeBrowsingProtocolManager::NextChunkUrl(const std::string& url) const { | 758 GURL SafeBrowsingProtocolManager::NextChunkUrl(const std::string& url) const { |
760 DCHECK(CalledOnValidThread()); | 759 DCHECK(CalledOnValidThread()); |
761 std::string next_url; | 760 std::string next_url; |
762 if (!base::StartsWith(url, "http://", | 761 if (!base::StartsWith(url, "http://", base::CompareCase::INSENSITIVE_ASCII) && |
763 base::CompareCase::INSENSITIVE_ASCII) && | |
764 !base::StartsWith(url, "https://", | 762 !base::StartsWith(url, "https://", |
765 base::CompareCase::INSENSITIVE_ASCII)) { | 763 base::CompareCase::INSENSITIVE_ASCII)) { |
766 // Use https if we updated via https, otherwise http (useful for testing). | 764 // Use https if we updated via https, otherwise http (useful for testing). |
767 if (base::StartsWith(url_prefix_, "https://", | 765 if (base::StartsWith(url_prefix_, "https://", |
768 base::CompareCase::INSENSITIVE_ASCII)) | 766 base::CompareCase::INSENSITIVE_ASCII)) |
769 next_url.append("https://"); | 767 next_url.append("https://"); |
770 else | 768 else |
771 next_url.append("http://"); | 769 next_url.append("http://"); |
772 next_url.append(url); | 770 next_url.append(url); |
773 } else { | 771 } else { |
774 next_url = url; | 772 next_url = url; |
775 } | 773 } |
776 if (!additional_query_.empty()) { | 774 if (!additional_query_.empty()) { |
777 if (next_url.find("?") != std::string::npos) { | 775 if (next_url.find("?") != std::string::npos) { |
778 next_url.append("&"); | 776 next_url.append("&"); |
779 } else { | 777 } else { |
780 next_url.append("?"); | 778 next_url.append("?"); |
781 } | 779 } |
782 next_url.append(additional_query_); | 780 next_url.append(additional_query_); |
783 } | 781 } |
784 return GURL(next_url); | 782 return GURL(next_url); |
785 } | 783 } |
786 | 784 |
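NextChunkUrl() above exists mainly to absolutize the scheme-less redirect URLs that update responses hand back: it prepends https:// when updates themselves went over https (http:// otherwise), passes already-absolute URLs through, and appends additional_query_ with '?' or '&' as needed. A simplified standalone sketch (case-sensitive prefix checks instead of base::StartsWith, and a made-up host, purely for illustration):

// next_chunk_url_sketch.cc -- simplified model of NextChunkUrl().
#include <cstdio>
#include <string>

std::string NextChunkUrl(const std::string& url_prefix,
                         const std::string& additional_query,
                         const std::string& url) {
  std::string next_url;
  const bool is_absolute =
      url.compare(0, 7, "http://") == 0 || url.compare(0, 8, "https://") == 0;
  if (!is_absolute) {
    // Use https if we updated via https, otherwise http.
    next_url = (url_prefix.compare(0, 8, "https://") == 0) ? "https://"
                                                           : "http://";
    next_url += url;
  } else {
    next_url = url;
  }
  if (!additional_query.empty()) {
    next_url += (next_url.find('?') == std::string::npos) ? "?" : "&";
    next_url += additional_query;
  }
  return next_url;
}

int main() {
  std::printf("%s\n",
              NextChunkUrl("https://safebrowsing.example.com/safebrowsing",
                           "client=test", "cache.example.com/chunk_1")
                  .c_str());
  // Prints: https://cache.example.com/chunk_1?client=test
  return 0;
}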
787 SafeBrowsingProtocolManager::FullHashDetails::FullHashDetails() | 785 SafeBrowsingProtocolManager::FullHashDetails::FullHashDetails() |
788 : callback(), | 786 : callback(), is_download(false) {} |
789 is_download(false) { | |
790 } | |
791 | 787 |
792 SafeBrowsingProtocolManager::FullHashDetails::FullHashDetails( | 788 SafeBrowsingProtocolManager::FullHashDetails::FullHashDetails( |
793 FullHashCallback callback, bool is_download) | 789 FullHashCallback callback, |
794 : callback(callback), | 790 bool is_download) |
795 is_download(is_download) { | 791 : callback(callback), is_download(is_download) {} |
796 } | |
797 | 792 |
798 SafeBrowsingProtocolManager::FullHashDetails::~FullHashDetails() { | 793 SafeBrowsingProtocolManager::FullHashDetails::~FullHashDetails() {} |
799 } | |
800 | 794 |
801 SafeBrowsingProtocolManagerDelegate::~SafeBrowsingProtocolManagerDelegate() { | 795 SafeBrowsingProtocolManagerDelegate::~SafeBrowsingProtocolManagerDelegate() {} |
802 } | 796 |
| 797 } // namespace safe_browsing |