| OLD | NEW |
| 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "chrome/browser/safe_browsing/protocol_manager.h" | 5 #include "chrome/browser/safe_browsing/protocol_manager.h" |
| 6 | 6 |
| 7 #ifndef NDEBUG | 7 #ifndef NDEBUG |
| 8 #include "base/base64.h" | 8 #include "base/base64.h" |
| 9 #endif | 9 #endif |
| 10 #include "base/environment.h" | 10 #include "base/environment.h" |
| (...skipping 116 matching lines...) |
| 127 safebrowsing_reports_.end()); | 127 safebrowsing_reports_.end()); |
| 128 safebrowsing_reports_.clear(); | 128 safebrowsing_reports_.clear(); |
| 129 } | 129 } |
| 130 | 130 |
| 131 // Public API used by the SafeBrowsingService ---------------------------------- | 131 // Public API used by the SafeBrowsingService ---------------------------------- |
| 132 | 132 |
| 133 // We can only have one update or chunk request outstanding, but there may be | 133 // We can only have one update or chunk request outstanding, but there may be |
| 134 // multiple GetHash requests pending since we don't want to serialize them and | 134 // multiple GetHash requests pending since we don't want to serialize them and |
| 135 // slow down the user. | 135 // slow down the user. |
| 136 void SafeBrowsingProtocolManager::GetFullHash( | 136 void SafeBrowsingProtocolManager::GetFullHash( |
| 137 SafeBrowsingService::SafeBrowsingCheck* check, | 137 const std::vector<SBPrefix>& prefixes, |
| 138 const std::vector<SBPrefix>& prefixes) { | 138 FullHashCallback callback, |
| 139 bool is_download) { |
| 139 // If we are in GetHash backoff, we need to check if we're past the next | 140 // If we are in GetHash backoff, we need to check if we're past the next |
| 140 // allowed time. If we are, we can proceed with the request. If not, we are | 141 // allowed time. If we are, we can proceed with the request. If not, we are |
| 141 // required to return empty results (i.e. treat the page as safe). | 142 // required to return empty results (i.e. treat the page as safe). |
| 142 if (gethash_error_count_ && Time::Now() <= next_gethash_time_) { | 143 if (gethash_error_count_ && Time::Now() <= next_gethash_time_) { |
| 143 std::vector<SBFullHashResult> full_hashes; | 144 std::vector<SBFullHashResult> full_hashes; |
| 144 sb_service_->HandleGetHashResults(check, full_hashes, false); | 145 callback.Run(full_hashes, false); |
| 145 return; | 146 return; |
| 146 } | 147 } |
| 147 GURL gethash_url = GetHashUrl(); | 148 GURL gethash_url = GetHashUrl(); |
| 148 net::URLFetcher* fetcher = net::URLFetcher::Create( | 149 net::URLFetcher* fetcher = net::URLFetcher::Create( |
| 149 gethash_url, net::URLFetcher::POST, this); | 150 gethash_url, net::URLFetcher::POST, this); |
| 150 hash_requests_[fetcher] = check; | 151 hash_requests_[fetcher] = FullHashDetails(callback, is_download); |
| 151 | 152 |
| 152 std::string get_hash; | 153 std::string get_hash; |
| 153 SafeBrowsingProtocolParser parser; | 154 SafeBrowsingProtocolParser parser; |
| 154 parser.FormatGetHash(prefixes, &get_hash); | 155 parser.FormatGetHash(prefixes, &get_hash); |
| 155 | 156 |
| 156 fetcher->SetLoadFlags(net::LOAD_DISABLE_CACHE); | 157 fetcher->SetLoadFlags(net::LOAD_DISABLE_CACHE); |
| 157 fetcher->SetRequestContext(request_context_getter_); | 158 fetcher->SetRequestContext(request_context_getter_); |
| 158 fetcher->SetUploadData("text/plain", get_hash); | 159 fetcher->SetUploadData("text/plain", get_hash); |
| 159 fetcher->Start(); | 160 fetcher->Start(); |
| 160 } | 161 } |
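With the new signature, the SafeBrowsingService side supplies the result handler as a bound callback instead of threading a SafeBrowsingCheck* through the protocol manager. A minimal caller sketch, assuming FullHashCallback is a base::Callback<void(const std::vector<SBFullHashResult>&, bool)> and that the service keeps a HandleGetHashResults-style method taking the check plus those two arguments; the member name protocol_manager_ and the calling context are illustrative, not part of this hunk:

  // Sketch only: bind the per-check handler so the protocol manager no
  // longer needs to know about SafeBrowsingService::SafeBrowsingCheck.
  protocol_manager_->GetFullHash(
      prefixes,
      base::Bind(&SafeBrowsingService::HandleGetHashResults,
                 base::Unretained(this), check),
      check->is_download);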
| (...skipping 27 matching lines...) |
| 188 const net::URLFetcher* report = *sit; | 189 const net::URLFetcher* report = *sit; |
| 189 safebrowsing_reports_.erase(sit); | 190 safebrowsing_reports_.erase(sit); |
| 190 delete report; | 191 delete report; |
| 191 return; | 192 return; |
| 192 } | 193 } |
| 193 | 194 |
| 194 HashRequests::iterator it = hash_requests_.find(source); | 195 HashRequests::iterator it = hash_requests_.find(source); |
| 195 if (it != hash_requests_.end()) { | 196 if (it != hash_requests_.end()) { |
| 196 // GetHash response. | 197 // GetHash response. |
| 197 fetcher.reset(it->first); | 198 fetcher.reset(it->first); |
| 198 SafeBrowsingService::SafeBrowsingCheck* check = it->second; | 199 const FullHashDetails& details = it->second; |
| 199 std::vector<SBFullHashResult> full_hashes; | 200 std::vector<SBFullHashResult> full_hashes; |
| 200 bool can_cache = false; | 201 bool can_cache = false; |
| 201 if (source->GetStatus().is_success() && | 202 if (source->GetStatus().is_success() && |
| 202 (source->GetResponseCode() == 200 || | 203 (source->GetResponseCode() == 200 || |
| 203 source->GetResponseCode() == 204)) { | 204 source->GetResponseCode() == 204)) { |
| 204 // For tracking our GetHash false positive (204) rate, compared to real | 205 // For tracking our GetHash false positive (204) rate, compared to real |
| 205 // (200) responses. | 206 // (200) responses. |
| 206 if (source->GetResponseCode() == 200) | 207 if (source->GetResponseCode() == 200) |
| 207 RecordGetHashResult(check->is_download, GET_HASH_STATUS_200); | 208 RecordGetHashResult(details.is_download, GET_HASH_STATUS_200); |
| 208 else | 209 else |
| 209 RecordGetHashResult(check->is_download, GET_HASH_STATUS_204); | 210 RecordGetHashResult(details.is_download, GET_HASH_STATUS_204); |
| 210 can_cache = true; | 211 can_cache = true; |
| 211 gethash_error_count_ = 0; | 212 gethash_error_count_ = 0; |
| 212 gethash_back_off_mult_ = 1; | 213 gethash_back_off_mult_ = 1; |
| 213 SafeBrowsingProtocolParser parser; | 214 SafeBrowsingProtocolParser parser; |
| 214 std::string data; | 215 std::string data; |
| 215 source->GetResponseAsString(&data); | 216 source->GetResponseAsString(&data); |
| 216 parsed_ok = parser.ParseGetHash( | 217 parsed_ok = parser.ParseGetHash( |
| 217 data.data(), | 218 data.data(), |
| 218 static_cast<int>(data.length()), | 219 static_cast<int>(data.length()), |
| 219 &full_hashes); | 220 &full_hashes); |
| (...skipping 10 matching lines...) |
| 230 << " failed with error: " << source->GetStatus().error(); | 231 << " failed with error: " << source->GetStatus().error(); |
| 231 } else { | 232 } else { |
| 232 VLOG(1) << "SafeBrowsing GetHash request for: " << source->GetURL() | 233 VLOG(1) << "SafeBrowsing GetHash request for: " << source->GetURL() |
| 233 << " failed with error: " << source->GetResponseCode(); | 234 << " failed with error: " << source->GetResponseCode(); |
| 234 } | 235 } |
| 235 } | 236 } |
| 236 | 237 |
| 237 // Call back the SafeBrowsingService with full_hashes, even if there was a | 238 // Call back the SafeBrowsingService with full_hashes, even if there was a |
| 238 // parse error or an error response code (in which case full_hashes will be | 239 // parse error or an error response code (in which case full_hashes will be |
| 239 // empty). We can't block the user regardless of the error status. | 240 // empty). We can't block the user regardless of the error status. |
| 240 sb_service_->HandleGetHashResults(check, full_hashes, can_cache); | 241 details.callback.Run(full_hashes, can_cache); |
| 241 | 242 |
| 242 hash_requests_.erase(it); | 243 hash_requests_.erase(it); |
| 243 } else { | 244 } else { |
| 244 // Update or chunk response. | 245 // Update or chunk response. |
| 245 fetcher.reset(request_.release()); | 246 fetcher.reset(request_.release()); |
| 246 | 247 |
| 247 if (request_type_ == UPDATE_REQUEST) { | 248 if (request_type_ == UPDATE_REQUEST) { |
| 248 if (!fetcher.get()) { | 249 if (!fetcher.get()) { |
| 249 // We've timed out waiting for an update response, so we've cancelled | 250 // We've timed out waiting for an update response, so we've cancelled |
| 250 // the update request and scheduled a new one. Ignore this response. | 251 // the update request and scheduled a new one. Ignore this response. |
| (...skipping 485 matching lines...) |
| 736 if (!additional_query_.empty()) { | 737 if (!additional_query_.empty()) { |
| 737 if (next_url.find("?") != std::string::npos) { | 738 if (next_url.find("?") != std::string::npos) { |
| 738 next_url.append("&"); | 739 next_url.append("&"); |
| 739 } else { | 740 } else { |
| 740 next_url.append("?"); | 741 next_url.append("?"); |
| 741 } | 742 } |
| 742 next_url.append(additional_query_); | 743 next_url.append(additional_query_); |
| 743 } | 744 } |
| 744 return GURL(next_url); | 745 return GURL(next_url); |
| 745 } | 746 } |
| 747 |
| 748 SafeBrowsingProtocolManager::FullHashDetails::FullHashDetails() |
| 749 : callback(), |
| 750 is_download(false) { |
| 751 } |
| 752 |
| 753 SafeBrowsingProtocolManager::FullHashDetails::FullHashDetails( |
| 754 FullHashCallback callback, bool is_download) |
| 755 : callback(callback), |
| 756 is_download(is_download) { |
| 757 } |
| 758 |
| 759 SafeBrowsingProtocolManager::FullHashDetails::~FullHashDetails() { |
| 760 } |
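These three definitions pair with a FullHashDetails declaration in protocol_manager.h, which this hunk does not show. A sketch of the assumed shape, for orientation only (the real header may differ in naming or container choice):

  // Assumed declarations; not part of this diff.
  typedef base::Callback<void(const std::vector<SBFullHashResult>&, bool)>
      FullHashCallback;

  struct FullHashDetails {
    FullHashDetails();
    FullHashDetails(FullHashCallback callback, bool is_download);
    ~FullHashDetails();

    FullHashCallback callback;  // Run with the full-hash results and can_cache.
    bool is_download;           // Used to bucket the GetHash result histograms.
  };

  typedef base::hash_map<const net::URLFetcher*, FullHashDetails> HashRequests;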