OLD | NEW |
1 // Copyright (c) 2011 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2011 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "chrome/browser/safe_browsing/protocol_manager.h" | 5 #include "chrome/browser/safe_browsing/protocol_manager.h" |
6 | 6 |
7 #ifndef NDEBUG | 7 #ifndef NDEBUG |
8 #include "base/base64.h" | 8 #include "base/base64.h" |
9 #endif | 9 #endif |
10 #include "base/environment.h" | 10 #include "base/environment.h" |
11 #include "base/logging.h" | 11 #include "base/logging.h" |
12 #include "base/metrics/histogram.h" | 12 #include "base/metrics/histogram.h" |
13 #include "base/rand_util.h" | 13 #include "base/rand_util.h" |
14 #include "base/stl_util.h" | 14 #include "base/stl_util.h" |
15 #include "base/string_util.h" | 15 #include "base/string_util.h" |
16 #include "base/stringprintf.h" | 16 #include "base/stringprintf.h" |
17 #include "base/task.h" | 17 #include "base/task.h" |
18 #include "base/timer.h" | 18 #include "base/timer.h" |
19 #include "chrome/browser/safe_browsing/protocol_parser.h" | 19 #include "chrome/browser/safe_browsing/protocol_parser.h" |
20 #include "chrome/browser/safe_browsing/safe_browsing_service.h" | 20 #include "chrome/browser/safe_browsing/safe_browsing_service.h" |
21 #include "chrome/common/chrome_version_info.h" | 21 #include "chrome/common/chrome_version_info.h" |
22 #include "chrome/common/env_vars.h" | 22 #include "chrome/common/env_vars.h" |
23 #include "content/browser/browser_thread.h" | 23 #include "content/browser/browser_thread.h" |
| 24 #include "content/common/net/url_fetcher.h" |
24 #include "net/base/escape.h" | 25 #include "net/base/escape.h" |
25 #include "net/base/load_flags.h" | 26 #include "net/base/load_flags.h" |
26 #include "net/url_request/url_request_context_getter.h" | 27 #include "net/url_request/url_request_context_getter.h" |
27 #include "net/url_request/url_request_status.h" | 28 #include "net/url_request/url_request_status.h" |
28 | 29 |
29 using base::Time; | 30 using base::Time; |
30 using base::TimeDelta; | 31 using base::TimeDelta; |
31 | 32 |
32 // Maximum time, in seconds, from startup before we must issue an update query. | 33 // Maximum time, in seconds, from startup before we must issue an update query. |
33 static const int kSbTimerStartIntervalSec = 5 * 60; | 34 static const int kSbTimerStartIntervalSec = 5 * 60; |
(...skipping 154 matching lines...) |
188 return; | 189 return; |
189 } else { | 190 } else { |
190 initial_request_ = false; | 191 initial_request_ = false; |
191 } | 192 } |
192 } | 193 } |
193 | 194 |
194 if (!request_.get()) | 195 if (!request_.get()) |
195 IssueUpdateRequest(); | 196 IssueUpdateRequest(); |
196 } | 197 } |
197 | 198 |
198 // URLFetcher::Delegate implementation ----------------------------------------- | 199 // content::URLFetcherDelegate implementation ---------------------------------- |
199 | 200 |
200 // All SafeBrowsing request responses are handled here. | 201 // All SafeBrowsing request responses are handled here. |
201 // TODO(paulg): Clarify with the SafeBrowsing team whether a failed parse of a | 202 // TODO(paulg): Clarify with the SafeBrowsing team whether a failed parse of a |
202 // chunk should retry the download and parse of that chunk (and | 203 // chunk should retry the download and parse of that chunk (and |
203 // what back off / how many times to try), and if that affects the | 204 // what back off / how many times to try), and if that affects the |
204 // update back off. For now, a failed parse of the chunk means we | 205 // update back off. For now, a failed parse of the chunk means we |
205 // drop it. This isn't so bad because the next UPDATE_REQUEST we | 206 // drop it. This isn't so bad because the next UPDATE_REQUEST we |
206 // do will report all the chunks we have. If that chunk is still | 207 // do will report all the chunks we have. If that chunk is still |
207 // required, the SafeBrowsing servers will tell us to get it again. | 208 // required, the SafeBrowsing servers will tell us to get it again. |
208 void SafeBrowsingProtocolManager::OnURLFetchComplete( | 209 void SafeBrowsingProtocolManager::OnURLFetchComplete(const URLFetcher* source) { |
209 const URLFetcher* source, | |
210 const GURL& url, | |
211 const net::URLRequestStatus& status, | |
212 int response_code, | |
213 const net::ResponseCookies& cookies, | |
214 const std::string& data) { | |
215 scoped_ptr<const URLFetcher> fetcher; | 210 scoped_ptr<const URLFetcher> fetcher; |
216 bool parsed_ok = true; | 211 bool parsed_ok = true; |
217 bool must_back_off = false; // Reduce SafeBrowsing service query frequency. | 212 bool must_back_off = false; // Reduce SafeBrowsing service query frequency. |
218 | 213 |
219 // See if this is a safebrowsing report fetcher. We don't take any action for | 214 // See if this is a safebrowsing report fetcher. We don't take any action for |
220 // the response to those. | 215 // the response to those. |
221 std::set<const URLFetcher*>::iterator sit = safebrowsing_reports_.find( | 216 std::set<const URLFetcher*>::iterator sit = safebrowsing_reports_.find( |
222 source); | 217 source); |
223 if (sit != safebrowsing_reports_.end()) { | 218 if (sit != safebrowsing_reports_.end()) { |
224 const URLFetcher* report = *sit; | 219 const URLFetcher* report = *sit; |
225 safebrowsing_reports_.erase(sit); | 220 safebrowsing_reports_.erase(sit); |
226 delete report; | 221 delete report; |
227 return; | 222 return; |
228 } | 223 } |
229 | 224 |
230 HashRequests::iterator it = hash_requests_.find(source); | 225 HashRequests::iterator it = hash_requests_.find(source); |
231 if (it != hash_requests_.end()) { | 226 if (it != hash_requests_.end()) { |
232 // GetHash response. | 227 // GetHash response. |
233 fetcher.reset(it->first); | 228 fetcher.reset(it->first); |
234 SafeBrowsingService::SafeBrowsingCheck* check = it->second; | 229 SafeBrowsingService::SafeBrowsingCheck* check = it->second; |
235 std::vector<SBFullHashResult> full_hashes; | 230 std::vector<SBFullHashResult> full_hashes; |
236 bool can_cache = false; | 231 bool can_cache = false; |
237 if (response_code == 200 || response_code == 204) { | 232 if (source->response_code() == 200 || source->response_code() == 204) { |
238 // For tracking our GetHash false positive (204) rate, compared to real | 233 // For tracking our GetHash false positive (204) rate, compared to real |
239 // (200) responses. | 234 // (200) responses. |
240 if (response_code == 200) | 235 if (source->response_code() == 200) |
241 RecordGetHashResult(check->is_download, GET_HASH_STATUS_200); | 236 RecordGetHashResult(check->is_download, GET_HASH_STATUS_200); |
242 else | 237 else |
243 RecordGetHashResult(check->is_download, GET_HASH_STATUS_204); | 238 RecordGetHashResult(check->is_download, GET_HASH_STATUS_204); |
244 can_cache = true; | 239 can_cache = true; |
245 gethash_error_count_ = 0; | 240 gethash_error_count_ = 0; |
246 gethash_back_off_mult_ = 1; | 241 gethash_back_off_mult_ = 1; |
247 bool re_key = false; | 242 bool re_key = false; |
248 SafeBrowsingProtocolParser parser; | 243 SafeBrowsingProtocolParser parser; |
249 parsed_ok = parser.ParseGetHash(data.data(), | 244 std::string data; |
250 static_cast<int>(data.length()), | 245 source->GetResponseAsString(&data); |
251 client_key_, | 246 parsed_ok = parser.ParseGetHash( |
252 &re_key, | 247 data.data(), |
253 &full_hashes); | 248 static_cast<int>(data.length()), |
| 249 client_key_, |
| 250 &re_key, |
| 251 &full_hashes); |
254 if (!parsed_ok) { | 252 if (!parsed_ok) { |
255 // If we fail to parse it, we must still inform the SafeBrowsingService | 253 // If we fail to parse it, we must still inform the SafeBrowsingService |
256 // so that it doesn't hold up the user's request indefinitely. Not sure | 254 // so that it doesn't hold up the user's request indefinitely. Not sure |
257 // what to do at that point though! | 255 // what to do at that point though! |
258 full_hashes.clear(); | 256 full_hashes.clear(); |
259 } else { | 257 } else { |
260 if (re_key) | 258 if (re_key) |
261 HandleReKey(); | 259 HandleReKey(); |
262 } | 260 } |
263 } else { | 261 } else { |
264 HandleGetHashError(Time::Now()); | 262 HandleGetHashError(Time::Now()); |
265 if (status.status() == net::URLRequestStatus::FAILED) { | 263 if (source->status().status() == net::URLRequestStatus::FAILED) { |
266 VLOG(1) << "SafeBrowsing GetHash request for: " << source->url() | 264 VLOG(1) << "SafeBrowsing GetHash request for: " << source->url() |
267 << " failed with error: " << status.error(); | 265 << " failed with error: " << source->status().error(); |
268 } else { | 266 } else { |
269 VLOG(1) << "SafeBrowsing GetHash request for: " << source->url() | 267 VLOG(1) << "SafeBrowsing GetHash request for: " << source->url() |
270 << " failed with error: " << response_code; | 268 << " failed with error: " << source->response_code(); |
271 } | 269 } |
272 } | 270 } |
273 | 271 |
274 // Call back the SafeBrowsingService with full_hashes, even if there was a | 272 // Call back the SafeBrowsingService with full_hashes, even if there was a |
275 // parse error or an error response code (in which case full_hashes will be | 273 // parse error or an error response code (in which case full_hashes will be |
276 // empty). We can't block the user regardless of the error status. | 274 // empty). We can't block the user regardless of the error status. |
277 sb_service_->HandleGetHashResults(check, full_hashes, can_cache); | 275 sb_service_->HandleGetHashResults(check, full_hashes, can_cache); |
278 | 276 |
279 hash_requests_.erase(it); | 277 hash_requests_.erase(it); |
280 } else { | 278 } else { |
281 // Update, chunk or key response. | 279 // Update, chunk or key response. |
282 fetcher.reset(request_.release()); | 280 fetcher.reset(request_.release()); |
283 | 281 |
284 if (request_type_ == UPDATE_REQUEST) { | 282 if (request_type_ == UPDATE_REQUEST) { |
285 if (!fetcher.get()) { | 283 if (!fetcher.get()) { |
286 // We've timed out waiting for an update response, so we've cancelled | 284 // We've timed out waiting for an update response, so we've cancelled |
287 // the update request and scheduled a new one. Ignore this response. | 285 // the update request and scheduled a new one. Ignore this response. |
288 return; | 286 return; |
289 } | 287 } |
290 | 288 |
291 // Cancel the update response timeout now that we have the response. | 289 // Cancel the update response timeout now that we have the response. |
292 update_timer_.Stop(); | 290 update_timer_.Stop(); |
293 } | 291 } |
294 | 292 |
295 if (response_code == 200) { | 293 if (source->response_code() == 200) { |
296 // We have data from the SafeBrowsing service. | 294 // We have data from the SafeBrowsing service. |
297 parsed_ok = HandleServiceResponse(source->url(), | 295 std::string data; |
298 data.data(), | 296 source->GetResponseAsString(&data); |
299 static_cast<int>(data.length())); | 297 parsed_ok = HandleServiceResponse( |
| 298 source->url(), data.data(), static_cast<int>(data.length())); |
300 if (!parsed_ok) { | 299 if (!parsed_ok) { |
301 VLOG(1) << "SafeBrowsing request for: " << source->url() | 300 VLOG(1) << "SafeBrowsing request for: " << source->url() |
302 << " failed parse."; | 301 << " failed parse."; |
303 must_back_off = true; | 302 must_back_off = true; |
304 chunk_request_urls_.clear(); | 303 chunk_request_urls_.clear(); |
305 UpdateFinished(false); | 304 UpdateFinished(false); |
306 } | 305 } |
307 | 306 |
308 switch (request_type_) { | 307 switch (request_type_) { |
309 case CHUNK_REQUEST: | 308 case CHUNK_REQUEST: |
(...skipping 19 matching lines...) |
329 default: | 328 default: |
330 NOTREACHED(); | 329 NOTREACHED(); |
331 break; | 330 break; |
332 } | 331 } |
333 } else { | 332 } else { |
334 // SafeBrowsing service error or a very bad response code: back off. | 333 // SafeBrowsing service error or a very bad response code: back off. |
335 must_back_off = true; | 334 must_back_off = true; |
336 if (request_type_ == CHUNK_REQUEST) | 335 if (request_type_ == CHUNK_REQUEST) |
337 chunk_request_urls_.clear(); | 336 chunk_request_urls_.clear(); |
338 UpdateFinished(false); | 337 UpdateFinished(false); |
339 if (status.status() == net::URLRequestStatus::FAILED) { | 338 if (source->status().status() == net::URLRequestStatus::FAILED) { |
340 VLOG(1) << "SafeBrowsing request for: " << source->url() | 339 VLOG(1) << "SafeBrowsing request for: " << source->url() |
341 << " failed with error: " << status.error(); | 340 << " failed with error: " << source->status().error(); |
342 } else { | 341 } else { |
343 VLOG(1) << "SafeBrowsing request for: " << source->url() | 342 VLOG(1) << "SafeBrowsing request for: " << source->url() |
344 << " failed with error: " << response_code; | 343 << " failed with error: " << source->response_code(); |
345 } | 344 } |
346 } | 345 } |
347 } | 346 } |
348 | 347 |
349 // Schedule a new update request if we've finished retrieving all the chunks | 348 // Schedule a new update request if we've finished retrieving all the chunks |
350 // from the previous update. We treat the update request and the chunk URLs it | 349 // from the previous update. We treat the update request and the chunk URLs it |
351 // contains as an atomic unit as far as back off is concerned. | 350 // contains as an atomic unit as far as back off is concerned. |
352 if (chunk_request_urls_.empty() && | 351 if (chunk_request_urls_.empty() && |
353 (request_type_ == CHUNK_REQUEST || request_type_ == UPDATE_REQUEST)) | 352 (request_type_ == CHUNK_REQUEST || request_type_ == UPDATE_REQUEST)) |
354 ScheduleNextUpdate(must_back_off); | 353 ScheduleNextUpdate(must_back_off); |
(...skipping 470 matching lines...) |
825 if (!additional_query_.empty()) { | 824 if (!additional_query_.empty()) { |
826 if (next_url.find("?") != std::string::npos) { | 825 if (next_url.find("?") != std::string::npos) { |
827 next_url.append("&"); | 826 next_url.append("&"); |
828 } else { | 827 } else { |
829 next_url.append("?"); | 828 next_url.append("?"); |
830 } | 829 } |
831 next_url.append(additional_query_); | 830 next_url.append(additional_query_); |
832 } | 831 } |
833 return GURL(next_url); | 832 return GURL(next_url); |
834 } | 833 } |
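
The substantive change in this CL is the move from the old URLFetcher::Delegate callback, which passed the URL, request status, response code, cookies, and body as arguments, to the new content::URLFetcherDelegate interface, which passes only the fetcher and lets the handler pull that state through accessors. A minimal sketch of the two shapes, using only the accessor calls visible in the diff (the surrounding class and helper names are illustrative, not from this CL):

    // Old delegate shape: every piece of response state arrives as an argument.
    // MyDelegate and HandleData are hypothetical names for illustration.
    void MyDelegate::OnURLFetchComplete(const URLFetcher* source,
                                        const GURL& url,
                                        const net::URLRequestStatus& status,
                                        int response_code,
                                        const net::ResponseCookies& cookies,
                                        const std::string& data) {
      if (response_code == 200)
        HandleData(data);
    }

    // New delegate shape: pull the same state from the fetcher on demand.
    void MyDelegate::OnURLFetchComplete(const URLFetcher* source) {
      if (source->response_code() == 200) {
        std::string data;
        source->GetResponseAsString(&data);
        HandleData(data);
      } else if (source->status().status() == net::URLRequestStatus::FAILED) {
        VLOG(1) << "Fetch of " << source->url()
                << " failed with error: " << source->status().error();
      }
    }

This is why every hunk above replaces a captured argument (response_code, status, data) with the matching accessor on source, and why the GetHash and update paths each add a local std::string filled by GetResponseAsString() before parsing.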
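On the back-off bookkeeping: the success path in this hunk resets gethash_error_count_ and gethash_back_off_mult_, while the growth logic lives in HandleGetHashError(), which falls in the elided lines, so its formula is not shown here. Purely as a hedged illustration, an error-count-plus-multiplier back-off of the kind those two members suggest might look like the sketch below; the constants, cap, and fuzz term are assumptions for illustration, not taken from this CL:

    // Hypothetical back-off helper: grows the retry delay multiplicatively per
    // consecutive error. The 1-minute floor, 30-minute base, 8-hour ceiling,
    // and fuzz term are illustrative assumptions.
    #include <algorithm>

    static int NextBackOffSeconds(int* error_count, int* multiplier,
                                  double fuzz /* in [0, 1) */) {
      ++*error_count;
      if (*error_count == 1)
        return 60;  // First error: retry in one minute.
      if (*error_count < 6) {
        int next = static_cast<int>(*multiplier * (1.0 + fuzz) * 30 * 60);
        *multiplier = std::min(*multiplier * 2, 8);  // Cap the growth factor.
        return next;
      }
      return 8 * 60 * 60;  // Persistent errors: retry every eight hours.
    }

Whatever the real formula, the hunk's contract is visible in the code above: a 200/204 response zeroes the counters, any other outcome routes through HandleGetHashError(Time::Now()), and the update and its chunk URLs are treated as one atomic unit when ScheduleNextUpdate(must_back_off) decides whether to back off.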