OLD | NEW |
1 // Copyright (c) 2010 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2011 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "chrome/browser/safe_browsing/protocol_manager.h" | 5 #include "chrome/browser/safe_browsing/protocol_manager.h" |
6 | 6 |
7 #ifndef NDEBUG | 7 #ifndef NDEBUG |
8 #include "base/base64.h" | 8 #include "base/base64.h" |
9 #endif | 9 #endif |
10 #include "base/environment.h" | 10 #include "base/environment.h" |
11 #include "base/logging.h" | 11 #include "base/logging.h" |
(...skipping 182 matching lines...)
194 // TODO(paulg): Clarify with the SafeBrowsing team whether a failed parse of a | 194 // TODO(paulg): Clarify with the SafeBrowsing team whether a failed parse of a |
195 // chunk should retry the download and parse of that chunk (and | 195 // chunk should retry the download and parse of that chunk (and |
196 //              what back off / how many times to try), and if that affects the | 196 //              what back off / how many times to try), and if that affects the |
197 // update back off. For now, a failed parse of the chunk means we | 197 // update back off. For now, a failed parse of the chunk means we |
198 // drop it. This isn't so bad because the next UPDATE_REQUEST we | 198 // drop it. This isn't so bad because the next UPDATE_REQUEST we |
199 // do will report all the chunks we have. If that chunk is still | 199 // do will report all the chunks we have. If that chunk is still |
200 // required, the SafeBrowsing servers will tell us to get it again. | 200 // required, the SafeBrowsing servers will tell us to get it again. |
201 void SafeBrowsingProtocolManager::OnURLFetchComplete( | 201 void SafeBrowsingProtocolManager::OnURLFetchComplete( |
202 const URLFetcher* source, | 202 const URLFetcher* source, |
203 const GURL& url, | 203 const GURL& url, |
204 const URLRequestStatus& status, | 204 const net::URLRequestStatus& status, |
205 int response_code, | 205 int response_code, |
206 const ResponseCookies& cookies, | 206 const ResponseCookies& cookies, |
207 const std::string& data) { | 207 const std::string& data) { |
208 scoped_ptr<const URLFetcher> fetcher; | 208 scoped_ptr<const URLFetcher> fetcher; |
209 bool parsed_ok = true; | 209 bool parsed_ok = true; |
210 bool must_back_off = false; // Reduce SafeBrowsing service query frequency. | 210 bool must_back_off = false; // Reduce SafeBrowsing service query frequency. |
211 | 211 |
212 // See if this is a safebrowsing report fetcher. We don't take any action for | 212 // See if this is a safebrowsing report fetcher. We don't take any action for |
213 // the response to those. | 213 // the response to those. |
214 std::set<const URLFetcher*>::iterator sit = safebrowsing_reports_.find( | 214 std::set<const URLFetcher*>::iterator sit = safebrowsing_reports_.find( |
(...skipping 33 matching lines...)
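The recovery path described in the TODO above works because every update request enumerates the chunks the client already holds, so a chunk dropped after a parse failure simply shows up as a gap that the server fills in on a later cycle. As a purely illustrative sketch (the list names and chunk ranges are invented, and the wire format is an assumption based on the public SafeBrowsing v2 protocol description, not on this CL), a request body reporting held chunks looks roughly like:

    goog-malware-shavar;a:1-3,5:s:1-2
    googpub-phish-shavar;a:10-19:s:6

Each line names one list followed by the add (a:) and sub (s:) chunk ranges currently stored; a number missing from a range is what prompts the server to resend that chunk.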
248 // If we fail to parse it, we must still inform the SafeBrowsingService | 248 // If we fail to parse it, we must still inform the SafeBrowsingService |
249 // so that it doesn't hold up the user's request indefinitely. Not sure | 249 // so that it doesn't hold up the user's request indefinitely. Not sure |
250 // what to do at that point though! | 250 // what to do at that point though! |
251 full_hashes.clear(); | 251 full_hashes.clear(); |
252 } else { | 252 } else { |
253 if (re_key) | 253 if (re_key) |
254 HandleReKey(); | 254 HandleReKey(); |
255 } | 255 } |
256 } else { | 256 } else { |
257 HandleGetHashError(Time::Now()); | 257 HandleGetHashError(Time::Now()); |
258 if (status.status() == URLRequestStatus::FAILED) { | 258 if (status.status() == net::URLRequestStatus::FAILED) { |
259 VLOG(1) << "SafeBrowsing GetHash request for: " << source->url() | 259 VLOG(1) << "SafeBrowsing GetHash request for: " << source->url() |
260 << " failed with os error: " << status.os_error(); | 260 << " failed with os error: " << status.os_error(); |
261 } else { | 261 } else { |
262 VLOG(1) << "SafeBrowsing GetHash request for: " << source->url() | 262 VLOG(1) << "SafeBrowsing GetHash request for: " << source->url() |
263 << " failed with error: " << response_code; | 263 << " failed with error: " << response_code; |
264 } | 264 } |
265 } | 265 } |
266 | 266 |
267 // Call back the SafeBrowsingService with full_hashes, even if there was a | 267 // Call back the SafeBrowsingService with full_hashes, even if there was a |
268 // parse error or an error response code (in which case full_hashes will be | 268 // parse error or an error response code (in which case full_hashes will be |
(...skipping 53 matching lines...)
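The failure logging in the GetHash branch above is repeated verbatim for the update path below; a small file-local helper along these lines would capture the pattern (the helper and its name are a hypothetical refactor, not part of this CL, though every call in it appears in the surrounding code):

    // Hypothetical helper: log a failed SafeBrowsing fetch, distinguishing a
    // network-level failure (report the OS error) from an HTTP error response
    // (report the response code).
    static void LogFetchFailure(const GURL& url,
                                const net::URLRequestStatus& status,
                                int response_code) {
      if (status.status() == net::URLRequestStatus::FAILED) {
        VLOG(1) << "SafeBrowsing request for: " << url
                << " failed with os error: " << status.os_error();
      } else {
        VLOG(1) << "SafeBrowsing request for: " << url
                << " failed with error: " << response_code;
      }
    }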
322 default: | 322 default: |
323 NOTREACHED(); | 323 NOTREACHED(); |
324 break; | 324 break; |
325 } | 325 } |
326 } else { | 326 } else { |
327       // A SafeBrowsing service error, or a very bad response code: back off. | 327       // A SafeBrowsing service error, or a very bad response code: back off. |
328 must_back_off = true; | 328 must_back_off = true; |
329 if (request_type_ == CHUNK_REQUEST) | 329 if (request_type_ == CHUNK_REQUEST) |
330 chunk_request_urls_.clear(); | 330 chunk_request_urls_.clear(); |
331 UpdateFinished(false); | 331 UpdateFinished(false); |
332 if (status.status() == URLRequestStatus::FAILED) { | 332 if (status.status() == net::URLRequestStatus::FAILED) { |
333 VLOG(1) << "SafeBrowsing request for: " << source->url() | 333 VLOG(1) << "SafeBrowsing request for: " << source->url() |
334 << " failed with os error: " << status.os_error(); | 334 << " failed with os error: " << status.os_error(); |
335 } else { | 335 } else { |
336 VLOG(1) << "SafeBrowsing request for: " << source->url() | 336 VLOG(1) << "SafeBrowsing request for: " << source->url() |
337 << " failed with error: " << response_code; | 337 << " failed with error: " << response_code; |
338 } | 338 } |
339 } | 339 } |
340 } | 340 } |
341 | 341 |
342 // Schedule a new update request if we've finished retrieving all the chunks | 342 // Schedule a new update request if we've finished retrieving all the chunks |
(...skipping 449 matching lines...)
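The back-off branch above only sets the flag; the schedule itself lives in code elided from this diff. For orientation, a minimal sketch of the kind of exponential back-off the public SafeBrowsing v2 protocol describes (the function and its constants are assumptions, not the elided code):

    #include <algorithm>

    #include "base/time.h"

    // Illustrative only: the first consecutive error waits a minute; later
    // errors wait 30 minutes, doubled per error and capped at eight hours.
    base::TimeDelta NextUpdateBackOff(int consecutive_errors) {
      if (consecutive_errors <= 1)
        return base::TimeDelta::FromMinutes(1);
      int exponent = std::min(consecutive_errors - 2, 4);  // 30, 60, ..., 480 min.
      return base::TimeDelta::FromMinutes(30 * (1 << exponent));
    }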
792 if (!additional_query_.empty()) { | 792 if (!additional_query_.empty()) { |
793 if (next_url.find("?") != std::string::npos) { | 793 if (next_url.find("?") != std::string::npos) { |
794 next_url.append("&"); | 794 next_url.append("&"); |
795 } else { | 795 } else { |
796 next_url.append("?"); | 796 next_url.append("?"); |
797 } | 797 } |
798 next_url.append(additional_query_); | 798 next_url.append(additional_query_); |
799 } | 799 } |
800 return GURL(next_url); | 800 return GURL(next_url); |
801 } | 801 } |
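To make the query-appending branch above concrete, here is a standalone snippet with made-up values (the host and parameters are hypothetical, not real SafeBrowsing endpoints):

    #include <iostream>
    #include <string>

    int main() {
      // Mirrors the logic above: append with '&' when the URL already has a
      // query string, otherwise start one with '?'.
      std::string next_url = "https://update.example.test/downloads?client=x";
      std::string additional_query = "extra=1";
      next_url.append(next_url.find("?") != std::string::npos ? "&" : "?");
      next_url.append(additional_query);
      std::cout << next_url << std::endl;  // ...downloads?client=x&extra=1
      return 0;
    }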