OLD | NEW |
1 // Copyright (c) 2011 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2011 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "chrome/browser/safe_browsing/protocol_manager.h" | 5 #include "chrome/browser/safe_browsing/protocol_manager.h" |
6 | 6 |
7 #ifndef NDEBUG | 7 #ifndef NDEBUG |
8 #include "base/base64.h" | 8 #include "base/base64.h" |
9 #endif | 9 #endif |
10 #include "base/environment.h" | 10 #include "base/environment.h" |
(...skipping 158 matching lines...)
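The hunks below are dominated by one mechanical migration: the concrete URLFetcher type becomes the content::URLFetcher interface, and its unix_hacker-style members become CamelCase virtuals. A minimal before/after sketch of the pattern, using only calls that appear in this diff:

    // Before: concrete URLFetcher.
    fetcher->set_load_flags(net::LOAD_DISABLE_CACHE);
    fetcher->set_request_context(request_context_getter_);
    fetcher->set_upload_data("text/plain", body);

    // After: content::URLFetcher interface.
    fetcher->SetLoadFlags(net::LOAD_DISABLE_CACHE);
    fetcher->SetRequestContext(request_context_getter_);
    fetcher->SetUploadData("text/plain", body);

    // Accessors rename the same way:
    //   response_code() -> GetResponseCode()
    //   status()        -> GetStatus()
    //   url()           -> GetUrl()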
169 } | 169 } |
170 bool use_mac = !client_key_.empty(); | 170 bool use_mac = !client_key_.empty(); |
171 GURL gethash_url = GetHashUrl(use_mac); | 171 GURL gethash_url = GetHashUrl(use_mac); |
172 URLFetcher* fetcher = new URLFetcher(gethash_url, URLFetcher::POST, this); | 172 URLFetcher* fetcher = new URLFetcher(gethash_url, URLFetcher::POST, this); |
173 hash_requests_[fetcher] = check; | 173 hash_requests_[fetcher] = check; |
174 | 174 |
175 std::string get_hash; | 175 std::string get_hash; |
176 SafeBrowsingProtocolParser parser; | 176 SafeBrowsingProtocolParser parser; |
177 parser.FormatGetHash(prefixes, &get_hash); | 177 parser.FormatGetHash(prefixes, &get_hash); |
178 | 178 |
179 fetcher->set_load_flags(net::LOAD_DISABLE_CACHE); | 179 fetcher->SetLoadFlags(net::LOAD_DISABLE_CACHE); |
180 fetcher->set_request_context(request_context_getter_); | 180 fetcher->SetRequestContext(request_context_getter_); |
181 fetcher->set_upload_data("text/plain", get_hash); | 181 fetcher->SetUploadData("text/plain", get_hash); |
182 fetcher->Start(); | 182 fetcher->Start(); |
183 } | 183 } |
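For reference, FormatGetHash serializes the prefixes into the SafeBrowsing v2 gethash request body. As a hedged sketch (the exact format is not shown in this CL): a header line giving the prefix size and total payload size in bytes, followed by the raw prefix bytes, e.g. for two 4-byte prefixes:

    4:8
    <8 bytes: two 4-byte hash prefixes, binary>

so that request uploads 12 bytes of "text/plain" data in total.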
184 | 184 |
185 void SafeBrowsingProtocolManager::GetNextUpdate() { | 185 void SafeBrowsingProtocolManager::GetNextUpdate() { |
186 if (initial_request_) { | 186 if (initial_request_) { |
187 if (client_key_.empty() || wrapped_key_.empty()) { | 187 if (client_key_.empty() || wrapped_key_.empty()) { |
188 IssueKeyRequest(); | 188 IssueKeyRequest(); |
189 return; | 189 return; |
190 } else { | 190 } else { |
191 initial_request_ = false; | 191 initial_request_ = false; |
192 } | 192 } |
193 } | 193 } |
194 | 194 |
195 if (!request_.get()) | 195 if (!request_.get()) |
196 IssueUpdateRequest(); | 196 IssueUpdateRequest(); |
197 } | 197 } |
198 | 198 |
199 // content::URLFetcherDelegate implementation ---------------------------------- | 199 // content::URLFetcherDelegate implementation ---------------------------------- |
200 | 200 |
201 // All SafeBrowsing request responses are handled here. | 201 // All SafeBrowsing request responses are handled here. |
202 // TODO(paulg): Clarify with the SafeBrowsing team whether a failed parse of a | 202 // TODO(paulg): Clarify with the SafeBrowsing team whether a failed parse of a |
203 // chunk should retry the download and parse of that chunk (and | 203 // chunk should retry the download and parse of that chunk (and |
204 // what back off / how many times to try), and if that affects the | 204 // what back off / how many times to try), and if that affects the |
205 // update back off. For now, a failed parse of the chunk means we | 205 // update back off. For now, a failed parse of the chunk means we |
206 // drop it. This isn't so bad because the next UPDATE_REQUEST we | 206 // drop it. This isn't so bad because the next UPDATE_REQUEST we |
207 // do will report all the chunks we have. If that chunk is still | 207 // do will report all the chunks we have. If that chunk is still |
208 // required, the SafeBrowsing servers will tell us to get it again. | 208 // required, the SafeBrowsing servers will tell us to get it again. |
209 void SafeBrowsingProtocolManager::OnURLFetchComplete(const URLFetcher* source) { | 209 void SafeBrowsingProtocolManager::OnURLFetchComplete( |
210 scoped_ptr<const URLFetcher> fetcher; | 210 const content::URLFetcher* source) { |
| 211 scoped_ptr<const content::URLFetcher> fetcher; |
211 bool parsed_ok = true; | 212 bool parsed_ok = true; |
212 bool must_back_off = false; // Reduce SafeBrowsing service query frequency. | 213 bool must_back_off = false; // Reduce SafeBrowsing service query frequency. |
213 | 214 |
214 // See if this is a safebrowsing report fetcher. We don't take any action for | 215 // See if this is a safebrowsing report fetcher. We don't take any action for |
215 // the response to those. | 216 // the response to those. |
216 std::set<const URLFetcher*>::iterator sit = safebrowsing_reports_.find( | 217 std::set<const content::URLFetcher*>::iterator sit = |
217 source); | 218 safebrowsing_reports_.find(source); |
218 if (sit != safebrowsing_reports_.end()) { | 219 if (sit != safebrowsing_reports_.end()) { |
219 const URLFetcher* report = *sit; | 220 const content::URLFetcher* report = *sit; |
220 safebrowsing_reports_.erase(sit); | 221 safebrowsing_reports_.erase(sit); |
221 delete report; | 222 delete report; |
222 return; | 223 return; |
223 } | 224 } |
224 | 225 |
225 HashRequests::iterator it = hash_requests_.find(source); | 226 HashRequests::iterator it = hash_requests_.find(source); |
226 if (it != hash_requests_.end()) { | 227 if (it != hash_requests_.end()) { |
227 // GetHash response. | 228 // GetHash response. |
228 fetcher.reset(it->first); | 229 fetcher.reset(it->first); |
229 SafeBrowsingService::SafeBrowsingCheck* check = it->second; | 230 SafeBrowsingService::SafeBrowsingCheck* check = it->second; |
230 std::vector<SBFullHashResult> full_hashes; | 231 std::vector<SBFullHashResult> full_hashes; |
231 bool can_cache = false; | 232 bool can_cache = false; |
232 if (source->response_code() == 200 || source->response_code() == 204) { | 233 if (source->GetResponseCode() == 200 || source->GetResponseCode() == 204) { |
233 // For tracking our GetHash false positive (204) rate, compared to real | 234 // For tracking our GetHash false positive (204) rate, compared to real |
234 // (200) responses. | 235 // (200) responses. |
235 if (source->response_code() == 200) | 236 if (source->GetResponseCode() == 200) |
236 RecordGetHashResult(check->is_download, GET_HASH_STATUS_200); | 237 RecordGetHashResult(check->is_download, GET_HASH_STATUS_200); |
237 else | 238 else |
238 RecordGetHashResult(check->is_download, GET_HASH_STATUS_204); | 239 RecordGetHashResult(check->is_download, GET_HASH_STATUS_204); |
239 can_cache = true; | 240 can_cache = true; |
240 gethash_error_count_ = 0; | 241 gethash_error_count_ = 0; |
241 gethash_back_off_mult_ = 1; | 242 gethash_back_off_mult_ = 1; |
242 bool re_key = false; | 243 bool re_key = false; |
243 SafeBrowsingProtocolParser parser; | 244 SafeBrowsingProtocolParser parser; |
244 std::string data; | 245 std::string data; |
245 source->GetResponseAsString(&data); | 246 source->GetResponseAsString(&data); |
246 parsed_ok = parser.ParseGetHash( | 247 parsed_ok = parser.ParseGetHash( |
247 data.data(), | 248 data.data(), |
248 static_cast<int>(data.length()), | 249 static_cast<int>(data.length()), |
249 client_key_, | 250 client_key_, |
250 &re_key, | 251 &re_key, |
251 &full_hashes); | 252 &full_hashes); |
252 if (!parsed_ok) { | 253 if (!parsed_ok) { |
253 // If we fail to parse it, we must still inform the SafeBrowsingService | 254 // If we fail to parse it, we must still inform the SafeBrowsingService |
254 // so that it doesn't hold up the user's request indefinitely. Not sure | 255 // so that it doesn't hold up the user's request indefinitely. Not sure |
255 // what to do at that point though! | 256 // what to do at that point though! |
256 full_hashes.clear(); | 257 full_hashes.clear(); |
257 } else { | 258 } else { |
258 if (re_key) | 259 if (re_key) |
259 HandleReKey(); | 260 HandleReKey(); |
260 } | 261 } |
261 } else { | 262 } else { |
262 HandleGetHashError(Time::Now()); | 263 HandleGetHashError(Time::Now()); |
263 if (source->status().status() == net::URLRequestStatus::FAILED) { | 264 if (source->GetStatus().status() == net::URLRequestStatus::FAILED) { |
264 VLOG(1) << "SafeBrowsing GetHash request for: " << source->url() | 265 VLOG(1) << "SafeBrowsing GetHash request for: " << source->GetUrl() |
265 << " failed with error: " << source->status().error(); | 266 << " failed with error: " << source->GetStatus().error(); |
266 } else { | 267 } else { |
267 VLOG(1) << "SafeBrowsing GetHash request for: " << source->url() | 268 VLOG(1) << "SafeBrowsing GetHash request for: " << source->GetUrl() |
268 << " failed with error: " << source->response_code(); | 269 << " failed with error: " << source->GetResponseCode(); |
269 } | 270 } |
270 } | 271 } |
271 | 272 |
272 // Call back the SafeBrowsingService with full_hashes, even if there was a | 273 // Call back the SafeBrowsingService with full_hashes, even if there was a |
273 // parse error or an error response code (in which case full_hashes will be | 274 // parse error or an error response code (in which case full_hashes will be |
274 // empty). We can't block the user regardless of the error status. | 275 // empty). We can't block the user regardless of the error status. |
275 sb_service_->HandleGetHashResults(check, full_hashes, can_cache); | 276 sb_service_->HandleGetHashResults(check, full_hashes, can_cache); |
276 | 277 |
277 hash_requests_.erase(it); | 278 hash_requests_.erase(it); |
278 } else { | 279 } else { |
279 // Update, chunk or key response. | 280 // Update, chunk or key response. |
280 fetcher.reset(request_.release()); | 281 fetcher.reset(request_.release()); |
281 | 282 |
282 if (request_type_ == UPDATE_REQUEST) { | 283 if (request_type_ == UPDATE_REQUEST) { |
283 if (!fetcher.get()) { | 284 if (!fetcher.get()) { |
284 // We've timed out waiting for an update response, so we've cancelled | 285 // We've timed out waiting for an update response, so we've cancelled |
285 // the update request and scheduled a new one. Ignore this response. | 286 // the update request and scheduled a new one. Ignore this response. |
286 return; | 287 return; |
287 } | 288 } |
288 | 289 |
289 // Cancel the update response timeout now that we have the response. | 290 // Cancel the update response timeout now that we have the response. |
290 update_timer_.Stop(); | 291 update_timer_.Stop(); |
291 } | 292 } |
292 | 293 |
293 if (source->response_code() == 200) { | 294 if (source->GetResponseCode() == 200) { |
294 // We have data from the SafeBrowsing service. | 295 // We have data from the SafeBrowsing service. |
295 std::string data; | 296 std::string data; |
296 source->GetResponseAsString(&data); | 297 source->GetResponseAsString(&data); |
297 parsed_ok = HandleServiceResponse( | 298 parsed_ok = HandleServiceResponse( |
298 source->url(), data.data(), static_cast<int>(data.length())); | 299 source->GetUrl(), data.data(), static_cast<int>(data.length())); |
299 if (!parsed_ok) { | 300 if (!parsed_ok) { |
300 VLOG(1) << "SafeBrowsing request for: " << source->url() | 301 VLOG(1) << "SafeBrowsing request for: " << source->GetUrl() |
301 << " failed parse."; | 302 << " failed parse."; |
302 must_back_off = true; | 303 must_back_off = true; |
303 chunk_request_urls_.clear(); | 304 chunk_request_urls_.clear(); |
304 UpdateFinished(false); | 305 UpdateFinished(false); |
305 } | 306 } |
306 | 307 |
307 switch (request_type_) { | 308 switch (request_type_) { |
308 case CHUNK_REQUEST: | 309 case CHUNK_REQUEST: |
309 if (parsed_ok) | 310 if (parsed_ok) |
310 chunk_request_urls_.pop_front(); | 311 chunk_request_urls_.pop_front(); |
(...skipping 17 matching lines...)
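One detail worth noting in the handler above: the raw fetcher pointer (from hash_requests_ or request_.release()) is immediately moved into a scoped_ptr<const content::URLFetcher>, so every early return and error path deletes the fetcher exactly once when the scope unwinds. A stripped-down sketch of the idiom (the TakeOwnership helper is illustrative, not from this file):

    void Delegate::OnURLFetchComplete(const content::URLFetcher* source) {
      // Take ownership up front so all paths below free the fetcher.
      scoped_ptr<const content::URLFetcher> fetcher(TakeOwnership(source));
      if (!fetcher.get())
        return;  // Request was already cancelled; nothing to free.
      // ... any return from here on still deletes |fetcher|.
    }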
328 default: | 329 default: |
329 NOTREACHED(); | 330 NOTREACHED(); |
330 break; | 331 break; |
331 } | 332 } |
332 } else { | 333 } else { |
333 // A SafeBrowsing service error, or a very bad response code: back off. | 334 // A SafeBrowsing service error, or a very bad response code: back off. |
334 must_back_off = true; | 335 must_back_off = true; |
335 if (request_type_ == CHUNK_REQUEST) | 336 if (request_type_ == CHUNK_REQUEST) |
336 chunk_request_urls_.clear(); | 337 chunk_request_urls_.clear(); |
337 UpdateFinished(false); | 338 UpdateFinished(false); |
338 if (source->status().status() == net::URLRequestStatus::FAILED) { | 339 if (source->GetStatus().status() == net::URLRequestStatus::FAILED) { |
339 VLOG(1) << "SafeBrowsing request for: " << source->url() | 340 VLOG(1) << "SafeBrowsing request for: " << source->GetUrl() |
340 << " failed with error: " << source->status().error(); | 341 << " failed with error: " << source->GetStatus().error(); |
341 } else { | 342 } else { |
342 VLOG(1) << "SafeBrowsing request for: " << source->url() | 343 VLOG(1) << "SafeBrowsing request for: " << source->GetUrl() |
343 << " failed with error: " << source->response_code(); | 344 << " failed with error: " << source->GetResponseCode(); |
344 } | 345 } |
345 } | 346 } |
346 } | 347 } |
347 | 348 |
348 // Schedule a new update request if we've finished retrieving all the chunks | 349 // Schedule a new update request if we've finished retrieving all the chunks |
349 // from the previous update. We treat the update request and the chunk URLs it | 350 // from the previous update. We treat the update request and the chunk URLs it |
350 // contains as an atomic unit as far as back off is concerned. | 351 // contains as an atomic unit as far as back off is concerned. |
351 if (chunk_request_urls_.empty() && | 352 if (chunk_request_urls_.empty() && |
352 (request_type_ == CHUNK_REQUEST || request_type_ == UPDATE_REQUEST)) | 353 (request_type_ == CHUNK_REQUEST || request_type_ == UPDATE_REQUEST)) |
353 ScheduleNextUpdate(must_back_off); | 354 ScheduleNextUpdate(must_back_off); |
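ScheduleNextUpdate(must_back_off) feeds the standard SafeBrowsing v2 update back-off, whose implementation sits outside this hunk. As a hedged sketch of the scheme (the helper name and constants are assumptions, not code from this CL): the first error retries after about a minute, errors two through five wait multiplier * 30 minutes with the multiplier doubling each time up to a cap, and from the sixth consecutive error on the client waits a fixed long interval.

    #include <algorithm>

    // Sketch only; not the CL's implementation.
    int NextBackOffSeconds(int* error_count, int* multiplier) {
      const int kMaxMultiplier = 8;
      ++(*error_count);
      if (*error_count == 1)
        return 60;                               // ~1 minute.
      if (*error_count < 6) {
        int next = *multiplier * 30 * 60;        // multiplier * 30 minutes.
        *multiplier = std::min(2 * *multiplier, kMaxMultiplier);
        return next;
      }
      return 8 * 60 * 60;                        // Fixed long interval.
    }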
(...skipping 201 matching lines...)
555 // don't get the next url until the previous one has been written to disk so | 556 // don't get the next url until the previous one has been written to disk so |
556 // that we don't use too much memory. | 557 // that we don't use too much memory. |
557 if (request_.get() || chunk_request_urls_.empty() || chunk_pending_to_write_) | 558 if (request_.get() || chunk_request_urls_.empty() || chunk_pending_to_write_) |
558 return; | 559 return; |
559 | 560 |
560 ChunkUrl next_chunk = chunk_request_urls_.front(); | 561 ChunkUrl next_chunk = chunk_request_urls_.front(); |
561 DCHECK(!next_chunk.url.empty()); | 562 DCHECK(!next_chunk.url.empty()); |
562 GURL chunk_url = NextChunkUrl(next_chunk.url); | 563 GURL chunk_url = NextChunkUrl(next_chunk.url); |
563 request_type_ = CHUNK_REQUEST; | 564 request_type_ = CHUNK_REQUEST; |
564 request_.reset(new URLFetcher(chunk_url, URLFetcher::GET, this)); | 565 request_.reset(new URLFetcher(chunk_url, URLFetcher::GET, this)); |
565 request_->set_load_flags(net::LOAD_DISABLE_CACHE); | 566 request_->SetLoadFlags(net::LOAD_DISABLE_CACHE); |
566 request_->set_request_context(request_context_getter_); | 567 request_->SetRequestContext(request_context_getter_); |
567 chunk_request_start_ = base::Time::Now(); | 568 chunk_request_start_ = base::Time::Now(); |
568 request_->Start(); | 569 request_->Start(); |
569 } | 570 } |
570 | 571 |
571 void SafeBrowsingProtocolManager::IssueKeyRequest() { | 572 void SafeBrowsingProtocolManager::IssueKeyRequest() { |
572 GURL key_url = MacKeyUrl(); | 573 GURL key_url = MacKeyUrl(); |
573 request_type_ = GETKEY_REQUEST; | 574 request_type_ = GETKEY_REQUEST; |
574 request_.reset(new URLFetcher(key_url, URLFetcher::GET, this)); | 575 request_.reset(new URLFetcher(key_url, URLFetcher::GET, this)); |
575 request_->set_load_flags(net::LOAD_DISABLE_CACHE); | 576 request_->SetLoadFlags(net::LOAD_DISABLE_CACHE); |
576 request_->set_request_context(request_context_getter_); | 577 request_->SetRequestContext(request_context_getter_); |
577 request_->Start(); | 578 request_->Start(); |
578 } | 579 } |
579 | 580 |
580 void SafeBrowsingProtocolManager::OnGetChunksComplete( | 581 void SafeBrowsingProtocolManager::OnGetChunksComplete( |
581 const std::vector<SBListChunkRanges>& lists, bool database_error) { | 582 const std::vector<SBListChunkRanges>& lists, bool database_error) { |
582 DCHECK_EQ(request_type_, UPDATE_REQUEST); | 583 DCHECK_EQ(request_type_, UPDATE_REQUEST); |
583 if (database_error) { | 584 if (database_error) { |
584 UpdateFinished(false); | 585 UpdateFinished(false); |
585 ScheduleNextUpdate(false); | 586 ScheduleNextUpdate(false); |
586 return; | 587 return; |
(...skipping 19 matching lines...)
606 if (!found_phishing) | 607 if (!found_phishing) |
607 list_data.append(FormatList( | 608 list_data.append(FormatList( |
608 SBListChunkRanges(safe_browsing_util::kPhishingList), use_mac)); | 609 SBListChunkRanges(safe_browsing_util::kPhishingList), use_mac)); |
609 | 610 |
610 if (!found_malware) | 611 if (!found_malware) |
611 list_data.append(FormatList( | 612 list_data.append(FormatList( |
612 SBListChunkRanges(safe_browsing_util::kMalwareList), use_mac)); | 613 SBListChunkRanges(safe_browsing_util::kMalwareList), use_mac)); |
613 | 614 |
614 GURL update_url = UpdateUrl(use_mac); | 615 GURL update_url = UpdateUrl(use_mac); |
615 request_.reset(new URLFetcher(update_url, URLFetcher::POST, this)); | 616 request_.reset(new URLFetcher(update_url, URLFetcher::POST, this)); |
616 request_->set_load_flags(net::LOAD_DISABLE_CACHE); | 617 request_->SetLoadFlags(net::LOAD_DISABLE_CACHE); |
617 request_->set_request_context(request_context_getter_); | 618 request_->SetRequestContext(request_context_getter_); |
618 request_->set_upload_data("text/plain", list_data); | 619 request_->SetUploadData("text/plain", list_data); |
619 request_->Start(); | 620 request_->Start(); |
620 | 621 |
621 // Begin the update request timeout. | 622 // Begin the update request timeout. |
622 update_timer_.Start(FROM_HERE, TimeDelta::FromSeconds(kSbMaxUpdateWaitSec), | 623 update_timer_.Start(FROM_HERE, TimeDelta::FromSeconds(kSbMaxUpdateWaitSec), |
623 this, | 624 this, |
624 &SafeBrowsingProtocolManager::UpdateResponseTimeout); | 625 &SafeBrowsingProtocolManager::UpdateResponseTimeout); |
625 } | 626 } |
626 | 627 |
627 // If we haven't heard back from the server with an update response, this method | 628 // If we haven't heard back from the server with an update response, this method |
628 // will run. Close the current update session and schedule another update. | 629 // will run. Close the current update session and schedule another update. |
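The pairing here is the usual base one-shot timer pattern: Start() arms the timeout when the update request is issued, Stop() disarms it when a response arrives first, and UpdateResponseTimeout runs only if neither happens within kSbMaxUpdateWaitSec. A minimal sketch of the same idiom (class and member names are illustrative; include paths assumed for the era):

    #include "base/time.h"
    #include "base/timer.h"

    class UpdateWatcher {
     public:
      void ArmTimeout(int wait_seconds) {
        timer_.Start(FROM_HERE, base::TimeDelta::FromSeconds(wait_seconds),
                     this, &UpdateWatcher::OnTimeout);
      }
      void OnResponse() { timer_.Stop(); }  // Response won the race.

     private:
      void OnTimeout() { /* Cancel the request, schedule a retry. */ }
      base::OneShotTimer<UpdateWatcher> timer_;
    };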
(...skipping 21 matching lines...)
650 const GURL& page_url, | 651 const GURL& page_url, |
651 const GURL& referrer_url, | 652 const GURL& referrer_url, |
652 bool is_subresource, | 653 bool is_subresource, |
653 SafeBrowsingService::UrlCheckResult threat_type, | 654 SafeBrowsingService::UrlCheckResult threat_type, |
654 const std::string& post_data) { | 655 const std::string& post_data) { |
655 GURL report_url = SafeBrowsingHitUrl(malicious_url, page_url, | 656 GURL report_url = SafeBrowsingHitUrl(malicious_url, page_url, |
656 referrer_url, is_subresource, | 657 referrer_url, is_subresource, |
657 threat_type); | 658 threat_type); |
658 URLFetcher* report = new URLFetcher( | 659 URLFetcher* report = new URLFetcher( |
659 report_url, post_data.empty() ? URLFetcher::GET : URLFetcher::POST, this); | 660 report_url, post_data.empty() ? URLFetcher::GET : URLFetcher::POST, this); |
660 report->set_load_flags(net::LOAD_DISABLE_CACHE); | 661 report->SetLoadFlags(net::LOAD_DISABLE_CACHE); |
661 report->set_request_context(request_context_getter_); | 662 report->SetRequestContext(request_context_getter_); |
662 if (!post_data.empty()) | 663 if (!post_data.empty()) |
663 report->set_upload_data("text/plain", post_data); | 664 report->SetUploadData("text/plain", post_data); |
664 report->Start(); | 665 report->Start(); |
665 safebrowsing_reports_.insert(report); | 666 safebrowsing_reports_.insert(report); |
666 } | 667 } |
667 | 668 |
668 // Sends malware details for users who opt-in. | 669 // Sends malware details for users who opt-in. |
669 void SafeBrowsingProtocolManager::ReportMalwareDetails( | 670 void SafeBrowsingProtocolManager::ReportMalwareDetails( |
670 const std::string& report) { | 671 const std::string& report) { |
671 GURL report_url = MalwareDetailsUrl(); | 672 GURL report_url = MalwareDetailsUrl(); |
672 URLFetcher* fetcher = new URLFetcher(report_url, URLFetcher::POST, this); | 673 URLFetcher* fetcher = new URLFetcher(report_url, URLFetcher::POST, this); |
673 fetcher->set_load_flags(net::LOAD_DISABLE_CACHE); | 674 fetcher->SetLoadFlags(net::LOAD_DISABLE_CACHE); |
674 fetcher->set_request_context(request_context_getter_); | 675 fetcher->SetRequestContext(request_context_getter_); |
675 fetcher->set_upload_data("application/octet-stream", report); | 676 fetcher->SetUploadData("application/octet-stream", report); |
676 // Don't try too hard to send reports on failures. | 677 // Don't try too hard to send reports on failures. |
677 fetcher->set_automatically_retry_on_5xx(false); | 678 fetcher->SetAutomaticallyRetryOn5xx(false); |
678 fetcher->Start(); | 679 fetcher->Start(); |
679 safebrowsing_reports_.insert(fetcher); | 680 safebrowsing_reports_.insert(fetcher); |
680 } | 681 } |
681 | 682 |
682 | 683 |
683 // static | 684 // static |
684 std::string SafeBrowsingProtocolManager::FormatList( | 685 std::string SafeBrowsingProtocolManager::FormatList( |
685 const SBListChunkRanges& list, bool use_mac) { | 686 const SBListChunkRanges& list, bool use_mac) { |
686 std::string formatted_results; | 687 std::string formatted_results; |
687 formatted_results.append(list.name); | 688 formatted_results.append(list.name); |
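FormatList continues past this point; the shape it produces is one line per list in the v2 update request body, roughly listname;a:<add chunk ranges>:s:<sub chunk ranges>, with a trailing :mac when use_mac is set. A hedged example of a two-list body (chunk numbers invented):

    goog-phish-shavar;a:1-3,5:s:4:mac
    goog-malware-shavar;a:1-7:mac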
(...skipping 136 matching lines...)
824 if (!additional_query_.empty()) { | 825 if (!additional_query_.empty()) { |
825 if (next_url.find("?") != std::string::npos) { | 826 if (next_url.find("?") != std::string::npos) { |
826 next_url.append("&"); | 827 next_url.append("&"); |
827 } else { | 828 } else { |
828 next_url.append("?"); | 829 next_url.append("?"); |
829 } | 830 } |
830 next_url.append(additional_query_); | 831 next_url.append(additional_query_); |
831 } | 832 } |
832 return GURL(next_url); | 833 return GURL(next_url); |
833 } | 834 } |
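The effect of the appending logic above: additional_query_ is tacked onto whatever URL the update response handed back, reusing & when the URL already carries a query string. For example (URL and query invented), with additional_query_ = "client=foo", "http://host/chunk" becomes "http://host/chunk?client=foo", while "http://host/chunk?x=1" becomes "http://host/chunk?x=1&client=foo".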