Chromium Code Reviews

Unified diff: chrome/browser/safe_browsing/protocol_manager.cc

Issue 1117703002: Adjust URLFetcher::Create API so that object is returned as scoped_ptr. (Closed)
Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Remove unneeded Pass() calls (created 5 years, 7 months ago)
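
Context for the hunks below: after this CL, net::URLFetcher::Create() hands back ownership as a scoped_ptr, so call sites either keep the smart pointer (as request_ does) or call .release() where a raw pointer is still needed (as GetFullHash does for hash_requests_, which is keyed by raw fetcher pointers). The following standalone sketch shows the two call-site patterns; Fetcher, MakeFetcher(), and std::unique_ptr are stand-ins invented here for net::URLFetcher, URLFetcher::Create(), and Chromium's scoped_ptr of that era, not the real API.

// Standalone sketch (not Chromium code): MakeFetcher() stands in for the
// scoped_ptr-returning net::URLFetcher::Create(), Fetcher for net::URLFetcher.
#include <map>
#include <memory>
#include <string>

struct Fetcher {
  void Start() {}
};

// The factory now returns ownership explicitly.
std::unique_ptr<Fetcher> MakeFetcher() {
  return std::unique_ptr<Fetcher>(new Fetcher);
}

struct RequestDetails {
  std::string mime_type;
};

int main() {
  // Pattern 1: an owning member or local takes the returned pointer directly.
  // Old style: request_.reset(Create(...));  new style: request_ = Create(...);
  std::unique_ptr<Fetcher> request = MakeFetcher();
  request->Start();

  // Pattern 2: a map keyed by raw pointers still needs a raw pointer, so the
  // call site releases ownership (this mirrors the ".release()" added in
  // GetFullHash for hash_requests_) and must delete the fetchers itself.
  std::map<Fetcher*, RequestDetails> hash_requests;
  Fetcher* fetcher = MakeFetcher().release();
  hash_requests[fetcher] = RequestDetails{"text/plain"};
  fetcher->Start();

  for (auto& entry : hash_requests)
    delete entry.first;
  return 0;
}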
@@ -1,10 +1,10 @@
 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "chrome/browser/safe_browsing/protocol_manager.h"

 #include "base/environment.h"
 #include "base/logging.h"
 #include "base/memory/scoped_vector.h"
 #include "base/metrics/histogram.h"
(...skipping 165 matching lines...)
@@ -176,22 +176,23 @@
   // If we are in GetHash backoff, we need to check if we're past the next
   // allowed time. If we are, we can proceed with the request. If not, we are
   // required to return empty results (i.e. treat the page as safe).
   if (gethash_error_count_ && Time::Now() <= next_gethash_time_) {
     RecordGetHashResult(is_download, GET_HASH_BACKOFF_ERROR);
     std::vector<SBFullHashResult> full_hashes;
     callback.Run(full_hashes, base::TimeDelta());
     return;
   }
   GURL gethash_url = GetHashUrl();
-  net::URLFetcher* fetcher = net::URLFetcher::Create(
-      url_fetcher_id_++, gethash_url, net::URLFetcher::POST, this);
+  net::URLFetcher* fetcher =
+      net::URLFetcher::Create(url_fetcher_id_++, gethash_url,
+                              net::URLFetcher::POST, this).release();
   hash_requests_[fetcher] = FullHashDetails(callback, is_download);

   const std::string get_hash = safe_browsing::FormatGetHash(prefixes);

   fetcher->SetLoadFlags(net::LOAD_DISABLE_CACHE);
   fetcher->SetRequestContext(request_context_getter_.get());
   fetcher->SetUploadData("text/plain", get_hash);
   fetcher->Start();
 }

(...skipping 352 matching lines...)
@@ -550,49 +551,49 @@
   DCHECK(CalledOnValidThread());
   DCHECK_EQ(request_type_, UPDATE_REQUEST);
   DCHECK(backup_update_reason >= 0 &&
          backup_update_reason < BACKUP_UPDATE_REASON_MAX);
   if (backup_url_prefixes_[backup_update_reason].empty())
     return false;
   request_type_ = BACKUP_UPDATE_REQUEST;
   backup_update_reason_ = backup_update_reason;

   GURL backup_update_url = BackupUpdateUrl(backup_update_reason);
-  request_.reset(net::URLFetcher::Create(
-      url_fetcher_id_++, backup_update_url, net::URLFetcher::POST, this));
+  request_ = net::URLFetcher::Create(url_fetcher_id_++, backup_update_url,
+                                     net::URLFetcher::POST, this);
   request_->SetLoadFlags(net::LOAD_DISABLE_CACHE);
   request_->SetRequestContext(request_context_getter_.get());
   request_->SetUploadData("text/plain", update_list_data_);
   request_->Start();

   // Begin the update request timeout.
   timeout_timer_.Start(FROM_HERE, TimeDelta::FromSeconds(kSbMaxUpdateWaitSec),
                        this,
                        &SafeBrowsingProtocolManager::UpdateResponseTimeout);

   return true;
 }

 void SafeBrowsingProtocolManager::IssueChunkRequest() {
   DCHECK(CalledOnValidThread());
   // We are only allowed to have one request outstanding at any time. Also,
   // don't get the next url until the previous one has been written to disk so
   // that we don't use too much memory.
   if (request_.get() || chunk_request_urls_.empty() || chunk_pending_to_write_)
     return;

   ChunkUrl next_chunk = chunk_request_urls_.front();
   DCHECK(!next_chunk.url.empty());
   GURL chunk_url = NextChunkUrl(next_chunk.url);
   request_type_ = CHUNK_REQUEST;
-  request_.reset(net::URLFetcher::Create(
-      url_fetcher_id_++, chunk_url, net::URLFetcher::GET, this));
+  request_ = net::URLFetcher::Create(url_fetcher_id_++, chunk_url,
+                                     net::URLFetcher::GET, this);
   request_->SetLoadFlags(net::LOAD_DISABLE_CACHE);
   request_->SetRequestContext(request_context_getter_.get());
   chunk_request_start_ = base::Time::Now();
   request_->Start();
 }

 void SafeBrowsingProtocolManager::OnGetChunksComplete(
     const std::vector<SBListChunkRanges>& lists, bool database_error) {
   DCHECK(CalledOnValidThread());
   DCHECK_EQ(request_type_, UPDATE_REQUEST);
(...skipping 29 matching lines...)
@@ -628,22 +629,22 @@
     update_list_data_.append(safe_browsing::FormatList(
         SBListChunkRanges(safe_browsing_util::kMalwareList)));
   }

   // Large requests are (probably) a sign of database corruption.
   // Record stats to inform decisions about whether to automate
   // deletion of such databases. http://crbug.com/120219
   UMA_HISTOGRAM_COUNTS("SB2.UpdateRequestSize", update_list_data_.size());

   GURL update_url = UpdateUrl();
-  request_.reset(net::URLFetcher::Create(
-      url_fetcher_id_++, update_url, net::URLFetcher::POST, this));
+  request_ = net::URLFetcher::Create(url_fetcher_id_++, update_url,
+                                     net::URLFetcher::POST, this);
   request_->SetLoadFlags(net::LOAD_DISABLE_CACHE);
   request_->SetRequestContext(request_context_getter_.get());
   request_->SetUploadData("text/plain", update_list_data_);
   request_->Start();

   // Begin the update request timeout.
   timeout_timer_.Start(FROM_HERE, TimeDelta::FromSeconds(kSbMaxUpdateWaitSec),
                        this,
                        &SafeBrowsingProtocolManager::UpdateResponseTimeout);
 }
(...skipping 120 matching lines...)
@@ -770,10 +771,10 @@
     FullHashCallback callback, bool is_download)
     : callback(callback),
       is_download(is_download) {
 }

 SafeBrowsingProtocolManager::FullHashDetails::~FullHashDetails() {
 }

 SafeBrowsingProtocolManagerDelegate::~SafeBrowsingProtocolManagerDelegate() {
 }
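
One further annotation on the first hunk: the comment at the top of GetFullHash describes a plain error-backoff gate: if previous GetHash requests have failed and the next allowed time has not yet passed, the code skips the network request and returns an empty result set (treating the page as safe). A minimal standalone sketch of that gate, using std::chrono in place of base::Time and invented names (error_count, next_allowed_time), for illustration only:

// Standalone sketch (not Chromium code) of a GetHash-style backoff gate.
#include <chrono>
#include <iostream>
#include <string>
#include <vector>

using Clock = std::chrono::steady_clock;

struct BackoffGate {
  int error_count = 0;                  // consecutive failed requests
  Clock::time_point next_allowed_time;  // earliest time a request may go out

  // A request may be issued if no backoff is in effect or the window expired.
  bool AllowRequest() const {
    return error_count == 0 || Clock::now() > next_allowed_time;
  }
};

int main() {
  BackoffGate gate;
  gate.error_count = 1;
  gate.next_allowed_time = Clock::now() + std::chrono::minutes(5);

  std::vector<std::string> full_hashes;  // stays empty while backed off
  if (!gate.AllowRequest()) {
    std::cout << "in backoff: " << full_hashes.size()
              << " results returned (page treated as safe)\n";
    return 0;
  }
  std::cout << "issuing GetHash request\n";
  return 0;
}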