Chromium Code Reviews

Unified Diff: chrome/browser/safe_browsing/protocol_manager.cc

Issue 2276933003: Add data usage tracking for safe browsing (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Created 4 years, 3 months ago
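
This CL applies one pattern at every Safe Browsing URLFetcher call site: attach a data_use_measurement::DataUseUserData tagged with the SAFE_BROWSING service name, so the data-use measurement component can attribute the request's traffic to Safe Browsing in its metrics. A minimal sketch of that pattern follows; the include path and the TagForSafeBrowsingDataUse wrapper are assumptions for illustration, while the AttachToFetcher() call and the SAFE_BROWSING service name are taken directly from the diff below.

#include "components/data_use_measurement/core/data_use_user_data.h"
#include "net/url_request/url_fetcher.h"

// Hypothetical helper; the CL inlines this call at each fetcher site.
void TagForSafeBrowsingDataUse(net::URLFetcher* fetcher) {
  // Attaches user data that the data_use_measurement observers read when
  // the underlying URLRequest runs, attributing its bytes to SAFE_BROWSING.
  data_use_measurement::DataUseUserData::AttachToFetcher(
      fetcher, data_use_measurement::DataUseUserData::SAFE_BROWSING);
}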
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "chrome/browser/safe_browsing/protocol_manager.h"

#include <utility>

#include "base/environment.h"
#include "base/logging.h"
(...skipping 203 matching lines...)
    RecordGetHashResult(is_download, GET_HASH_BACKOFF_ERROR);
    std::vector<SBFullHashResult> full_hashes;
    callback.Run(full_hashes, base::TimeDelta());
    return;
  }
  GURL gethash_url = GetHashUrl(is_extended_reporting);
  net::URLFetcher* fetcher =
      net::URLFetcher::Create(url_fetcher_id_++, gethash_url,
                              net::URLFetcher::POST, this)
          .release();
+  data_use_measurement::DataUseUserData::AttachToFetcher(
+      fetcher, data_use_measurement::DataUseUserData::SAFE_BROWSING);

Jialiu Lin 2016/08/24 19:10:45  We are going to deprecate this protocol manager so…
Raj 2016/08/24 20:00:01  Thanks for the heads up. Added to that code as well…
  hash_requests_[fetcher] = FullHashDetails(callback, is_download);

  const std::string get_hash = FormatGetHash(prefixes);

  fetcher->SetLoadFlags(net::LOAD_DISABLE_CACHE);
  fetcher->SetRequestContext(request_context_getter_.get());
  fetcher->SetUploadData("text/plain", get_hash);
  fetcher->Start();
}
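
Note the ordering in this hunk: the fetcher is tagged immediately after creation and before Start(), so the measurement layer can attribute the request from its first byte. A condensed, illustrative restatement of that call order; this is a hypothetical free function, whereas the real code is a method on SafeBrowsingProtocolManager and also registers the fetcher in hash_requests_, which owns it until the fetch completes.

#include "components/data_use_measurement/core/data_use_user_data.h"
#include "net/base/load_flags.h"
#include "net/url_request/url_fetcher.h"
#include "url/gurl.h"

void IssueTaggedRequest(int fetcher_id, const GURL& url,
                        net::URLFetcherDelegate* delegate) {
  net::URLFetcher* fetcher =
      net::URLFetcher::Create(fetcher_id, url, net::URLFetcher::POST, delegate)
          .release();                                      // 1. create
  data_use_measurement::DataUseUserData::AttachToFetcher(  // 2. tag first
      fetcher, data_use_measurement::DataUseUserData::SAFE_BROWSING);
  fetcher->SetLoadFlags(net::LOAD_DISABLE_CACHE);          // 3. configure
  fetcher->Start();                                        // 4. start
}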
(...skipping 348 matching lines...)
  DCHECK(backup_update_reason >= 0 &&
         backup_update_reason < BACKUP_UPDATE_REASON_MAX);
  if (backup_url_prefixes_[backup_update_reason].empty())
    return false;
  request_type_ = BACKUP_UPDATE_REQUEST;
  backup_update_reason_ = backup_update_reason;

  GURL backup_update_url = BackupUpdateUrl(backup_update_reason);
  request_ = net::URLFetcher::Create(url_fetcher_id_++, backup_update_url,
                                     net::URLFetcher::POST, this);
+  data_use_measurement::DataUseUserData::AttachToFetcher(
+      request_.get(), data_use_measurement::DataUseUserData::SAFE_BROWSING);
  request_->SetLoadFlags(net::LOAD_DISABLE_CACHE);
  request_->SetRequestContext(request_context_getter_.get());
  request_->SetUploadData("text/plain", update_list_data_);
  request_->Start();

  // Begin the update request timeout.
  timeout_timer_.Start(FROM_HERE, TimeDelta::FromSeconds(kSbMaxUpdateWaitSec),
                       this,
                       &SafeBrowsingProtocolManager::UpdateResponseTimeout);

  return true;
}
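The backup update request reuses the same response-timeout pattern as the primary update. A minimal sketch of that pattern, assuming timeout_timer_ is a base::OneShotTimer and that kSbMaxUpdateWaitSec is the update deadline in seconds (both pre-existing; the CL does not touch them, and the value shown is assumed):

#include "base/location.h"
#include "base/time/time.h"
#include "base/timer/timer.h"

class UpdateTimeoutSketch {
 public:
  void BeginTimeout() {
    const int kSbMaxUpdateWaitSec = 30;  // assumed deadline, in seconds
    // One-shot: fires UpdateResponseTimeout() once if no response arrives
    // within the deadline; the real code stops the timer on completion.
    timeout_timer_.Start(FROM_HERE,
                         base::TimeDelta::FromSeconds(kSbMaxUpdateWaitSec),
                         this, &UpdateTimeoutSketch::UpdateResponseTimeout);
  }

 private:
  void UpdateResponseTimeout() { /* fall back, e.g. issue a backup request */ }

  base::OneShotTimer timeout_timer_;
};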
void SafeBrowsingProtocolManager::IssueChunkRequest() {
  DCHECK_CURRENTLY_ON(content::BrowserThread::IO);
  // We are only allowed to have one request outstanding at any time. Also,
  // don't get the next url until the previous one has been written to disk so
  // that we don't use too much memory.
  if (request_.get() || chunk_request_urls_.empty() || chunk_pending_to_write_)
    return;

  ChunkUrl next_chunk = chunk_request_urls_.front();
  DCHECK(!next_chunk.url.empty());
  GURL chunk_url = NextChunkUrl(next_chunk.url);
  request_type_ = CHUNK_REQUEST;
  request_ = net::URLFetcher::Create(url_fetcher_id_++, chunk_url,
                                     net::URLFetcher::GET, this);
+  data_use_measurement::DataUseUserData::AttachToFetcher(
+      request_.get(), data_use_measurement::DataUseUserData::SAFE_BROWSING);
  request_->SetLoadFlags(net::LOAD_DISABLE_CACHE);
  request_->SetRequestContext(request_context_getter_.get());
  chunk_request_start_ = base::Time::Now();
  request_->Start();
}
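The guard at the top of IssueChunkRequest() encodes two invariants: at most one fetch in flight, and no new chunk fetch until the previous chunk has been written to disk, which bounds memory use. A standalone restatement of that predicate as a sketch; the container type for the queued URLs is an assumption here:

#include <deque>
#include <string>

// Returns true when it is safe to issue the next chunk request: nothing in
// flight, at least one URL queued, and the previous chunk already on disk.
bool CanIssueChunkRequest(bool request_in_flight,
                          const std::deque<std::string>& chunk_request_urls,
                          bool chunk_pending_to_write) {
  return !request_in_flight && !chunk_request_urls.empty() &&
         !chunk_pending_to_write;
}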
void SafeBrowsingProtocolManager::OnGetChunksComplete(
    const std::vector<SBListChunkRanges>& lists,
    bool database_error,
    bool is_extended_reporting) {
(...skipping 31 matching lines...)
  }

  // Large requests are (probably) a sign of database corruption.
  // Record stats to inform decisions about whether to automate
  // deletion of such databases. http://crbug.com/120219
  UMA_HISTOGRAM_COUNTS("SB2.UpdateRequestSize", update_list_data_.size());

  GURL update_url = UpdateUrl(is_extended_reporting);
  request_ = net::URLFetcher::Create(url_fetcher_id_++, update_url,
                                     net::URLFetcher::POST, this);
+  data_use_measurement::DataUseUserData::AttachToFetcher(
+      request_.get(), data_use_measurement::DataUseUserData::SAFE_BROWSING);
  request_->SetLoadFlags(net::LOAD_DISABLE_CACHE);
  request_->SetRequestContext(request_context_getter_.get());
  request_->SetUploadData("text/plain", update_list_data_);
  request_->Start();

  // Begin the update request timeout.
  timeout_timer_.Start(FROM_HERE, TimeDelta::FromSeconds(kSbMaxUpdateWaitSec),
                       this,
                       &SafeBrowsingProtocolManager::UpdateResponseTimeout);
}
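The SB2.UpdateRequestSize histogram recorded above is the corruption signal mentioned in the comment: an unusually large update request body suggests a corrupt local database. For reference, UMA_HISTOGRAM_COUNTS records one sample per call into a cached histogram; a minimal usage sketch, with the wrapper function being hypothetical:

#include "base/metrics/histogram_macros.h"

// Records the request-body size so oversized (possibly corrupt) databases
// show up in aggregate metrics.
void RecordUpdateRequestSize(size_t bytes) {
  UMA_HISTOGRAM_COUNTS("SB2.UpdateRequestSize", bytes);
}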
(...skipping 117 matching lines...)
    : callback(callback), is_download(is_download) {}

SafeBrowsingProtocolManager::FullHashDetails::FullHashDetails(
    const FullHashDetails& other) = default;

SafeBrowsingProtocolManager::FullHashDetails::~FullHashDetails() {}

SafeBrowsingProtocolManagerDelegate::~SafeBrowsingProtocolManagerDelegate() {}

}  // namespace safe_browsing