Chromium Code Reviews
| Index: chrome/browser/search/suggestions/suggestions_service.cc |
| diff --git a/chrome/browser/search/suggestions/suggestions_service.cc b/chrome/browser/search/suggestions/suggestions_service.cc |
| index d4e47452c8498f4857c595f9f96d5118654bbe6f..0fd34b18e0ee2793fee0a620c8488acb5c5731d2 100644 |
| --- a/chrome/browser/search/suggestions/suggestions_service.cc |
| +++ b/chrome/browser/search/suggestions/suggestions_service.cc |
| @@ -4,6 +4,8 @@ |
| #include "chrome/browser/search/suggestions/suggestions_service.h" |
| +#include <string> |
| + |
| #include "base/memory/scoped_ptr.h" |
| #include "base/metrics/field_trial.h" |
| #include "base/metrics/histogram.h" |
| @@ -14,6 +16,7 @@ |
| #include "chrome/browser/history/history_types.h" |
| #include "chrome/browser/metrics/variations/variations_http_header_provider.h" |
| #include "chrome/browser/profiles/profile.h" |
| +#include "chrome/browser/search/suggestions/blacklist_store.h" |
| #include "chrome/browser/search/suggestions/suggestions_store.h" |
| #include "components/pref_registry/pref_registry_syncable.h" |
| #include "components/variations/variations_associated_data.h" |
| @@ -21,6 +24,7 @@ |
| #include "net/base/escape.h" |
| #include "net/base/load_flags.h" |
| #include "net/base/net_errors.h" |
| +#include "net/base/url_util.h" |
| #include "net/http/http_response_headers.h" |
| #include "net/http/http_status_code.h" |
| #include "net/http/http_util.h" |
| @@ -62,7 +66,7 @@ void DispatchRequestsAndClear( |
| std::vector<SuggestionsService::ResponseCallback>* requestors) { |
| std::vector<SuggestionsService::ResponseCallback>::iterator it; |
| for (it = requestors->begin(); it != requestors->end(); ++it) { |
| - it->Run(suggestions); |
| + if (!it->is_null()) it->Run(suggestions); |
| } |
| std::vector<SuggestionsService::ResponseCallback>().swap(*requestors); |
| } |
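
For illustration, a minimal standalone sketch of the dispatch-then-clear pattern above, using std::function as a stand-in for the base::Callback-based ResponseCallback (an assumption; the real type lives in suggestions_service.h and is not shown in this diff):

// Standalone sketch, not part of the patch.
#include <functional>
#include <iostream>
#include <string>
#include <vector>

using ResponseCallback = std::function<void(const std::string&)>;

void DispatchRequestsAndClear(const std::string& suggestions,
                              std::vector<ResponseCallback>* requestors) {
  for (std::vector<ResponseCallback>::iterator it = requestors->begin();
       it != requestors->end(); ++it) {
    if (*it)  // Mirrors the is_null() check above: skip empty callbacks.
      (*it)(suggestions);
  }
  // Swapping with a temporary empties the vector and releases its capacity,
  // which a plain clear() would not do.
  std::vector<ResponseCallback>().swap(*requestors);
}

int main() {
  std::vector<ResponseCallback> requestors;
  requestors.push_back(
      [](const std::string& s) { std::cout << "served: " << s << "\n"; });
  requestors.push_back(ResponseCallback());  // Null callback; must be skipped.
  DispatchRequestsAndClear("cached profile", &requestors);
  return 0;
}
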
| @@ -72,6 +76,18 @@ void DispatchRequestsAndClear( |
| // TODO(manzagop): make this a Variations parameter to enable tweaking. |
| const unsigned int kRequestTimeoutMs = 200; |
| +// Initial delay used when scheduling a blacklist request. |
|
Mathieu
2014/06/17 14:51:15
nit: Initial -> Default?
Also remove Initial from
manzagop (departed)
2014/06/17 15:42:23
Done.
|
| +const unsigned int kBlacklistInitialDelaySec = 1; |
| + |
| +// Multiplier on the delay used when scheduling a blacklist request, in case the |
| +// last observed request was unsuccessful. |
| +const unsigned int kBlacklistBackoffMultiplier = 2; |
| + |
| +// Maximum valid delay for scheduling a request. Candidate delays larger than |
| +// this are rejected. This means the maximum backoff is at least 5400 / 2, ie |
|
Mathieu
2014/06/17 14:51:15
*i.e.
manzagop (departed)
2014/06/17 15:42:23
Done.
|
| +// 45 minutes. |
| +const unsigned int kBacklistMaxDelaySec = 5400; |
|
Mathieu
2014/06/17 14:51:14
*Blacklist
Mathieu
2014/06/17 14:51:15
How do you pick this value? I think it should be m
manzagop (departed)
2014/06/17 15:42:23
Can this keep the phone awake and nuke the battery
manzagop (departed)
2014/06/17 15:42:23
Done.
Mathieu
2014/06/17 19:54:36
I don't think so, because this will not run if Chr
|
| + |
| } // namespace |
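
A quick way to sanity-check the 45-minute figure discussed above is to enumerate the delays the backoff rule can produce. This is a standalone sketch, not part of the patch, assuming the doubling-until-below-the-cap update implemented later in ScheduleBlacklistUpload():

// Standalone sketch, not part of the patch.
#include <iostream>

int main() {
  const unsigned int kInitialDelaySec = 1;    // kBlacklistInitialDelaySec
  const unsigned int kBackoffMultiplier = 2;  // kBlacklistBackoffMultiplier
  const unsigned int kMaxDelaySec = 5400;     // kBacklistMaxDelaySec

  unsigned int delay_sec = kInitialDelaySec;
  while (true) {
    std::cout << delay_sec << " s\n";
    unsigned int candidate = delay_sec * kBackoffMultiplier;
    if (candidate >= kMaxDelaySec)
      break;  // Candidate rejected: the delay stops growing here.
    delay_sec = candidate;
  }
  // Prints 1, 2, 4, ..., 4096: the largest delay actually used is 4096 s
  // (~68 min), which is indeed at least 5400 / 2 = 2700 s (45 min).
  return 0;
}
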
| const char kSuggestionsFieldTrialName[] = "ChromeSuggestions"; |
| @@ -83,10 +99,13 @@ const char kSuggestionsFieldTrialStateParam[] = "state"; |
| const char kSuggestionsFieldTrialStateEnabled[] = "enabled"; |
| SuggestionsService::SuggestionsService( |
|
Mathieu
2014/06/17 14:51:14
I think somewhere in the construction path (or som
manzagop (departed)
2014/06/17 15:42:23
An upload will happen after the first attempt to g
Mathieu
2014/06/17 19:54:36
No, but we should make sure that the blacklist doe
|
| - Profile* profile, scoped_ptr<SuggestionsStore> suggestions_store) |
| + Profile* profile, scoped_ptr<SuggestionsStore> suggestions_store, |
| + scoped_ptr<BlacklistStore> blacklist_store) |
| : suggestions_store_(suggestions_store.Pass()), |
| + blacklist_store_(blacklist_store.Pass()), |
| thumbnail_manager_(new ThumbnailManager(profile)), |
| profile_(profile), |
| + blacklist_delay_sec_(kBlacklistInitialDelaySec), |
| weak_ptr_factory_(this) { |
| // Obtain the URL to use to fetch suggestions data from the Variations param. |
| suggestions_url_ = |
| @@ -132,10 +151,7 @@ void SuggestionsService::FetchSuggestionsDataNoTimeout( |
| // Form new request. |
| DCHECK(waiting_requestors_.empty()); |
| waiting_requestors_.push_back(callback); |
| - |
| - pending_request_.reset(CreateSuggestionsRequest(suggestions_url_)); |
| - pending_request_->Start(); |
| - last_request_started_time_ = base::TimeTicks::Now(); |
| + IssueRequest(suggestions_url_); |
| } |
| void SuggestionsService::GetPageThumbnail( |
| @@ -145,38 +161,70 @@ void SuggestionsService::GetPageThumbnail( |
| } |
| void SuggestionsService::BlacklistURL( |
| - const GURL& candidate_url, SuggestionsService::ResponseCallback callback) { |
| + const GURL& candidate_url, |
| + const SuggestionsService::ResponseCallback& callback) { |
| DCHECK(content::BrowserThread::CurrentlyOn(content::BrowserThread::UI)); |
| waiting_requestors_.push_back(callback); |
| - if (pending_request_.get()) { |
| - if (IsBlacklistRequest(pending_request_.get())) { |
| - // Pending request is a blacklist request. Silently drop the new blacklist |
| - // request. TODO - handle this case. |
| - return; |
| - } else { |
| - // Pending request is not a blacklist request - cancel it and go on to |
| - // issuing a blacklist request. Also ensure the timeout closure does not |
| - // run; instead we'll wait for the updated suggestions before servicing |
| - // requestors. |
| - pending_request_.reset(NULL); |
| - pending_timeout_closure_.reset(NULL); |
| - } |
| - } |
| + // Blacklist locally, for immediate effect. |
| + if (!blacklist_store_->BlacklistUrl(candidate_url)) |
| + DVLOG(1) << "Failed blacklisting attempt."; |
| + |
| + // If there's an ongoing request, let it complete. |
| + if (pending_request_.get()) return; |
| // Send blacklisting request. |
| - // TODO(manzagop): make this a PUT request instead of a GET request. |
| GURL url(blacklist_url_prefix_ + |
| net::EscapeQueryParamValue(candidate_url.spec(), true)); |
| - pending_request_.reset(CreateSuggestionsRequest(url)); |
| - pending_request_->Start(); |
| - last_request_started_time_ = base::TimeTicks::Now(); |
| + IssueRequest(url); |
| +} |
| + |
| +void SuggestionsService::UploadBlacklist() { |
| + DCHECK(content::BrowserThread::CurrentlyOn(content::BrowserThread::UI)); |
| + |
| + // If there's an ongoing request, let it complete. |
| + if (pending_request_.get()) return; |
| + |
| + GURL blacklist_url; |
| + if (!blacklist_store_->GetSomeBlacklistedUrl(&blacklist_url)) |
| + return; // Local blacklist is empty. |
| + |
| + // Send blacklisting request. |
| + GURL url(blacklist_url_prefix_ + |
| + net::EscapeQueryParamValue(blacklist_url.spec(), true)); |
| + IssueRequest(url); |
| +} |
| + |
| +// static |
| +bool SuggestionsService::IsBlacklistRequest(const net::URLFetcher& request) { |
| + std::string blacklist_url_prefix = |
| + GetExperimentParam(kSuggestionsFieldTrialURLParam) + |
| + GetExperimentParam(kSuggestionsFieldTrialBlacklistSuffixParam); |
| + return StartsWithASCII(request.GetOriginalURL().spec(), blacklist_url_prefix, |
| + true); |
| +} |
| + |
| +// static |
| +bool SuggestionsService::GetBlacklistedUrl(const net::URLFetcher& request, |
| + GURL* url) { |
| + if (!IsBlacklistRequest(request)) return false; |
| + |
| + std::string blacklisted; |
| + // TODO(manzagop): don't hardcode the parameter name. DONOTSUBMIT! |
| + if (!net::GetValueForKeyInQuery(request.GetOriginalURL(), std::string("url"), |
| + &blacklisted)) |
| + return false; |
| + |
| + GURL blacklisted_url(blacklisted); |
| + blacklisted_url.Swap(url); |
| + return true; |
| } |
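
For illustration, a standalone sketch of the round trip this code relies on: the blacklist request URL is built by appending the percent-escaped candidate URL as a query value (BlacklistURL / UploadBlacklist above), and GetBlacklistedUrl() later recovers it from the query string. Here net::EscapeQueryParamValue and net::GetValueForKeyInQuery are replaced by rough hand-rolled stand-ins, and both the prefix and the "url" key are purely illustrative (the TODO above notes the real parameter name should not be hardcoded):

// Standalone sketch, not part of the patch.
#include <cctype>
#include <cstdio>
#include <iostream>
#include <string>

// Rough stand-in for net::EscapeQueryParamValue.
std::string EscapeQueryParamValue(const std::string& in) {
  std::string out;
  for (unsigned char c : in) {
    if (std::isalnum(c) || c == '-' || c == '_' || c == '.' || c == '~') {
      out += static_cast<char>(c);
    } else {
      char buf[4];
      std::snprintf(buf, sizeof(buf), "%%%02X", static_cast<unsigned int>(c));
      out += buf;
    }
  }
  return out;
}

// Rough stand-in for net::GetValueForKeyInQuery, for a single "url=" param.
bool GetValueForUrlKey(const std::string& request_url, std::string* value) {
  const std::string key = "url=";
  std::string::size_type pos = request_url.find(key);
  if (pos == std::string::npos) return false;
  std::string escaped = request_url.substr(pos + key.size());
  std::string out;
  for (std::string::size_type i = 0; i < escaped.size(); ++i) {
    if (escaped[i] == '%' && i + 2 < escaped.size()) {
      out += static_cast<char>(
          std::stoi(escaped.substr(i + 1, 2), nullptr, 16));
      i += 2;
    } else {
      out += escaped[i];
    }
  }
  *value = out;
  return true;
}

int main() {
  // Hypothetical prefix; the real one comes from Variations params.
  const std::string blacklist_url_prefix =
      "https://suggestions.example.com/blacklist?url=";
  const std::string candidate = "http://www.site.com/path?q=1";

  std::string request =
      blacklist_url_prefix + EscapeQueryParamValue(candidate);
  std::cout << "request: " << request << "\n";

  std::string recovered;
  if (GetValueForUrlKey(request, &recovered))
    std::cout << "blacklisted url: " << recovered << "\n";  // Original URL.
  return 0;
}
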
| // static |
| void SuggestionsService::RegisterProfilePrefs( |
| user_prefs::PrefRegistrySyncable* registry) { |
| SuggestionsStore::RegisterProfilePrefs(registry); |
| + BlacklistStore::RegisterProfilePrefs(registry); |
| } |
| void SuggestionsService::OnRequestTimeout() { |
| @@ -188,8 +236,8 @@ void SuggestionsService::OnURLFetchComplete(const net::URLFetcher* source) { |
| DCHECK(content::BrowserThread::CurrentlyOn(content::BrowserThread::UI)); |
| DCHECK_EQ(pending_request_.get(), source); |
| - // We no longer need the timeout closure. Delete it whether or not it has run |
| - // (if it hasn't, this cancels it). |
| + // We no longer need the timeout closure. Delete it whether or not it has run. |
| + // If it hasn't, this cancels it. |
| pending_timeout_closure_.reset(); |
| // The fetcher will be deleted when the request is handled. |
| @@ -203,6 +251,7 @@ void SuggestionsService::OnURLFetchComplete(const net::URLFetcher* source) { |
| << net::ErrorToString(request_status.error()); |
| // Dispatch the cached profile on error. |
| ServeFromCache(); |
| + ScheduleBlacklistUpload(false); |
| return; |
| } |
| @@ -212,7 +261,9 @@ void SuggestionsService::OnURLFetchComplete(const net::URLFetcher* source) { |
| if (response_code != net::HTTP_OK) { |
| // Aggressively clear the store. |
| suggestions_store_->ClearSuggestions(); |
| - DispatchRequestsAndClear(SuggestionsProfile(), &waiting_requestors_); |
| + SuggestionsProfile suggestions; |
| + FilterAndServe(&suggestions); |
|
Mathieu
2014/06/17 14:51:14
If it's an empty profile, why not keep DispatchReq
manzagop (departed)
2014/06/17 15:42:23
Done.
|
| + ScheduleBlacklistUpload(false); |
| return; |
| } |
| @@ -220,6 +271,12 @@ void SuggestionsService::OnURLFetchComplete(const net::URLFetcher* source) { |
| base::TimeTicks::Now() - last_request_started_time_; |
| UMA_HISTOGRAM_MEDIUM_TIMES("Suggestions.FetchSuccessLatency", latency); |
| + // Handle a successful blacklisting. |
| + GURL blacklisted_url; |
| + if (GetBlacklistedUrl(*source, &blacklisted_url)) { |
| + blacklist_store_->RemoveUrl(blacklisted_url); |
| + } |
| + |
| std::string suggestions_data; |
| bool success = request->GetResponseAsString(&suggestions_data); |
| DCHECK(success); |
| @@ -239,7 +296,8 @@ void SuggestionsService::OnURLFetchComplete(const net::URLFetcher* source) { |
| suggestions_store_->LoadSuggestions(&suggestions); |
| } |
| - DispatchRequestsAndClear(suggestions, &waiting_requestors_); |
| + FilterAndServe(&suggestions); |
| + ScheduleBlacklistUpload(true); |
| } |
| void SuggestionsService::Shutdown() { |
| @@ -250,12 +308,6 @@ void SuggestionsService::Shutdown() { |
| ServeFromCache(); |
| } |
| -bool SuggestionsService::IsBlacklistRequest(net::URLFetcher* request) const { |
| - DCHECK(request); |
| - return StartsWithASCII(request->GetOriginalURL().spec(), |
| - blacklist_url_prefix_, true); |
| -} |
| - |
| net::URLFetcher* SuggestionsService::CreateSuggestionsRequest(const GURL& url) { |
| net::URLFetcher* request = |
| net::URLFetcher::Create(0, url, net::URLFetcher::GET, this); |
| @@ -269,10 +321,45 @@ net::URLFetcher* SuggestionsService::CreateSuggestionsRequest(const GURL& url) { |
| return request; |
| } |
| +void SuggestionsService::IssueRequest(const GURL& url) { |
|
Mathieu
2014/06/17 14:51:14
very nit: I would have this before OnURLFetchCompl
manzagop (departed)
2014/06/17 15:42:24
Done.
|
| + pending_request_.reset(CreateSuggestionsRequest(url)); |
| + pending_request_->Start(); |
| + last_request_started_time_ = base::TimeTicks::Now(); |
| +} |
| + |
| void SuggestionsService::ServeFromCache() { |
| SuggestionsProfile suggestions; |
| suggestions_store_->LoadSuggestions(&suggestions); |
| - DispatchRequestsAndClear(suggestions, &waiting_requestors_); |
| + FilterAndServe(&suggestions); |
| +} |
| + |
| +void SuggestionsService::FilterAndServe(SuggestionsProfile* suggestions) { |
| + blacklist_store_->FilterSuggestions(suggestions); |
| + DispatchRequestsAndClear(*suggestions, &waiting_requestors_); |
| +} |
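
FilterAndServe() applies the local blacklist to whatever profile is about to be served (cached, fresh, or empty), so a locally blacklisted URL disappears from results even before the server acknowledges it. A minimal standalone sketch of that filtering step, using std::set and std::vector stand-ins since BlacklistStore::FilterSuggestions() and the SuggestionsProfile protobuf are not shown in this diff:

// Standalone sketch, not part of the patch.
#include <algorithm>
#include <iostream>
#include <set>
#include <string>
#include <vector>

void FilterSuggestions(const std::set<std::string>& blacklist,
                       std::vector<std::string>* suggestion_urls) {
  // Drop every suggestion whose URL is in the local blacklist.
  suggestion_urls->erase(
      std::remove_if(suggestion_urls->begin(), suggestion_urls->end(),
                     [&blacklist](const std::string& url) {
                       return blacklist.count(url) > 0;
                     }),
      suggestion_urls->end());
}

int main() {
  std::set<std::string> blacklist = {"http://blacklisted.example.com/"};
  std::vector<std::string> suggestions = {
      "http://kept.example.com/", "http://blacklisted.example.com/"};

  FilterSuggestions(blacklist, &suggestions);
  for (const std::string& url : suggestions)
    std::cout << url << "\n";  // Only the non-blacklisted suggestion remains.
  return 0;
}
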
| + |
| +void SuggestionsService::ScheduleBlacklistUpload(bool last_request_successful) { |
| + DCHECK(content::BrowserThread::CurrentlyOn(content::BrowserThread::UI)); |
| + |
| + // Update the scheduling delay. |
| + if (last_request_successful) { |
| + blacklist_delay_sec_ = kBlacklistInitialDelaySec; |
| + } else { |
| + unsigned int candidate_delay = |
| + blacklist_delay_sec_ * kBlacklistBackoffMultiplier; |
| + if (candidate_delay < kBacklistMaxDelaySec) |
| + blacklist_delay_sec_ = candidate_delay; |
| + } |
| + |
| + // Schedule a blacklist upload task. |
| + GURL blacklist_url; |
| + if (blacklist_store_->GetSomeBlacklistedUrl(&blacklist_url)) { |
|
Mathieu
2014/06/17 14:51:14
Again "Some" seems random.
manzagop (departed)
2014/06/17 15:42:23
Done.
|
| + base::Closure blacklist_cb = base::Bind( |
| + &SuggestionsService::UploadBlacklist, weak_ptr_factory_.GetWeakPtr()); |
| + BrowserThread::PostDelayedTask( |
| + BrowserThread::UI, FROM_HERE, blacklist_cb, |
| + base::TimeDelta::FromSeconds(blacklist_delay_sec_)); |
| + } |
| } |
| } // namespace suggestions |