Index: chrome/browser/extensions/updater/extension_downloader.cc
diff --git a/chrome/browser/extensions/updater/extension_downloader.cc b/chrome/browser/extensions/updater/extension_downloader.cc
index 361d41ead591e515e359bd8ec513739b2716fd7c..2278e98b6ef80093624286b2a6d593e4bcf0ea9b 100644
--- a/chrome/browser/extensions/updater/extension_downloader.cc
+++ b/chrome/browser/extensions/updater/extension_downloader.cc
@@ -31,9 +31,11 @@
 #include "content/public/browser/browser_thread.h"
 #include "content/public/browser/notification_details.h"
 #include "content/public/browser/notification_service.h"
+#include "extensions/browser/webstore_oauth2_token_provider.h"
 #include "net/base/backoff_entry.h"
 #include "net/base/load_flags.h"
 #include "net/base/net_errors.h"
+#include "net/http/http_request_headers.h"
 #include "net/url_request/url_fetcher.h"
 #include "net/url_request/url_request_context_getter.h"
 #include "net/url_request/url_request_status.h"
@@ -81,16 +83,22 @@ const int kMaxAuthUserValue = 10;
 const char kNotFromWebstoreInstallSource[] = "notfromwebstore";
 const char kDefaultInstallSource[] = "";
 
-#define RETRY_HISTOGRAM(name, retry_count, url) \
-  if ((url).DomainIs("google.com")) { \
-    UMA_HISTOGRAM_CUSTOM_COUNTS( \
-        "Extensions." name "RetryCountGoogleUrl", retry_count, 1, \
-        kMaxRetries, kMaxRetries+1); \
-  } else { \
-    UMA_HISTOGRAM_CUSTOM_COUNTS( \
-        "Extensions." name "RetryCountOtherUrl", retry_count, 1, \
-        kMaxRetries, kMaxRetries+1); \
-  }
+const char kGoogleDotCom[] = "google.com";
+
+#define RETRY_HISTOGRAM(name, retry_count, url) \
+  if ((url).DomainIs(kGoogleDotCom)) { \
+    UMA_HISTOGRAM_CUSTOM_COUNTS("Extensions." name "RetryCountGoogleUrl", \
+                                retry_count, \
+                                1, \
+                                kMaxRetries, \
+                                kMaxRetries + 1); \
+  } else { \
+    UMA_HISTOGRAM_CUSTOM_COUNTS("Extensions." name "RetryCountOtherUrl", \
+                                retry_count, \
+                                1, \
+                                kMaxRetries, \
+                                kMaxRetries + 1); \
+  }
 
 bool ShouldRetryRequest(const net::URLRequestStatus& status,
                         int response_code) {
@@ -100,31 +108,6 @@ bool ShouldRetryRequest(const net::URLRequestStatus& status,
           status.status() == net::URLRequestStatus::FAILED);
 }
 
-bool ShouldRetryRequestWithCookies(const net::URLRequestStatus& status,
-                                   int response_code,
-                                   bool included_cookies) {
-  if (included_cookies)
-    return false;
-
-  if (status.status() == net::URLRequestStatus::CANCELED)
-    return true;
-
-  // Retry if a 401 or 403 is received.
-  return (status.status() == net::URLRequestStatus::SUCCESS &&
-          (response_code == 401 || response_code == 403));
-}
-
-bool ShouldRetryRequestWithNextUser(const net::URLRequestStatus& status,
-                                    int response_code,
-                                    bool included_cookies) {
-  // Retry if a 403 is received in response to a request including cookies.
-  // Note that receiving a 401 in response to a request which included cookies
-  // should indicate that the |authuser| index was out of bounds for the profile
-  // and therefore Chrome should NOT retry with another index.
-  return (status.status() == net::URLRequestStatus::SUCCESS &&
-          response_code == 403 && included_cookies);
-}
-
 // This parses and updates a URL query such that the value of the |authuser|
 // query parameter is incremented by 1. If parameter was not present in the URL,
 // it will be added with a value of 1. All other query keys and values are
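(A simplified sketch of the |authuser| bumping described in the comment above, using std::string in place of the GURL plumbing; the helper name and the string-based parsing are illustrative only, and the real IncrementAuthUserIndex additionally gives up once the index passes kMaxAuthUserValue:)

#include <string>

// Illustrative stand-in for IncrementAuthUserIndex(): bumps "authuser=N"
// to "authuser=N+1" in a query string, or appends "authuser=1" if the
// parameter is absent. The real code walks the query with URL utilities
// and rebuilds the GURL; this sketch assumes a well-formed query.
std::string BumpAuthUser(const std::string& query) {
  const std::string kKey = "authuser=";
  size_t pos = query.find(kKey);
  if (pos == std::string::npos)
    return query.empty() ? kKey + "1" : query + "&" + kKey + "1";
  size_t value_start = pos + kKey.size();
  size_t value_end = query.find('&', value_start);
  if (value_end == std::string::npos)
    value_end = query.size();
  int value = std::stoi(query.substr(value_start, value_end - value_start));
  return query.substr(0, value_start) + std::to_string(value + 1) +
         query.substr(value_end);
}

// BumpAuthUser("authuser=1") == "authuser=2"
// BumpAuthUser("x=y")        == "x=y&authuser=1"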
@@ -166,7 +149,8 @@ UpdateDetails::UpdateDetails(const std::string& id, const Version& version)
 UpdateDetails::~UpdateDetails() {}
 
 ExtensionDownloader::ExtensionFetch::ExtensionFetch()
-    : url(), is_protected(false) {}
+    : url(), credentials(CREDENTIALS_NONE) {
+}
 
 ExtensionDownloader::ExtensionFetch::ExtensionFetch(
     const std::string& id,
@@ -174,24 +158,66 @@ ExtensionDownloader::ExtensionFetch::ExtensionFetch(
     const std::string& package_hash,
     const std::string& version,
     const std::set<int>& request_ids)
-    : id(id), url(url), package_hash(package_hash), version(version),
-      request_ids(request_ids), is_protected(false) {}
+    : id(id),
+      url(url),
+      package_hash(package_hash),
+      version(version),
+      request_ids(request_ids),
+      credentials(CREDENTIALS_NONE) {
+}
 
 ExtensionDownloader::ExtensionFetch::~ExtensionFetch() {}
 
+bool ExtensionDownloader::ExtensionFetch::IterateCredentialsAfterFailure(
+    const net::URLRequestStatus& status,
+    int response_code,
+    bool support_oauth2) {
+  bool auth_failure = status.status() == net::URLRequestStatus::CANCELED ||
+                      (status.status() == net::URLRequestStatus::SUCCESS &&
+                       (response_code == 401 || response_code == 403));
+  if (!auth_failure) {
+    return false;
+  }
+  switch (credentials) {
+    case CREDENTIALS_NONE:
+      if (url.DomainIs(kGoogleDotCom) && support_oauth2) {
+        credentials = CREDENTIALS_OAUTH2_TOKEN;
+      } else {
+        credentials = CREDENTIALS_COOKIES;
+      }
+      break;
+    case CREDENTIALS_OAUTH2_TOKEN:
+      credentials = CREDENTIALS_COOKIES;
+      break;
+    case CREDENTIALS_COOKIES:
+      if (response_code == 403) {
+        return IncrementAuthUserIndex(&url);
+      }
+      return false;
+    default:
+      NOTREACHED();
+  }
+  return true;
+}
[Review thread on IterateCredentialsAfterFailure]
asargent_no_longer_on_chrome (2014/08/06 00:26:29):
I think this function pushes ExtensionFetch over t…
Ken Rockot (use gerrit already) (2014/08/06 16:49:55):
Actually, I feel like the requirement for this spe…
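(To summarize the state machine above: each authentication failure moves the fetch one rung up a fixed ladder, from anonymous to OAuth2 (google.com URLs only, and only when a token provider exists) to cookies, and finally to the next |authuser| index. A minimal standalone sketch of that ladder; the function and parameter names below are illustrative, with plain parameters standing in for net::URLRequestStatus:)

enum Credentials {
  CREDENTIALS_NONE,
  CREDENTIALS_OAUTH2_TOKEN,
  CREDENTIALS_COOKIES,
};

// Mirrors the switch in IterateCredentialsAfterFailure(): returns true if
// the fetch should be retried with upgraded credentials. With cookies
// already in play, only a 403 justifies trying the next |authuser| index;
// a 401 means the index was out of range, so retrying would not help.
bool NextCredentials(bool is_google_url,
                     bool support_oauth2,
                     int response_code,
                     Credentials* credentials) {
  switch (*credentials) {
    case CREDENTIALS_NONE:
      *credentials = (is_google_url && support_oauth2)
                         ? CREDENTIALS_OAUTH2_TOKEN
                         : CREDENTIALS_COOKIES;
      return true;
    case CREDENTIALS_OAUTH2_TOKEN:
      *credentials = CREDENTIALS_COOKIES;
      return true;
    case CREDENTIALS_COOKIES:
      return response_code == 403;  // Caller then bumps |authuser|.
  }
  return false;
}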
+
 ExtensionDownloader::ExtensionDownloader(
     ExtensionDownloaderDelegate* delegate,
-    net::URLRequestContextGetter* request_context)
+    net::URLRequestContextGetter* request_context,
+    IdentityProvider* webstore_identity_provider)
     : delegate_(delegate),
       request_context_(request_context),
       weak_ptr_factory_(this),
       manifests_queue_(&kDefaultBackoffPolicy,
-          base::Bind(&ExtensionDownloader::CreateManifestFetcher,
-                     base::Unretained(this))),
+                       base::Bind(&ExtensionDownloader::CreateManifestFetcher,
+                                  base::Unretained(this))),
       extensions_queue_(&kDefaultBackoffPolicy,
-          base::Bind(&ExtensionDownloader::CreateExtensionFetcher,
-                     base::Unretained(this))),
+                        base::Bind(&ExtensionDownloader::CreateExtensionFetcher,
+                                   base::Unretained(this))),
       extension_cache_(NULL) {
+  if (webstore_identity_provider) {
+    webstore_token_provider_.reset(
+        new WebstoreOAuth2TokenProvider(webstore_identity_provider));
+  }
   DCHECK(delegate_);
   DCHECK(request_context_);
 }
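(Callers opt in to the OAuth2 rung simply by passing an identity provider; a hypothetical call site, not taken from this patch:)

// Hypothetical call site: passing NULL keeps the pre-patch cookie-only
// behavior, while a real IdentityProvider enables the OAuth2 escalation
// step for google.com update URLs.
scoped_ptr<ExtensionDownloader> downloader(new ExtensionDownloader(
    delegate, request_context, identity_provider));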
@@ -306,7 +332,7 @@ bool ExtensionDownloader::AddExtensionData(const std::string& id,
     return false;
   }
 
-  if (update_url.DomainIs("google.com")) {
+  if (update_url.DomainIs(kGoogleDotCom)) {
     url_stats_.google_url_count++;
   } else if (update_url.is_empty()) {
     url_stats_.no_url_count++;
@@ -580,7 +606,7 @@ void ExtensionDownloader::HandleManifestResults(
 
   // If the manifest response included a <daystart> element, we want to save
   // that value for any extensions which had sent a ping in the request.
-  if (fetch_data.base_url().DomainIs("google.com") &&
+  if (fetch_data.base_url().DomainIs(kGoogleDotCom) &&
       results->daystart_elapsed_seconds >= 0) {
     Time day_start =
         Time::Now() - TimeDelta::FromSeconds(results->daystart_elapsed_seconds);
@@ -722,16 +748,19 @@ void ExtensionDownloader::NotifyDelegateDownloadFinished(
 
 void ExtensionDownloader::CreateExtensionFetcher() {
   const ExtensionFetch* fetch = extensions_queue_.active_request();
+  extension_fetcher_.reset(net::URLFetcher::Create(
+      kExtensionFetcherId, fetch->url, net::URLFetcher::GET, this));
+  extension_fetcher_->SetRequestContext(request_context_);
+  extension_fetcher_->SetAutomaticallyRetryOnNetworkChanges(3);
+
   int load_flags = net::LOAD_DISABLE_CACHE;
-  if (!fetch->is_protected || !fetch->url.SchemeIs("https")) {
+  bool is_https = fetch->url.SchemeIs("https");
[Review thread on the "https" scheme check]
asargent_no_longer_on_chrome (2014/08/06 00:26:29):
can you switch this to url::kHttpsScheme while you're at it?
Ken Rockot (use gerrit already) (2014/08/06 16:49:55):
Done.
+  if (fetch->credentials != ExtensionFetch::CREDENTIALS_COOKIES || !is_https) {
     load_flags |= net::LOAD_DO_NOT_SEND_COOKIES |
                   net::LOAD_DO_NOT_SAVE_COOKIES;
   }
-  extension_fetcher_.reset(net::URLFetcher::Create(
-      kExtensionFetcherId, fetch->url, net::URLFetcher::GET, this));
-  extension_fetcher_->SetRequestContext(request_context_);
   extension_fetcher_->SetLoadFlags(load_flags);
-  extension_fetcher_->SetAutomaticallyRetryOnNetworkChanges(3);
+
   // Download CRX files to a temp file. The blacklist is small and will be
   // processed in memory, so it is fetched into a string.
   if (fetch->id != kBlacklistAppID) {
@@ -739,9 +768,16 @@ void ExtensionDownloader::CreateExtensionFetcher() {
         BrowserThread::GetMessageLoopProxyForThread(BrowserThread::FILE));
   }
 
-  VLOG(2) << "Starting fetch of " << fetch->url << " for " << fetch->id;
-
-  extension_fetcher_->Start();
+  // If we should use OAuth2, fetch a token first.
+  if (fetch->credentials == ExtensionFetch::CREDENTIALS_OAUTH2_TOKEN &&
+      is_https && webstore_token_provider_) {
+    webstore_token_provider_->FetchToken(
+        base::Bind(&ExtensionDownloader::OnWebstoreOAuth2TokenReceived,
+                   weak_ptr_factory_.GetWeakPtr()));
+  } else {
+    VLOG(2) << "Starting fetch of " << fetch->url << " for " << fetch->id;
+    extension_fetcher_->Start();
+  }
 }
 
 void ExtensionDownloader::OnCRXFetchComplete(
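(The subtle control-flow change above is the deferred start: when OAuth2 credentials are selected, Start() waits for the token callback instead of running immediately. A toy model of that hand-off, with std::function standing in for base::Bind plus the weak pointer; every name here is illustrative:)

#include <functional>
#include <iostream>
#include <string>

// Toy fetcher: configured up front, started only once credentials are
// settled, mirroring how |extension_fetcher_| is built before the branch.
struct ToyFetcher {
  std::string auth_header;
  void Start() { std::cout << "GET with [" << auth_header << "]\n"; }
};

// Plays the role of OnWebstoreOAuth2TokenReceived(): attach the header on
// success, but Start() either way, so a failed token fetch degrades to an
// unauthenticated request that can still escalate to cookies on retry.
void OnToken(ToyFetcher* fetcher, bool success, const std::string& token) {
  if (success)
    fetcher->auth_header = "Authorization:Bearer " + token;
  fetcher->Start();
}

int main() {
  ToyFetcher fetcher;
  std::function<void(bool, const std::string&)> callback =
      [&fetcher](bool success, const std::string& token) {
        OnToken(&fetcher, success, token);
      };
  // The real token provider is asynchronous; synchronous is fine for a toy.
  callback(true, "hypothetical-token");
  return 0;
}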
@@ -750,7 +786,8 @@ void ExtensionDownloader::OnCRXFetchComplete(
     const net::URLRequestStatus& status,
     int response_code,
     const base::TimeDelta& backoff_delay) {
-  const std::string& id = extensions_queue_.active_request()->id;
+  ExtensionFetch& active_request = *extensions_queue_.active_request();
+  const std::string& id = active_request.id;
   if (status.status() == net::URLRequestStatus::SUCCESS &&
       (response_code == 200 || url.SchemeIsFile())) {
     RETRY_HISTOGRAM("CrxFetchSuccess",
@@ -769,22 +806,13 @@ void ExtensionDownloader::OnCRXFetchComplete(
     } else {
       NotifyDelegateDownloadFinished(fetch_data.Pass(), crx_path, true);
     }
-  } else if (ShouldRetryRequestWithCookies(
-                 status,
-                 response_code,
-                 extensions_queue_.active_request()->is_protected)) {
-    // Requeue the fetch with |is_protected| set, enabling cookies.
-    extensions_queue_.active_request()->is_protected = true;
-    extensions_queue_.RetryRequest(backoff_delay);
-  } else if (ShouldRetryRequestWithNextUser(
-                 status,
-                 response_code,
-                 extensions_queue_.active_request()->is_protected) &&
-             IncrementAuthUserIndex(&extensions_queue_.active_request()->url)) {
+  } else if (active_request.IterateCredentialsAfterFailure(
+                 status,
+                 response_code,
+                 webstore_token_provider_ != NULL)) {
     extensions_queue_.RetryRequest(backoff_delay);
   } else {
-    const std::set<int>& request_ids =
-        extensions_queue_.active_request()->request_ids;
+    const std::set<int>& request_ids = active_request.request_ids;
     const ExtensionDownloaderDelegate::PingResult& ping = ping_results_[id];
     VLOG(1) << "Failed to fetch extension '" << url.possibly_invalid_spec()
             << "' response code:" << response_code;
@@ -830,4 +858,20 @@ void ExtensionDownloader::NotifyUpdateFound(const std::string& id,
                     content::Details<UpdateDetails>(&updateInfo));
 }
 
+void ExtensionDownloader::OnWebstoreOAuth2TokenReceived(
+    bool success,
+    const std::string& token) {
+  // If for whatever reason the token fetch fails, the fetch will still be
+  // retried with no Authorization header; in such cases the download is
+  // probably going to fail again, but the fetcher will then fall back onto
+  // cookie-based authentication.
+  if (success) {
+    extension_fetcher_->AddExtraRequestHeader(
+        base::StringPrintf("%s:Bearer %s",
+                           net::HttpRequestHeaders::kAuthorization,
+                           token.c_str()));
+  }
+  extension_fetcher_->Start();
+}
+
 }  // namespace extensions
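(For reference, the StringPrintf call in OnWebstoreOAuth2TokenReceived() yields a header in the "name:value" form that URLFetcher::AddExtraRequestHeader() accepts; a standalone sketch with a made-up token value:)

#include <string>

#include "base/strings/stringprintf.h"
#include "net/http/http_request_headers.h"

// Builds the same header string the callback above attaches, e.g.
// "Authorization:Bearer ya29.hypothetical-token". kAuthorization comes
// from the net/http/http_request_headers.h include added by this patch.
std::string MakeAuthorizationHeader(const std::string& token) {
  return base::StringPrintf("%s:Bearer %s",
                            net::HttpRequestHeaders::kAuthorization,
                            token.c_str());
}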