Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(360)

Unified Diff: chrome/browser/extensions/updater/extension_downloader.cc

Issue 434493002: OAuth2 support for Webstore downloads. (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src
Patch Set: Created 6 years, 4 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View side-by-side diff with in-line comments
Download patch
Index: chrome/browser/extensions/updater/extension_downloader.cc
diff --git a/chrome/browser/extensions/updater/extension_downloader.cc b/chrome/browser/extensions/updater/extension_downloader.cc
index 361d41ead591e515e359bd8ec513739b2716fd7c..318cbfe7f65b066bdf091c4ec35e1a8e40b0190a 100644
--- a/chrome/browser/extensions/updater/extension_downloader.cc
+++ b/chrome/browser/extensions/updater/extension_downloader.cc
@@ -31,9 +31,11 @@
#include "content/public/browser/browser_thread.h"
#include "content/public/browser/notification_details.h"
#include "content/public/browser/notification_service.h"
+#include "extensions/browser/webstore_oauth2_token_provider.h"
#include "net/base/backoff_entry.h"
#include "net/base/load_flags.h"
#include "net/base/net_errors.h"
+#include "net/http/http_request_headers.h"
#include "net/url_request/url_fetcher.h"
#include "net/url_request/url_request_context_getter.h"
#include "net/url_request/url_request_status.h"
@@ -81,16 +83,22 @@ const int kMaxAuthUserValue = 10;
const char kNotFromWebstoreInstallSource[] = "notfromwebstore";
const char kDefaultInstallSource[] = "";
-#define RETRY_HISTOGRAM(name, retry_count, url) \
- if ((url).DomainIs("google.com")) { \
- UMA_HISTOGRAM_CUSTOM_COUNTS( \
- "Extensions." name "RetryCountGoogleUrl", retry_count, 1, \
- kMaxRetries, kMaxRetries+1); \
- } else { \
- UMA_HISTOGRAM_CUSTOM_COUNTS( \
- "Extensions." name "RetryCountOtherUrl", retry_count, 1, \
- kMaxRetries, kMaxRetries+1); \
- }
+const char kGoogleDotCom[] = "google.com";
+
+#define RETRY_HISTOGRAM(name, retry_count, url) \
+ if ((url).DomainIs(kGoogleDotCom)) { \
+ UMA_HISTOGRAM_CUSTOM_COUNTS("Extensions." name "RetryCountGoogleUrl", \
+ retry_count, \
+ 1, \
+ kMaxRetries, \
+ kMaxRetries + 1); \
+ } else { \
+ UMA_HISTOGRAM_CUSTOM_COUNTS("Extensions." name "RetryCountOtherUrl", \
+ retry_count, \
+ 1, \
+ kMaxRetries, \
+ kMaxRetries + 1); \
+ }
bool ShouldRetryRequest(const net::URLRequestStatus& status,
int response_code) {
@@ -100,31 +108,6 @@ bool ShouldRetryRequest(const net::URLRequestStatus& status,
status.status() == net::URLRequestStatus::FAILED);
}
-bool ShouldRetryRequestWithCookies(const net::URLRequestStatus& status,
- int response_code,
- bool included_cookies) {
- if (included_cookies)
- return false;
-
- if (status.status() == net::URLRequestStatus::CANCELED)
- return true;
-
- // Retry if a 401 or 403 is received.
- return (status.status() == net::URLRequestStatus::SUCCESS &&
- (response_code == 401 || response_code == 403));
-}
-
-bool ShouldRetryRequestWithNextUser(const net::URLRequestStatus& status,
- int response_code,
- bool included_cookies) {
- // Retry if a 403 is received in response to a request including cookies.
- // Note that receiving a 401 in response to a request which included cookies
- // should indicate that the |authuser| index was out of bounds for the profile
- // and therefore Chrome should NOT retry with another index.
- return (status.status() == net::URLRequestStatus::SUCCESS &&
- response_code == 403 && included_cookies);
-}
-
// This parses and updates a URL query such that the value of the |authuser|
// query parameter is incremented by 1. If parameter was not present in the URL,
// it will be added with a value of 1. All other query keys and values are
@@ -166,7 +149,8 @@ UpdateDetails::UpdateDetails(const std::string& id, const Version& version)
UpdateDetails::~UpdateDetails() {}
ExtensionDownloader::ExtensionFetch::ExtensionFetch()
- : url(), is_protected(false) {}
+ : url(), credentials(CREDENTIALS_NONE) {
+}
ExtensionDownloader::ExtensionFetch::ExtensionFetch(
const std::string& id,
@@ -174,24 +158,34 @@ ExtensionDownloader::ExtensionFetch::ExtensionFetch(
const std::string& package_hash,
const std::string& version,
const std::set<int>& request_ids)
- : id(id), url(url), package_hash(package_hash), version(version),
- request_ids(request_ids), is_protected(false) {}
+ : id(id),
+ url(url),
+ package_hash(package_hash),
+ version(version),
+ request_ids(request_ids),
+ credentials(CREDENTIALS_NONE) {
+}
ExtensionDownloader::ExtensionFetch::~ExtensionFetch() {}
ExtensionDownloader::ExtensionDownloader(
ExtensionDownloaderDelegate* delegate,
- net::URLRequestContextGetter* request_context)
+ net::URLRequestContextGetter* request_context,
+ IdentityProvider* webstore_identity_provider)
: delegate_(delegate),
request_context_(request_context),
weak_ptr_factory_(this),
manifests_queue_(&kDefaultBackoffPolicy,
- base::Bind(&ExtensionDownloader::CreateManifestFetcher,
- base::Unretained(this))),
+ base::Bind(&ExtensionDownloader::CreateManifestFetcher,
+ base::Unretained(this))),
extensions_queue_(&kDefaultBackoffPolicy,
- base::Bind(&ExtensionDownloader::CreateExtensionFetcher,
- base::Unretained(this))),
+ base::Bind(&ExtensionDownloader::CreateExtensionFetcher,
+ base::Unretained(this))),
extension_cache_(NULL) {
+ if (webstore_identity_provider) {
+ webstore_token_provider_.reset(
+ new WebstoreOAuth2TokenProvider(webstore_identity_provider));
+ }
DCHECK(delegate_);
DCHECK(request_context_);
}
@@ -306,7 +300,7 @@ bool ExtensionDownloader::AddExtensionData(const std::string& id,
return false;
}
- if (update_url.DomainIs("google.com")) {
+ if (update_url.DomainIs(kGoogleDotCom)) {
url_stats_.google_url_count++;
} else if (update_url.is_empty()) {
url_stats_.no_url_count++;
@@ -580,7 +574,7 @@ void ExtensionDownloader::HandleManifestResults(
// If the manifest response included a <daystart> element, we want to save
// that value for any extensions which had sent a ping in the request.
- if (fetch_data.base_url().DomainIs("google.com") &&
+ if (fetch_data.base_url().DomainIs(kGoogleDotCom) &&
results->daystart_elapsed_seconds >= 0) {
Time day_start =
Time::Now() - TimeDelta::FromSeconds(results->daystart_elapsed_seconds);
@@ -722,16 +716,19 @@ void ExtensionDownloader::NotifyDelegateDownloadFinished(
void ExtensionDownloader::CreateExtensionFetcher() {
const ExtensionFetch* fetch = extensions_queue_.active_request();
+ extension_fetcher_.reset(net::URLFetcher::Create(
+ kExtensionFetcherId, fetch->url, net::URLFetcher::GET, this));
+ extension_fetcher_->SetRequestContext(request_context_);
+ extension_fetcher_->SetAutomaticallyRetryOnNetworkChanges(3);
+
int load_flags = net::LOAD_DISABLE_CACHE;
- if (!fetch->is_protected || !fetch->url.SchemeIs("https")) {
+ bool is_secure = fetch->url.SchemeIsSecure();
+ if (fetch->credentials != ExtensionFetch::CREDENTIALS_COOKIES || !is_secure) {
load_flags |= net::LOAD_DO_NOT_SEND_COOKIES |
net::LOAD_DO_NOT_SAVE_COOKIES;
}
- extension_fetcher_.reset(net::URLFetcher::Create(
- kExtensionFetcherId, fetch->url, net::URLFetcher::GET, this));
- extension_fetcher_->SetRequestContext(request_context_);
extension_fetcher_->SetLoadFlags(load_flags);
- extension_fetcher_->SetAutomaticallyRetryOnNetworkChanges(3);
+
// Download CRX files to a temp file. The blacklist is small and will be
// processed in memory, so it is fetched into a string.
if (fetch->id != kBlacklistAppID) {
@@ -739,9 +736,16 @@ void ExtensionDownloader::CreateExtensionFetcher() {
BrowserThread::GetMessageLoopProxyForThread(BrowserThread::FILE));
}
- VLOG(2) << "Starting fetch of " << fetch->url << " for " << fetch->id;
-
- extension_fetcher_->Start();
+ // If we should use OAuth2, fetch a token first.
+ if (fetch->credentials == ExtensionFetch::CREDENTIALS_OAUTH2_TOKEN &&
+ is_secure && webstore_token_provider_) {
+ webstore_token_provider_->FetchToken(
+ base::Bind(&ExtensionDownloader::OnWebstoreOAuth2TokenReceived,
+ weak_ptr_factory_.GetWeakPtr()));
+ } else {
+ VLOG(2) << "Starting fetch of " << fetch->url << " for " << fetch->id;
+ extension_fetcher_->Start();
+ }
}
void ExtensionDownloader::OnCRXFetchComplete(
@@ -750,7 +754,8 @@ void ExtensionDownloader::OnCRXFetchComplete(
const net::URLRequestStatus& status,
int response_code,
const base::TimeDelta& backoff_delay) {
- const std::string& id = extensions_queue_.active_request()->id;
+ ExtensionFetch& active_request = *extensions_queue_.active_request();
+ const std::string& id = active_request.id;
if (status.status() == net::URLRequestStatus::SUCCESS &&
(response_code == 200 || url.SchemeIsFile())) {
RETRY_HISTOGRAM("CrxFetchSuccess",
@@ -769,22 +774,13 @@ void ExtensionDownloader::OnCRXFetchComplete(
} else {
NotifyDelegateDownloadFinished(fetch_data.Pass(), crx_path, true);
}
- } else if (ShouldRetryRequestWithCookies(
- status,
- response_code,
- extensions_queue_.active_request()->is_protected)) {
- // Requeue the fetch with |is_protected| set, enabling cookies.
- extensions_queue_.active_request()->is_protected = true;
- extensions_queue_.RetryRequest(backoff_delay);
- } else if (ShouldRetryRequestWithNextUser(
- status,
- response_code,
- extensions_queue_.active_request()->is_protected) &&
- IncrementAuthUserIndex(&extensions_queue_.active_request()->url)) {
+ } else if (IterateFetchCredentialsAfterFailure(
+ &active_request,
+ status,
+ response_code)) {
extensions_queue_.RetryRequest(backoff_delay);
} else {
- const std::set<int>& request_ids =
- extensions_queue_.active_request()->request_ids;
+ const std::set<int>& request_ids = active_request.request_ids;
const ExtensionDownloaderDelegate::PingResult& ping = ping_results_[id];
VLOG(1) << "Failed to fetch extension '" << url.possibly_invalid_spec()
<< "' response code:" << response_code;
@@ -830,4 +826,52 @@ void ExtensionDownloader::NotifyUpdateFound(const std::string& id,
content::Details<UpdateDetails>(&updateInfo));
}
+// Advances |fetch| to the next set of credentials to try after an
+// authentication failure, returning true if the fetch should be retried
+// with the updated credentials and false if all options are exhausted.
+// Progression: NONE -> OAUTH2_TOKEN (google.com URLs with a token provider
+// only, else straight to COOKIES) -> COOKIES -> incrementing the |authuser|
+// URL parameter (403 responses only).
+bool ExtensionDownloader::IterateFetchCredentialsAfterFailure(
+    ExtensionFetch* fetch,
+    const net::URLRequestStatus& status,
+    int response_code) {
+  // An auth failure is a 401/403 response, or a CANCELED request status
+  // (NOTE(review): presumably a cancelled auth challenge — confirm against
+  // the fetcher's cancellation semantics).
+  bool auth_failure = status.status() == net::URLRequestStatus::CANCELED ||
+                      (status.status() == net::URLRequestStatus::SUCCESS &&
+                       (response_code == 401 || response_code == 403));
+  if (!auth_failure) {
+    return false;
+  }
+  switch (fetch->credentials) {
+    case ExtensionFetch::CREDENTIALS_NONE:
+      // OAuth2 is only attempted for webstore (google.com) URLs, and only
+      // when an identity provider was supplied at construction time.
+      if (fetch->url.DomainIs(kGoogleDotCom) && webstore_token_provider_) {
+        fetch->credentials = ExtensionFetch::CREDENTIALS_OAUTH2_TOKEN;
+      } else {
+        fetch->credentials = ExtensionFetch::CREDENTIALS_COOKIES;
+      }
+      break;
+    case ExtensionFetch::CREDENTIALS_OAUTH2_TOKEN:
+      // Token-based auth failed; fall back to cookie-based auth.
+      fetch->credentials = ExtensionFetch::CREDENTIALS_COOKIES;
+      break;
+    case ExtensionFetch::CREDENTIALS_COOKIES:
+      // A 403 with cookies may mean the wrong profile user was used; retry
+      // with the next |authuser| index. A 401 means the index was out of
+      // bounds, so do NOT iterate further (returns false below).
+      if (response_code == 403) {
+        return IncrementAuthUserIndex(&fetch->url);
+      }
+      return false;
+    default:
+      NOTREACHED();
+  }
+  return true;
+}
+
+// Callback invoked when the webstore OAuth2 token fetch completes; starts
+// the (already-configured) extension fetch, attaching an Authorization
+// header when a token was obtained.
+void ExtensionDownloader::OnWebstoreOAuth2TokenReceived(
+    bool success,
+    const std::string& token) {
+  // If for whatever reason the token fetch fails, the fetch will still be
+  // retried with no Authorization header; in such cases the download is
+  // probably going to fail again, but the fetcher will then fall back onto
+  // cookie-based authentication.
+  if (success) {
+    // Header must be "Authorization: Bearer <token>" — note the space after
+    // the colon (reviewer nit from patch review, RFC 6750 section 2.1).
+    extension_fetcher_->AddExtraRequestHeader(
+        base::StringPrintf("%s: Bearer %s",
+                           net::HttpRequestHeaders::kAuthorization,
+                           token.c_str()));
+  }
+  extension_fetcher_->Start();
+}
+
} // namespace extensions

Powered by Google App Engine
This is Rietveld 408576698