Chromium Code Reviews

Index: chrome/browser/extensions/updater/extension_downloader.cc
diff --git a/chrome/browser/extensions/updater/extension_downloader.cc b/chrome/browser/extensions/updater/extension_downloader.cc
index ca4193c6e5fb23c8dfc34f7ac07a581e3c6a64e5..e7ead1826c091d7b86d820cccf3956a7fc1b16a2 100644
--- a/chrome/browser/extensions/updater/extension_downloader.cc
+++ b/chrome/browser/extensions/updater/extension_downloader.cc
@@ -77,14 +77,15 @@ const char kNotFromWebstoreInstallSource[] = "notfromwebstore";
 const char kDefaultInstallSource[] = "";
 #define RETRY_HISTOGRAM(name, retry_count, url) \
-  if ((url).DomainIs("google.com")) \
+  if ((url).DomainIs("google.com")) { \
Ken Rockot(use gerrit already)
2014/01/22 22:34:52
Just cleaning up an uninteresting presubmit warning.
     UMA_HISTOGRAM_CUSTOM_COUNTS( \
         "Extensions." name "RetryCountGoogleUrl", retry_count, 1, \
         kMaxRetries, kMaxRetries+1); \
-  else \
+  } else { \
     UMA_HISTOGRAM_CUSTOM_COUNTS( \
         "Extensions." name "RetryCountOtherUrl", retry_count, 1, \
-        kMaxRetries, kMaxRetries+1)
+        kMaxRetries, kMaxRetries+1) \
+  }
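The added braces satisfy the presubmit style check on multi-line if/else bodies, and they also close off the classic dangling-else hazard that statement-like macros invite. A minimal sketch of that hazard, using a hypothetical single-branch macro rather than this CL's code:

#include <iostream>

// Hypothetical macro with an unbraced if, mirroring the pre-change shape.
#define WARN_IF_NEGATIVE(n) \
  if ((n) < 0)              \
    std::cout << "negative\n";

int main() {
  int n = 5;
  if (n < 100)
    WARN_IF_NEGATIVE(n)      // the macro's if captures the else below...
  else
    std::cout << "large\n";  // ...so this prints for any n in [0, 100).
  return 0;
}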
 bool ShouldRetryRequest(const net::URLRequestStatus& status,
                         int response_code) {
@@ -110,7 +111,7 @@ ExtensionDownloader::ExtensionFetch::ExtensionFetch(
     const std::string& version,
     const std::set<int>& request_ids)
     : id(id), url(url), package_hash(package_hash), version(version),
-      request_ids(request_ids) {}
+      request_ids(request_ids), is_protected(false) {}

 ExtensionDownloader::ExtensionFetch::~ExtensionFetch() {}
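The matching declaration change lives in extension_downloader.h, which this excerpt doesn't show. A rough sketch of the struct after this CL, with member types inferred from the initializer list above rather than copied from the header:

// Sketch only; fields inferred from the constructor, not the header.
struct ExtensionFetch {
  std::string id;
  GURL url;                    // GURL is Chromium's URL class
  std::string package_hash;
  std::string version;
  std::set<int> request_ids;
  bool is_protected;           // set after a 401 so the retry sends cookies
};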
@@ -624,23 +625,25 @@ void ExtensionDownloader::FetchUpdatedExtension(
 }

 void ExtensionDownloader::CreateExtensionFetcher() {
+  const ExtensionFetch* fetch = extensions_queue_.active_request();
+  int load_flags = net::LOAD_DISABLE_CACHE;
+  if (!fetch->is_protected) {
+    load_flags |= net::LOAD_DO_NOT_SEND_COOKIES |
+                  net::LOAD_DO_NOT_SAVE_COOKIES;
+  }
   extension_fetcher_.reset(net::URLFetcher::Create(
-      kExtensionFetcherId, extensions_queue_.active_request()->url,
-      net::URLFetcher::GET, this));
+      kExtensionFetcherId, fetch->url, net::URLFetcher::GET, this));
   extension_fetcher_->SetRequestContext(request_context_);
-  extension_fetcher_->SetLoadFlags(net::LOAD_DO_NOT_SEND_COOKIES |
-                                   net::LOAD_DO_NOT_SAVE_COOKIES |
-                                   net::LOAD_DISABLE_CACHE);
+  extension_fetcher_->SetLoadFlags(load_flags);
   extension_fetcher_->SetAutomaticallyRetryOnNetworkChanges(3);
   // Download CRX files to a temp file. The blacklist is small and will be
   // processed in memory, so it is fetched into a string.
-  if (extensions_queue_.active_request()->id != kBlacklistAppID) {
+  if (fetch->id != kBlacklistAppID) {
     extension_fetcher_->SaveResponseToTemporaryFile(
         BrowserThread::GetMessageLoopProxyForThread(BrowserThread::FILE));
   }
-  VLOG(2) << "Starting fetch of " << extensions_queue_.active_request()->url
-          << " for " << extensions_queue_.active_request()->id;
+  VLOG(2) << "Starting fetch of " << fetch->url << " for " << fetch->id;
   extension_fetcher_->Start();
 }
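Restated outside the diff: every fetch still bypasses the cache, and only a protected retry is allowed to send and save cookies. The constants below are real net/ load flags; the helper itself is just an illustration, not part of the CL:

#include "net/base/load_flags.h"

// Illustrative helper showing the two flag sets a fetch can run with.
int LoadFlagsFor(bool is_protected) {
  int flags = net::LOAD_DISABLE_CACHE;  // all fetches bypass the cache
  if (!is_protected) {
    // The first attempt stays cookieless, matching the old behavior.
    flags |= net::LOAD_DO_NOT_SEND_COOKIES | net::LOAD_DO_NOT_SAVE_COOKIES;
  }
  return flags;  // a protected retry may send and save cookies
}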
@@ -651,9 +654,9 @@ void ExtensionDownloader::OnCRXFetchComplete(
     const net::URLRequestStatus& status,
     int response_code,
     const base::TimeDelta& backoff_delay) {
-  const std::string& id = extensions_queue_.active_request()->id;
-  const std::set<int>& request_ids =
-      extensions_queue_.active_request()->request_ids;
+  ExtensionFetch* fetch = extensions_queue_.active_request();
+  const std::string& id = fetch->id;
+  const std::set<int>& request_ids = fetch->request_ids;
   const ExtensionDownloaderDelegate::PingResult& ping = ping_results_[id];

   if (status.status() == net::URLRequestStatus::SUCCESS &&
@@ -664,8 +667,12 @@ void ExtensionDownloader::OnCRXFetchComplete(
     // Take ownership of the file at |crx_path|.
     CHECK(source->GetResponseAsFilePath(true, &crx_path));
     delegate_->OnExtensionDownloadFinished(
-        id, crx_path, url, extensions_queue_.active_request()->version,
-        ping, request_ids);
+        id, crx_path, url, fetch->version, ping, request_ids);
+  } else if (status.status() == net::URLRequestStatus::SUCCESS &&
+             response_code == 401 && !fetch->is_protected) {
+    // On 401, requeue this fetch with cookies enabled.
+    fetch->is_protected = true;
+    extensions_queue_.RetryRequest(backoff_delay);
   } else {
     VLOG(1) << "Failed to fetch extension '" << url.possibly_invalid_spec()
             << "' response code:" << response_code;
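Taken together, the new 401 handling is a small two-state machine: the first attempt is cookieless, and exactly one authenticated retry is permitted. A self-contained model of that flow, using stand-in types instead of the real net/ classes:

#include <iostream>

// Stand-in for ExtensionFetch; only the field added by this CL matters.
struct Fetch {
  bool is_protected = false;  // false on the first, cookieless attempt
};

// Mirrors the new branch in OnCRXFetchComplete: retry once, with cookies
// enabled, when the server demands authentication.
bool ShouldRetryWithCookies(const Fetch& fetch, int response_code) {
  return response_code == 401 && !fetch.is_protected;
}

int main() {
  Fetch fetch;
  if (ShouldRetryWithCookies(fetch, 401)) {
    fetch.is_protected = true;  // the re-queued fetch may send cookies
    std::cout << "re-queueing fetch with cookies enabled\n";
  }
  // A second 401 falls through to the normal failure path, because
  // is_protected is already true.
  return 0;
}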