Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(102)

Side by Side Diff: chrome/browser/extensions/updater/extension_downloader.cc

Issue 654363002: Move ExtensionDownloader to //extensions (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Created 6 years, 2 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
(Empty)
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "chrome/browser/extensions/updater/extension_downloader.h"
6
7 #include <utility>
8
9 #include "base/bind.h"
10 #include "base/command_line.h"
11 #include "base/files/file_path.h"
12 #include "base/location.h"
13 #include "base/logging.h"
14 #include "base/metrics/histogram.h"
15 #include "base/metrics/sparse_histogram.h"
16 #include "base/profiler/scoped_profile.h"
17 #include "base/stl_util.h"
18 #include "base/strings/string_number_conversions.h"
19 #include "base/strings/string_util.h"
20 #include "base/strings/stringprintf.h"
21 #include "base/time/time.h"
22 #include "base/version.h"
23 #include "content/public/browser/browser_thread.h"
24 #include "content/public/browser/notification_details.h"
25 #include "content/public/browser/notification_service.h"
26 #include "extensions/browser/extensions_browser_client.h"
27 #include "extensions/browser/notification_types.h"
28 #include "extensions/browser/updater/extension_cache.h"
29 #include "extensions/browser/updater/request_queue_impl.h"
30 #include "extensions/browser/updater/safe_manifest_parser.h"
31 #include "extensions/common/extension_urls.h"
32 #include "extensions/common/manifest_url_handlers.h"
33 #include "google_apis/gaia/identity_provider.h"
34 #include "net/base/backoff_entry.h"
35 #include "net/base/load_flags.h"
36 #include "net/base/net_errors.h"
37 #include "net/http/http_request_headers.h"
38 #include "net/http/http_status_code.h"
39 #include "net/url_request/url_fetcher.h"
40 #include "net/url_request/url_request_context_getter.h"
41 #include "net/url_request/url_request_status.h"
42
43 using base::Time;
44 using base::TimeDelta;
45 using content::BrowserThread;
46
47 namespace extensions {
48
// Pseudo extension id under which the webstore blacklist is fetched; it is
// special-cased in HandleManifestResults() and CreateExtensionFetcher() (the
// blacklist payload is kept in memory rather than saved to a temp file).
const char ExtensionDownloader::kBlacklistAppID[] = "com.google.crx.blacklist";
50
51 namespace {
52
53 const net::BackoffEntry::Policy kDefaultBackoffPolicy = {
54 // Number of initial errors (in sequence) to ignore before applying
55 // exponential back-off rules.
56 0,
57
58 // Initial delay for exponential back-off in ms.
59 2000,
60
61 // Factor by which the waiting time will be multiplied.
62 2,
63
64 // Fuzzing percentage. ex: 10% will spread requests randomly
65 // between 90%-100% of the calculated time.
66 0.1,
67
68 // Maximum amount of time we are willing to delay our request in ms.
69 -1,
70
71 // Time to keep an entry from being discarded even when it
72 // has no significant state, -1 to never discard.
73 -1,
74
75 // Don't use initial delay unless the last request was an error.
76 false,
77 };
78
79 const char kAuthUserQueryKey[] = "authuser";
80
81 const int kMaxAuthUserValue = 10;
82 const int kMaxOAuth2Attempts = 3;
83
84 const char kNotFromWebstoreInstallSource[] = "notfromwebstore";
85 const char kDefaultInstallSource[] = "";
86
87 const char kGoogleDotCom[] = "google.com";
88 const char kTokenServiceConsumerId[] = "extension_downloader";
89 const char kWebstoreOAuth2Scope[] =
90 "https://www.googleapis.com/auth/chromewebstore.readonly";
91
92 #define RETRY_HISTOGRAM(name, retry_count, url) \
93 if ((url).DomainIs(kGoogleDotCom)) { \
94 UMA_HISTOGRAM_CUSTOM_COUNTS("Extensions." name "RetryCountGoogleUrl", \
95 retry_count, \
96 1, \
97 kMaxRetries, \
98 kMaxRetries + 1); \
99 } else { \
100 UMA_HISTOGRAM_CUSTOM_COUNTS("Extensions." name "RetryCountOtherUrl", \
101 retry_count, \
102 1, \
103 kMaxRetries, \
104 kMaxRetries + 1); \
105 }
106
107 bool ShouldRetryRequest(const net::URLRequestStatus& status,
108 int response_code) {
109 // Retry if the response code is a server error, or the request failed because
110 // of network errors as opposed to file errors.
111 return ((response_code >= 500 && status.is_success()) ||
112 status.status() == net::URLRequestStatus::FAILED);
113 }
114
115 // This parses and updates a URL query such that the value of the |authuser|
116 // query parameter is incremented by 1. If parameter was not present in the URL,
117 // it will be added with a value of 1. All other query keys and values are
118 // preserved as-is. Returns |false| if the user index exceeds a hard-coded
119 // maximum.
120 bool IncrementAuthUserIndex(GURL* url) {
121 int user_index = 0;
122 std::string old_query = url->query();
123 std::vector<std::string> new_query_parts;
124 url::Component query(0, old_query.length());
125 url::Component key, value;
126 while (url::ExtractQueryKeyValue(old_query.c_str(), &query, &key, &value)) {
127 std::string key_string = old_query.substr(key.begin, key.len);
128 std::string value_string = old_query.substr(value.begin, value.len);
129 if (key_string == kAuthUserQueryKey) {
130 base::StringToInt(value_string, &user_index);
131 } else {
132 new_query_parts.push_back(base::StringPrintf(
133 "%s=%s", key_string.c_str(), value_string.c_str()));
134 }
135 }
136 if (user_index >= kMaxAuthUserValue)
137 return false;
138 new_query_parts.push_back(
139 base::StringPrintf("%s=%d", kAuthUserQueryKey, user_index + 1));
140 std::string new_query_string = JoinString(new_query_parts, '&');
141 url::Component new_query(0, new_query_string.size());
142 url::Replacements<char> replacements;
143 replacements.SetQuery(new_query_string.c_str(), new_query);
144 *url = url->ReplaceComponents(replacements);
145 return true;
146 }
147
148 } // namespace
149
// Payload broadcast via NOTIFICATION_EXTENSION_UPDATE_FOUND (see
// NotifyUpdateFound): the extension id and the version reported as available.
UpdateDetails::UpdateDetails(const std::string& id, const Version& version)
    : id(id), version(version) {}

UpdateDetails::~UpdateDetails() {}
154
// Default-constructed fetch: empty URL, no credentials attached yet.
ExtensionDownloader::ExtensionFetch::ExtensionFetch()
    : url(), credentials(CREDENTIALS_NONE) {
}

// Describes one pending CRX download.  |request_ids| collects every update
// request interested in this extension so duplicate fetches can be merged
// (see FetchUpdatedExtension).  Credentials start at CREDENTIALS_NONE and are
// escalated by IterateFetchCredentialsAfterFailure on auth failures.
ExtensionDownloader::ExtensionFetch::ExtensionFetch(
    const std::string& id,
    const GURL& url,
    const std::string& package_hash,
    const std::string& version,
    const std::set<int>& request_ids)
    : id(id),
      url(url),
      package_hash(package_hash),
      version(version),
      request_ids(request_ids),
      credentials(CREDENTIALS_NONE),
      oauth2_attempt_count(0) {
}

ExtensionDownloader::ExtensionFetch::~ExtensionFetch() {}
175
// |delegate| and |request_context| must be non-null and must outlive this
// object.  base::Unretained(this) is safe in the queue callbacks because the
// queues are member variables and therefore cannot outlive |this|.
ExtensionDownloader::ExtensionDownloader(
    ExtensionDownloaderDelegate* delegate,
    net::URLRequestContextGetter* request_context)
    : OAuth2TokenService::Consumer(kTokenServiceConsumerId),
      delegate_(delegate),
      request_context_(request_context),
      manifests_queue_(&kDefaultBackoffPolicy,
                       base::Bind(&ExtensionDownloader::CreateManifestFetcher,
                                  base::Unretained(this))),
      extensions_queue_(&kDefaultBackoffPolicy,
                        base::Bind(&ExtensionDownloader::CreateExtensionFetcher,
                                   base::Unretained(this))),
      extension_cache_(NULL),
      enable_extra_update_metrics_(false),
      weak_ptr_factory_(this) {
  DCHECK(delegate_);
  DCHECK(request_context_.get());
}

ExtensionDownloader::~ExtensionDownloader() {}
196
// Queues an installed extension for an update check as part of request
// |request_id|.  Returns false if the extension cannot be checked (see
// AddExtensionData for the other rejection cases).
bool ExtensionDownloader::AddExtension(const Extension& extension,
                                       int request_id) {
  // Skip extensions with empty update URLs converted from user
  // scripts.
  if (extension.converted_from_user_script() &&
      ManifestURL::GetUpdateURL(&extension).is_empty()) {
    return false;
  }

  // If the extension updates itself from the gallery, ignore any update URL
  // data.  At the moment there is no extra data that an extension can
  // communicate to the gallery update servers.
  std::string update_url_data;
  if (!ManifestURL::UpdatesFromGallery(&extension))
    update_url_data = delegate_->GetUpdateUrlData(extension.id());

  // The delegate may force an update (and supply an install source override).
  std::string install_source;
  bool force_update = delegate_->ShouldForceUpdate(extension.id(),
                                                   &install_source);
  return AddExtensionData(extension.id(),
                          *extension.version(),
                          extension.GetType(),
                          ManifestURL::GetUpdateURL(&extension),
                          update_url_data,
                          request_id,
                          force_update,
                          install_source);
}
225
// Queues a not-yet-installed extension (known only by id and update URL) for
// download as part of request |request_id|.
bool ExtensionDownloader::AddPendingExtension(const std::string& id,
                                              const GURL& update_url,
                                              int request_id) {
  // Use a zero version to ensure that a pending extension will always
  // be updated, and thus installed (assuming all extensions have
  // non-zero versions).
  Version version("0.0.0.0");
  DCHECK(version.IsValid());

  return AddExtensionData(id,
                          version,
                          Manifest::TYPE_UNKNOWN,
                          update_url,
                          std::string(),
                          request_id,
                          false,       // No forced update.
                          std::string());
}
244
245 void ExtensionDownloader::StartAllPending(ExtensionCache* cache) {
246 if (cache) {
247 extension_cache_ = cache;
248 extension_cache_->Start(base::Bind(
249 &ExtensionDownloader::DoStartAllPending,
250 weak_ptr_factory_.GetWeakPtr()));
251 } else {
252 DoStartAllPending();
253 }
254 }
255
// Flushes everything accumulated in |fetches_preparing_|: reports the
// per-category counters, then turns each prepared ManifestFetchData into an
// actual update check.
void ExtensionDownloader::DoStartAllPending() {
  ReportStats();
  // Reset the counters for the next batch of AddExtension* calls.
  url_stats_ = URLStats();

  for (FetchMap::iterator it = fetches_preparing_.begin();
       it != fetches_preparing_.end(); ++it) {
    std::vector<linked_ptr<ManifestFetchData> >& list = it->second;
    for (size_t i = 0; i < list.size(); ++i) {
      // Transfer ownership out of the linked_ptr into the scoped_ptr that
      // StartUpdateCheck expects; the map is cleared below, so the released
      // slots are never touched again.
      StartUpdateCheck(scoped_ptr<ManifestFetchData>(list[i].release()));
    }
  }
  fetches_preparing_.clear();
}
269
// Schedules an update check for the extension blacklist, using the special
// kBlacklistAppID against the (https) webstore update URL.
void ExtensionDownloader::StartBlacklistUpdate(
    const std::string& version,
    const ManifestFetchData::PingData& ping_data,
    int request_id) {
  // Note: it is very important that we use the https version of the update
  // url here to avoid DNS hijacking of the blacklist, which is not validated
  // by a public key signature like .crx files are.
  scoped_ptr<ManifestFetchData> blacklist_fetch(CreateManifestFetchData(
      extension_urls::GetWebstoreUpdateUrl(), request_id));
  DCHECK(blacklist_fetch->base_url().SchemeIsSecure());
  blacklist_fetch->AddExtension(kBlacklistAppID,
                                version,
                                &ping_data,
                                std::string(),
                                kDefaultInstallSource,
                                false);
  StartUpdateCheck(blacklist_fetch.Pass());
}
288
// Installs the identity provider used to mint OAuth2 tokens for webstore
// downloads.  swap() hands any previously installed provider to the local
// scoped_ptr, so it is destroyed when this function returns.
void ExtensionDownloader::SetWebstoreIdentityProvider(
    scoped_ptr<IdentityProvider> identity_provider) {
  identity_provider_.swap(identity_provider);
}
293
// Shared back end for AddExtension()/AddPendingExtension().  Validates the
// update URL, updates |url_stats_|, and adds the extension to a new or
// existing ManifestFetchData keyed by (request_id, update URL) in
// |fetches_preparing_|.  Returns false only for an invalid update URL or an
// empty id; the actual fetches start later, in DoStartAllPending().
bool ExtensionDownloader::AddExtensionData(
    const std::string& id,
    const Version& version,
    Manifest::Type extension_type,
    const GURL& extension_update_url,
    const std::string& update_url_data,
    int request_id,
    bool force_update,
    const std::string& install_source_override) {
  GURL update_url(extension_update_url);
  // Skip extensions with non-empty invalid update URLs.
  if (!update_url.is_empty() && !update_url.is_valid()) {
    LOG(WARNING) << "Extension " << id << " has invalid update url "
                 << update_url;
    return false;
  }

  // Make sure we use SSL for store-hosted extensions.
  if (extension_urls::IsWebstoreUpdateUrl(update_url) &&
      !update_url.SchemeIsSecure())
    update_url = extension_urls::GetWebstoreUpdateUrl();

  // Skip extensions with empty IDs.
  if (id.empty()) {
    LOG(WARNING) << "Found extension with empty ID";
    return false;
  }

  // Bucket the update URL for ReportStats(); an empty URL falls back to the
  // webstore.
  if (update_url.DomainIs(kGoogleDotCom)) {
    url_stats_.google_url_count++;
  } else if (update_url.is_empty()) {
    url_stats_.no_url_count++;
    // Fill in default update URL.
    update_url = extension_urls::GetWebstoreUpdateUrl();
  } else {
    url_stats_.other_url_count++;
  }

  switch (extension_type) {
    case Manifest::TYPE_THEME:
      ++url_stats_.theme_count;
      break;
    case Manifest::TYPE_EXTENSION:
    case Manifest::TYPE_USER_SCRIPT:
      ++url_stats_.extension_count;
      break;
    case Manifest::TYPE_HOSTED_APP:
    case Manifest::TYPE_LEGACY_PACKAGED_APP:
      ++url_stats_.app_count;
      break;
    case Manifest::TYPE_PLATFORM_APP:
      ++url_stats_.platform_app_count;
      break;
    case Manifest::TYPE_UNKNOWN:
    default:
      ++url_stats_.pending_count;
      break;
  }

  std::vector<GURL> update_urls;
  update_urls.push_back(update_url);
  // If metrics are enabled, also add to ManifestFetchData for the
  // webstore update URL.
  if (!extension_urls::IsWebstoreUpdateUrl(update_url) &&
      enable_extra_update_metrics_) {
    update_urls.push_back(extension_urls::GetWebstoreUpdateUrl());
  }

  for (size_t i = 0; i < update_urls.size(); ++i) {
    DCHECK(!update_urls[i].is_empty());
    DCHECK(update_urls[i].is_valid());

    // Only the primary URL (i == 0) uses the default install source; the
    // extra metrics fetch is tagged "notfromwebstore".
    std::string install_source = i == 0 ?
        kDefaultInstallSource : kNotFromWebstoreInstallSource;
    if (!install_source_override.empty()) {
      install_source = install_source_override;
    }

    ManifestFetchData::PingData ping_data;
    ManifestFetchData::PingData* optional_ping_data = NULL;
    if (delegate_->GetPingDataForExtension(id, &ping_data))
      optional_ping_data = &ping_data;

    // Find or create a ManifestFetchData to add this extension to.
    bool added = false;
    FetchMap::iterator existing_iter = fetches_preparing_.find(
        std::make_pair(request_id, update_urls[i]));
    if (existing_iter != fetches_preparing_.end() &&
        !existing_iter->second.empty()) {
      // Try to add to the ManifestFetchData at the end of the list.
      ManifestFetchData* existing_fetch = existing_iter->second.back().get();
      if (existing_fetch->AddExtension(id, version.GetString(),
                                       optional_ping_data, update_url_data,
                                       install_source,
                                       force_update)) {
        added = true;
      }
    }
    if (!added) {
      // Otherwise add a new element to the list, if the list doesn't exist or
      // if its last element is already full.
      linked_ptr<ManifestFetchData> fetch(
          CreateManifestFetchData(update_urls[i], request_id));
      fetches_preparing_[std::make_pair(request_id, update_urls[i])].
          push_back(fetch);
      added = fetch->AddExtension(id, version.GetString(),
                                  optional_ping_data,
                                  update_url_data,
                                  install_source,
                                  force_update);
      DCHECK(added);
    }
  }

  return true;
}
410
// Emits the per-category counters accumulated in |url_stats_| as UMA samples.
// Called from DoStartAllPending() before the counters are reset.
void ExtensionDownloader::ReportStats() const {
  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckExtension",
                           url_stats_.extension_count);
  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckTheme",
                           url_stats_.theme_count);
  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckApp",
                           url_stats_.app_count);
  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckPackagedApp",
                           url_stats_.platform_app_count);
  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckPending",
                           url_stats_.pending_count);
  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckGoogleUrl",
                           url_stats_.google_url_count);
  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckOtherUrl",
                           url_stats_.other_url_count);
  UMA_HISTOGRAM_COUNTS_100("Extensions.UpdateCheckNoUrl",
                           url_stats_.no_url_count);
}
429
430 void ExtensionDownloader::StartUpdateCheck(
431 scoped_ptr<ManifestFetchData> fetch_data) {
432 const std::set<std::string>& id_set(fetch_data->extension_ids());
433
434 if (!ExtensionsBrowserClient::Get()->IsBackgroundUpdateAllowed()) {
435 NotifyExtensionsDownloadFailed(id_set,
436 fetch_data->request_ids(),
437 ExtensionDownloaderDelegate::DISABLED);
438 }
439
440 RequestQueue<ManifestFetchData>::iterator i;
441 for (i = manifests_queue_.begin(); i != manifests_queue_.end(); ++i) {
442 if (fetch_data->full_url() == i->full_url()) {
443 // This url is already scheduled to be fetched.
444 i->Merge(*fetch_data);
445 return;
446 }
447 }
448
449 if (manifests_queue_.active_request() &&
450 manifests_queue_.active_request()->full_url() == fetch_data->full_url()) {
451 manifests_queue_.active_request()->Merge(*fetch_data);
452 } else {
453 UMA_HISTOGRAM_COUNTS("Extensions.UpdateCheckUrlLength",
454 fetch_data->full_url().possibly_invalid_spec().length());
455
456 manifests_queue_.ScheduleRequest(fetch_data.Pass());
457 }
458 }
459
// RequestQueue callback: builds and starts the URLFetcher for the manifest
// request currently at the head of |manifests_queue_|.  Completion arrives in
// OnURLFetchComplete().
void ExtensionDownloader::CreateManifestFetcher() {
  if (VLOG_IS_ON(2)) {
    std::vector<std::string> id_vector(
        manifests_queue_.active_request()->extension_ids().begin(),
        manifests_queue_.active_request()->extension_ids().end());
    std::string id_list = JoinString(id_vector, ',');
    VLOG(2) << "Fetching " << manifests_queue_.active_request()->full_url()
            << " for " << id_list;
  }

  manifest_fetcher_.reset(net::URLFetcher::Create(
      kManifestFetcherId, manifests_queue_.active_request()->full_url(),
      net::URLFetcher::GET, this));
  manifest_fetcher_->SetRequestContext(request_context_.get());
  // Manifest checks are stateless: no cookies in either direction, no cache.
  manifest_fetcher_->SetLoadFlags(net::LOAD_DO_NOT_SEND_COOKIES |
                                  net::LOAD_DO_NOT_SAVE_COOKIES |
                                  net::LOAD_DISABLE_CACHE);
  // Update checks can be interrupted if a network change is detected; this is
  // common for the retail mode AppPack on ChromeOS. Retrying once should be
  // enough to recover in those cases; let the fetcher retry up to 3 times
  // just in case. http://crosbug.com/130602
  manifest_fetcher_->SetAutomaticallyRetryOnNetworkChanges(3);
  manifest_fetcher_->Start();
}
484
// net::URLFetcherDelegate override.  Dispatches to the manifest or CRX
// completion handler depending on which of the two owned fetchers finished.
void ExtensionDownloader::OnURLFetchComplete(
    const net::URLFetcher* source) {
  // TODO(vadimt): Remove ScopedProfile below once crbug.com/422577 is fixed.
  tracked_objects::ScopedProfile tracking_profile(
      FROM_HERE_WITH_EXPLICIT_FUNCTION(
          "422577 ExtensionDownloader::OnURLFetchComplete"));

  VLOG(2) << source->GetResponseCode() << " " << source->GetURL();

  if (source == manifest_fetcher_.get()) {
    std::string data;
    source->GetResponseAsString(&data);
    OnManifestFetchComplete(source->GetURL(),
                            source->GetStatus(),
                            source->GetResponseCode(),
                            source->GetBackoffDelay(),
                            data);
  } else if (source == extension_fetcher_.get()) {
    OnCRXFetchComplete(source,
                       source->GetURL(),
                       source->GetStatus(),
                       source->GetResponseCode(),
                       source->GetBackoffDelay());
  } else {
    // Only the two fetchers above are ever created by this class.
    NOTREACHED();
  }
}
512
// Handles completion of a manifest fetch: on success hands the XML to a
// SafeManifestParser (results come back via HandleManifestResults); on
// failure either retries with back-off or reports MANIFEST_FETCH_FAILED.
// Always resets the active request and starts the next queued one.
void ExtensionDownloader::OnManifestFetchComplete(
    const GURL& url,
    const net::URLRequestStatus& status,
    int response_code,
    const base::TimeDelta& backoff_delay,
    const std::string& data) {
  // We want to try parsing the manifest, and if it indicates updates are
  // available, we want to fire off requests to fetch those updates.
  // Note: a file:// URL with non-empty data counts as success regardless of
  // the response code.
  if (status.status() == net::URLRequestStatus::SUCCESS &&
      (response_code == 200 || (url.SchemeIsFile() && data.length() > 0))) {
    RETRY_HISTOGRAM("ManifestFetchSuccess",
                    manifests_queue_.active_request_failure_count(), url);
    VLOG(2) << "beginning manifest parse for " << url;
    // The parser takes ownership of the released ManifestFetchData and runs
    // the parse out of process; HandleManifestResults is invoked via the
    // bound weak pointer when it finishes.
    scoped_refptr<SafeManifestParser> safe_parser(
        new SafeManifestParser(
            data,
            manifests_queue_.reset_active_request().release(),
            base::Bind(&ExtensionDownloader::HandleManifestResults,
                       weak_ptr_factory_.GetWeakPtr())));
    safe_parser->Start();
  } else {
    VLOG(1) << "Failed to fetch manifest '" << url.possibly_invalid_spec()
            << "' response code:" << response_code;
    if (ShouldRetryRequest(status, response_code) &&
        manifests_queue_.active_request_failure_count() < kMaxRetries) {
      manifests_queue_.RetryRequest(backoff_delay);
    } else {
      RETRY_HISTOGRAM("ManifestFetchFailure",
                      manifests_queue_.active_request_failure_count(), url);
      NotifyExtensionsDownloadFailed(
          manifests_queue_.active_request()->extension_ids(),
          manifests_queue_.active_request()->request_ids(),
          ExtensionDownloaderDelegate::MANIFEST_FETCH_FAILED);
    }
  }
  manifest_fetcher_.reset();
  manifests_queue_.reset_active_request();

  // If we have any pending manifest requests, fire off the next one.
  manifests_queue_.StartNextRequest();
}
554
// Callback from SafeManifestParser.  Determines which entries in |results|
// are real updates, schedules a CRX fetch for each, records <daystart> ping
// results for Google-hosted manifests, and finally reports
// NO_UPDATE_AVAILABLE for every extension that did not get an update.
// |results| is null if the manifest could not be parsed.
void ExtensionDownloader::HandleManifestResults(
    const ManifestFetchData& fetch_data,
    const UpdateManifest::Results* results) {
  // Keep a list of extensions that will not be updated, so that the |delegate_|
  // can be notified once we're done here.
  std::set<std::string> not_updated(fetch_data.extension_ids());

  if (!results) {
    NotifyExtensionsDownloadFailed(
        not_updated,
        fetch_data.request_ids(),
        ExtensionDownloaderDelegate::MANIFEST_INVALID);
    return;
  }

  // Examine the parsed manifest and kick off fetches of any new crx files.
  std::vector<int> updates;
  DetermineUpdates(fetch_data, *results, &updates);
  for (size_t i = 0; i < updates.size(); i++) {
    const UpdateManifest::Result* update = &(results->list.at(updates[i]));
    const std::string& id = update->extension_id;
    not_updated.erase(id);

    GURL crx_url = update->crx_url;
    if (id != kBlacklistAppID) {
      NotifyUpdateFound(update->extension_id, update->version);
    } else {
      // The URL of the blacklist file is returned by the server and we need to
      // be sure that we continue to be able to reliably detect whether a URL
      // references a blacklist file.
      DCHECK(extension_urls::IsBlacklistUpdateUrl(crx_url)) << crx_url;

      // Force https (crbug.com/129587).
      if (!crx_url.SchemeIsSecure()) {
        url::Replacements<char> replacements;
        std::string scheme("https");
        replacements.SetScheme(scheme.c_str(),
                               url::Component(0, scheme.size()));
        crx_url = crx_url.ReplaceComponents(replacements);
      }
    }
    scoped_ptr<ExtensionFetch> fetch(new ExtensionFetch(
        update->extension_id, crx_url, update->package_hash,
        update->version, fetch_data.request_ids()));
    FetchUpdatedExtension(fetch.Pass());
  }

  // If the manifest response included a <daystart> element, we want to save
  // that value for any extensions which had sent a ping in the request.
  if (fetch_data.base_url().DomainIs(kGoogleDotCom) &&
      results->daystart_elapsed_seconds >= 0) {
    Time day_start =
        Time::Now() - TimeDelta::FromSeconds(results->daystart_elapsed_seconds);

    const std::set<std::string>& extension_ids = fetch_data.extension_ids();
    std::set<std::string>::const_iterator i;
    for (i = extension_ids.begin(); i != extension_ids.end(); i++) {
      const std::string& id = *i;
      ExtensionDownloaderDelegate::PingResult& result = ping_results_[id];
      result.did_ping = fetch_data.DidPing(id, ManifestFetchData::ROLLCALL);
      result.day_start = day_start;
    }
  }

  // Everything still in |not_updated| simply has no newer version.
  NotifyExtensionsDownloadFailed(
      not_updated,
      fetch_data.request_ids(),
      ExtensionDownloaderDelegate::NO_UPDATE_AVAILABLE);
}
624
// Filters |possible_updates| down to the indices that should actually be
// fetched: the id must belong to |fetch_data|, the offered version must be
// newer than what is installed (unless the extension is pending or the update
// was forced), and any browser_min_version requirement must be satisfied.
void ExtensionDownloader::DetermineUpdates(
    const ManifestFetchData& fetch_data,
    const UpdateManifest::Results& possible_updates,
    std::vector<int>* result) {
  for (size_t i = 0; i < possible_updates.list.size(); i++) {
    const UpdateManifest::Result* update = &possible_updates.list[i];
    const std::string& id = update->extension_id;

    if (!fetch_data.Includes(id)) {
      VLOG(2) << "Ignoring " << id << " from this manifest";
      continue;
    }

    if (VLOG_IS_ON(2)) {
      if (update->version.empty())
        VLOG(2) << "manifest indicates " << id << " has no update";
      else
        VLOG(2) << "manifest indicates " << id
                << " latest version is '" << update->version << "'";
    }

    if (!delegate_->IsExtensionPending(id)) {
      // If we're not installing pending extension, and the update
      // version is the same or older than what's already installed,
      // we don't want it.
      std::string version;
      if (!delegate_->GetExtensionExistingVersion(id, &version)) {
        VLOG(2) << id << " is not installed";
        continue;
      }

      VLOG(2) << id << " is at '" << version << "'";

      // We should skip the version check if update was forced.
      if (!fetch_data.DidForceUpdate(id)) {
        Version existing_version(version);
        Version update_version(update->version);
        // An unparseable offered version is treated as "no update".
        if (!update_version.IsValid() ||
            update_version.CompareTo(existing_version) <= 0) {
          continue;
        }
      }
    }

    // If the update specifies a browser minimum version, do we qualify?
    if (update->browser_min_version.length() > 0 &&
        !ExtensionsBrowserClient::Get()->IsMinBrowserVersionSupported(
            update->browser_min_version)) {
      // TODO(asargent) - We may want this to show up in the extensions UI
      // eventually. (http://crbug.com/12547).
      LOG(WARNING) << "Updated version of extension " << id
                   << " available, but requires chrome version "
                   << update->browser_min_version;
      continue;
    }
    VLOG(2) << "will try to update " << id;
    result->push_back(i);
  }
}
684
// Begins (or queues up) download of an updated extension.  Requests for an
// id/URL already queued or active are merged (their request ids are unioned);
// a version already present in the cache short-circuits the download and
// notifies the delegate directly.
void ExtensionDownloader::FetchUpdatedExtension(
    scoped_ptr<ExtensionFetch> fetch_data) {
  if (!fetch_data->url.is_valid()) {
    // TODO(asargent): This can sometimes be invalid. See crbug.com/130881.
    LOG(ERROR) << "Invalid URL: '" << fetch_data->url.possibly_invalid_spec()
               << "' for extension " << fetch_data->id;
    return;
  }

  for (RequestQueue<ExtensionFetch>::iterator iter =
           extensions_queue_.begin();
       iter != extensions_queue_.end(); ++iter) {
    if (iter->id == fetch_data->id || iter->url == fetch_data->url) {
      iter->request_ids.insert(fetch_data->request_ids.begin(),
                               fetch_data->request_ids.end());
      return;  // already scheduled
    }
  }

  if (extensions_queue_.active_request() &&
      extensions_queue_.active_request()->url == fetch_data->url) {
    extensions_queue_.active_request()->request_ids.insert(
        fetch_data->request_ids.begin(), fetch_data->request_ids.end());
  } else {
    std::string version;
    // Serve from the cache if it already holds exactly this version.
    if (extension_cache_ &&
        extension_cache_->GetExtension(fetch_data->id, NULL, &version) &&
        version == fetch_data->version) {
      base::FilePath crx_path;
      // Now get .crx file path and mark extension as used.
      extension_cache_->GetExtension(fetch_data->id, &crx_path, &version);
      // false: the cache retains ownership of the file on disk.
      NotifyDelegateDownloadFinished(fetch_data.Pass(), crx_path, false);
    } else {
      extensions_queue_.ScheduleRequest(fetch_data.Pass());
    }
  }
}
723
724 void ExtensionDownloader::NotifyDelegateDownloadFinished(
725 scoped_ptr<ExtensionFetch> fetch_data,
726 const base::FilePath& crx_path,
727 bool file_ownership_passed) {
728 delegate_->OnExtensionDownloadFinished(fetch_data->id, crx_path,
729 file_ownership_passed, fetch_data->url, fetch_data->version,
730 ping_results_[fetch_data->id], fetch_data->request_ids);
731 ping_results_.erase(fetch_data->id);
732 }
733
// RequestQueue callback: builds and starts the URLFetcher for the CRX request
// at the head of |extensions_queue_|, attaching cookies or an OAuth2 bearer
// token according to the fetch's credential state.  If an OAuth2 token is
// needed but not cached, the fetch is deferred until the token request
// completes (OnGetTokenSuccess/OnGetTokenFailure restart it).
void ExtensionDownloader::CreateExtensionFetcher() {
  const ExtensionFetch* fetch = extensions_queue_.active_request();
  extension_fetcher_.reset(net::URLFetcher::Create(
      kExtensionFetcherId, fetch->url, net::URLFetcher::GET, this));
  extension_fetcher_->SetRequestContext(request_context_.get());
  extension_fetcher_->SetAutomaticallyRetryOnNetworkChanges(3);

  int load_flags = net::LOAD_DISABLE_CACHE;
  bool is_secure = fetch->url.SchemeIsSecure();
  // Cookies are only ever sent over a secure connection, and only when the
  // fetch has been escalated to cookie credentials.
  if (fetch->credentials != ExtensionFetch::CREDENTIALS_COOKIES || !is_secure) {
    load_flags |= net::LOAD_DO_NOT_SEND_COOKIES |
                  net::LOAD_DO_NOT_SAVE_COOKIES;
  }
  extension_fetcher_->SetLoadFlags(load_flags);

  // Download CRX files to a temp file. The blacklist is small and will be
  // processed in memory, so it is fetched into a string.
  if (fetch->id != kBlacklistAppID) {
    extension_fetcher_->SaveResponseToTemporaryFile(
        BrowserThread::GetMessageLoopProxyForThread(BrowserThread::FILE));
  }

  // Bearer tokens are only attached over https.
  if (fetch->credentials == ExtensionFetch::CREDENTIALS_OAUTH2_TOKEN &&
      is_secure) {
    if (access_token_.empty()) {
      // We should try OAuth2, but we have no token cached. This
      // ExtensionFetcher will be started once the token fetch is complete,
      // in either OnTokenFetchSuccess or OnTokenFetchFailure.
      DCHECK(identity_provider_.get());
      OAuth2TokenService::ScopeSet webstore_scopes;
      webstore_scopes.insert(kWebstoreOAuth2Scope);
      access_token_request_ =
          identity_provider_->GetTokenService()->StartRequest(
              identity_provider_->GetActiveAccountId(),
              webstore_scopes,
              this);
      return;
    }
    extension_fetcher_->AddExtraRequestHeader(
        base::StringPrintf("%s: Bearer %s",
                           net::HttpRequestHeaders::kAuthorization,
                           access_token_.c_str()));
  }

  VLOG(2) << "Starting fetch of " << fetch->url << " for " << fetch->id;
  extension_fetcher_->Start();
}
781
// Handles completion of a CRX download.  On success the file is cached (if a
// cache is present) and/or handed to the delegate.  On failure the request is
// retried with escalated credentials, retried with back-off, or finally
// reported as CRX_FETCH_FAILED.  Always starts the next queued download.
void ExtensionDownloader::OnCRXFetchComplete(
    const net::URLFetcher* source,
    const GURL& url,
    const net::URLRequestStatus& status,
    int response_code,
    const base::TimeDelta& backoff_delay) {
  ExtensionFetch& active_request = *extensions_queue_.active_request();
  const std::string& id = active_request.id;
  if (status.status() == net::URLRequestStatus::SUCCESS &&
      (response_code == 200 || url.SchemeIsFile())) {
    RETRY_HISTOGRAM("CrxFetchSuccess",
                    extensions_queue_.active_request_failure_count(), url);
    base::FilePath crx_path;
    // Take ownership of the file at |crx_path|.
    CHECK(source->GetResponseAsFilePath(true, &crx_path));
    scoped_ptr<ExtensionFetch> fetch_data =
        extensions_queue_.reset_active_request();
    if (extension_cache_) {
      const std::string& version = fetch_data->version;
      // The cache copies the file and then invokes the delegate callback.
      extension_cache_->PutExtension(id, crx_path, version,
                                     base::Bind(&ExtensionDownloader::NotifyDelegateDownloadFinished,
                                                weak_ptr_factory_.GetWeakPtr(),
                                                base::Passed(&fetch_data)));
    } else {
      // true: the delegate now owns the temp file.
      NotifyDelegateDownloadFinished(fetch_data.Pass(), crx_path, true);
    }
  } else if (IterateFetchCredentialsAfterFailure(
                 &active_request,
                 status,
                 response_code)) {
    // An auth failure with another credential set left to try: retry the same
    // request with the escalated credentials.
    extensions_queue_.RetryRequest(backoff_delay);
  } else {
    const std::set<int>& request_ids = active_request.request_ids;
    const ExtensionDownloaderDelegate::PingResult& ping = ping_results_[id];
    VLOG(1) << "Failed to fetch extension '" << url.possibly_invalid_spec()
            << "' response code:" << response_code;
    if (ShouldRetryRequest(status, response_code) &&
        extensions_queue_.active_request_failure_count() < kMaxRetries) {
      extensions_queue_.RetryRequest(backoff_delay);
    } else {
      RETRY_HISTOGRAM("CrxFetchFailure",
                      extensions_queue_.active_request_failure_count(), url);
      // status.error() is 0 (net::OK) or negative. (See net/base/net_errors.h)
      UMA_HISTOGRAM_SPARSE_SLOWLY("Extensions.CrxFetchError", -status.error());
      delegate_->OnExtensionDownloadFailed(
          id, ExtensionDownloaderDelegate::CRX_FETCH_FAILED, ping, request_ids);
    }
    ping_results_.erase(id);
    extensions_queue_.reset_active_request();
  }

  extension_fetcher_.reset();

  // If there are any pending downloads left, start the next one.
  extensions_queue_.StartNextRequest();
}
838
839 void ExtensionDownloader::NotifyExtensionsDownloadFailed(
840 const std::set<std::string>& extension_ids,
841 const std::set<int>& request_ids,
842 ExtensionDownloaderDelegate::Error error) {
843 for (std::set<std::string>::const_iterator it = extension_ids.begin();
844 it != extension_ids.end(); ++it) {
845 const ExtensionDownloaderDelegate::PingResult& ping = ping_results_[*it];
846 delegate_->OnExtensionDownloadFailed(*it, error, ping, request_ids);
847 ping_results_.erase(*it);
848 }
849 }
850
851 void ExtensionDownloader::NotifyUpdateFound(const std::string& id,
852 const std::string& version) {
853 UpdateDetails updateInfo(id, Version(version));
854 content::NotificationService::current()->Notify(
855 extensions::NOTIFICATION_EXTENSION_UPDATE_FOUND,
856 content::NotificationService::AllBrowserContextsAndSources(),
857 content::Details<UpdateDetails>(&updateInfo));
858 }
859
// Advances |fetch| through the credential escalation ladder after an
// authorization failure (cancel, 401, or 403):
//   NONE -> OAUTH2_TOKEN (google.com URLs with an identity provider)
//   NONE -> COOKIES      (everything else)
//   OAUTH2_TOKEN: invalidate an expired token and retry (up to
//   kMaxOAuth2Attempts), otherwise fall back to COOKIES
//   COOKIES: on 403, try the next signed-in session via the authuser param.
// Returns true if the fetch should be retried with the new credentials.
bool ExtensionDownloader::IterateFetchCredentialsAfterFailure(
    ExtensionFetch* fetch,
    const net::URLRequestStatus& status,
    int response_code) {
  bool auth_failure = status.status() == net::URLRequestStatus::CANCELED ||
                      (status.status() == net::URLRequestStatus::SUCCESS &&
                       (response_code == net::HTTP_UNAUTHORIZED ||
                        response_code == net::HTTP_FORBIDDEN));
  if (!auth_failure) {
    return false;
  }
  // Here we decide what to do next if the server refused to authorize this
  // fetch.
  switch (fetch->credentials) {
    case ExtensionFetch::CREDENTIALS_NONE:
      if (fetch->url.DomainIs(kGoogleDotCom) && identity_provider_) {
        fetch->credentials = ExtensionFetch::CREDENTIALS_OAUTH2_TOKEN;
      } else {
        fetch->credentials = ExtensionFetch::CREDENTIALS_COOKIES;
      }
      return true;
    case ExtensionFetch::CREDENTIALS_OAUTH2_TOKEN:
      fetch->oauth2_attempt_count++;
      // OAuth2 may fail due to an expired access token, in which case we
      // should invalidate the token and try again.
      if (response_code == net::HTTP_UNAUTHORIZED &&
          fetch->oauth2_attempt_count <= kMaxOAuth2Attempts) {
        DCHECK(identity_provider_.get());
        OAuth2TokenService::ScopeSet webstore_scopes;
        webstore_scopes.insert(kWebstoreOAuth2Scope);
        identity_provider_->GetTokenService()->InvalidateToken(
            identity_provider_->GetActiveAccountId(),
            webstore_scopes,
            access_token_);
        access_token_.clear();
        return true;
      }
      // Either there is no Gaia identity available, the active identity
      // doesn't have access to this resource, or the server keeps returning
      // 401s and we've retried too many times. Fall back on cookies.
      if (access_token_.empty() ||
          response_code == net::HTTP_FORBIDDEN ||
          fetch->oauth2_attempt_count > kMaxOAuth2Attempts) {
        fetch->credentials = ExtensionFetch::CREDENTIALS_COOKIES;
        return true;
      }
      // Something else is wrong. Time to give up.
      return false;
    case ExtensionFetch::CREDENTIALS_COOKIES:
      if (response_code == net::HTTP_FORBIDDEN) {
        // Try the next session identity, up to some maximum.
        return IncrementAuthUserIndex(&fetch->url);
      }
      return false;
    default:
      NOTREACHED();
  }
  NOTREACHED();
  return false;
}
920
921 void ExtensionDownloader::OnGetTokenSuccess(
922 const OAuth2TokenService::Request* request,
923 const std::string& access_token,
924 const base::Time& expiration_time) {
925 access_token_ = access_token;
926 extension_fetcher_->AddExtraRequestHeader(
927 base::StringPrintf("%s: Bearer %s",
928 net::HttpRequestHeaders::kAuthorization,
929 access_token_.c_str()));
930 extension_fetcher_->Start();
931 }
932
// OAuth2TokenService::Consumer override.
void ExtensionDownloader::OnGetTokenFailure(
    const OAuth2TokenService::Request* request,
    const GoogleServiceAuthError& error) {
  // If we fail to get an access token, kick the pending fetch and let it fall
  // back on cookies.
  extension_fetcher_->Start();
}
940
941 ManifestFetchData* ExtensionDownloader::CreateManifestFetchData(
942 const GURL& update_url,
943 int request_id) {
944 ManifestFetchData::PingMode ping_mode = ManifestFetchData::NO_PING;
945 if (update_url.DomainIs(ping_enabled_domain_.c_str())) {
946 if (enable_extra_update_metrics_) {
947 ping_mode = ManifestFetchData::PING_WITH_METRICS;
948 } else {
949 ping_mode = ManifestFetchData::PING;
950 }
951 }
952 return new ManifestFetchData(
953 update_url, request_id, brand_code_, manifest_query_params_, ping_mode);
954 }
955
956 } // namespace extensions
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698