Index: extensions/browser/updater/extension_downloader.cc
diff --git a/extensions/browser/updater/extension_downloader.cc b/extensions/browser/updater/extension_downloader.cc
index 3e060835c0d13945448bee54bfee9b7260017809..f572fa0e562d8b5e6de4bce826c7dd11ced7cd80 100644
--- a/extensions/browser/updater/extension_downloader.cc
+++ b/extensions/browser/updater/extension_downloader.cc
@@ -528,9 +528,10 @@ void ExtensionDownloader::OnManifestFetchComplete(
     VLOG(2) << "beginning manifest parse for " << url;
     scoped_refptr<SafeManifestParser> safe_parser(new SafeManifestParser(
         data,
-        manifests_queue_.reset_active_request().release(),
-        base::Bind(&ExtensionDownloader::HandleManifestResults,
-                   weak_ptr_factory_.GetWeakPtr())));
+        base::Bind(
+            &ExtensionDownloader::HandleManifestResults,
+            weak_ptr_factory_.GetWeakPtr(),
+            base::Owned(manifests_queue_.reset_active_request().release()))));
     safe_parser->Start();
   } else {
     VLOG(1) << "Failed to fetch manifest '" << url.possibly_invalid_spec()
@@ -556,23 +557,25 @@ void ExtensionDownloader::OnManifestFetchComplete(
 }
 
 void ExtensionDownloader::HandleManifestResults(
-    const ManifestFetchData& fetch_data,
+    const ManifestFetchData* fetch_data,
     const UpdateManifest::Results* results) {
   // Keep a list of extensions that will not be updated, so that the |delegate_|
   // can be notified once we're done here.
-  std::set<std::string> not_updated(fetch_data.extension_ids());
+  std::set<std::string> not_updated(fetch_data->extension_ids());
 
   if (!results) {
+    VLOG(2) << "parsing manifest failed (" << fetch_data->full_url() << ")";
     NotifyExtensionsDownloadFailed(
-        not_updated,
-        fetch_data.request_ids(),
+        not_updated, fetch_data->request_ids(),
         ExtensionDownloaderDelegate::MANIFEST_INVALID);
     return;
+  } else {
+    VLOG(2) << "parsing manifest succeeded (" << fetch_data->full_url() << ")";
   }
 
   // Examine the parsed manifest and kick off fetches of any new crx files.
   std::vector<int> updates;
-  DetermineUpdates(fetch_data, *results, &updates);
+  DetermineUpdates(*fetch_data, *results, &updates);
   for (size_t i = 0; i < updates.size(); i++) {
     const UpdateManifest::Result* update = &(results->list.at(updates[i]));
     const std::string& id = update->extension_id;
@@ -597,34 +600,30 @@ void ExtensionDownloader::HandleManifestResults(
       }
     }
     scoped_ptr<ExtensionFetch> fetch(
-        new ExtensionFetch(update->extension_id,
-                           crx_url,
-                           update->package_hash,
-                           update->version,
-                           fetch_data.request_ids()));
+        new ExtensionFetch(update->extension_id, crx_url, update->package_hash,
+                           update->version, fetch_data->request_ids()));
     FetchUpdatedExtension(fetch.Pass());
   }
 
   // If the manifest response included a <daystart> element, we want to save
   // that value for any extensions which had sent a ping in the request.
-  if (fetch_data.base_url().DomainIs(kGoogleDotCom) &&
+  if (fetch_data->base_url().DomainIs(kGoogleDotCom) &&
       results->daystart_elapsed_seconds >= 0) {
     Time day_start =
         Time::Now() - TimeDelta::FromSeconds(results->daystart_elapsed_seconds);
 
-    const std::set<std::string>& extension_ids = fetch_data.extension_ids();
+    const std::set<std::string>& extension_ids = fetch_data->extension_ids();
     std::set<std::string>::const_iterator i;
     for (i = extension_ids.begin(); i != extension_ids.end(); i++) {
       const std::string& id = *i;
       ExtensionDownloaderDelegate::PingResult& result = ping_results_[id];
-      result.did_ping = fetch_data.DidPing(id, ManifestFetchData::ROLLCALL);
+      result.did_ping = fetch_data->DidPing(id, ManifestFetchData::ROLLCALL);
       result.day_start = day_start;
     }
   }
 
   NotifyExtensionsDownloadFailed(
-      not_updated,
-      fetch_data.request_ids(),
+      not_updated, fetch_data->request_ids(),
       ExtensionDownloaderDelegate::NO_UPDATE_AVAILABLE);
 }
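
Background on the idiom this patch switches to: previously the ManifestFetchData released from |manifests_queue_| was handed to the SafeManifestParser constructor, and HandleManifestResults received it by const reference. The patch instead binds the pointer into the callback with base::Owned, so the callback object itself owns the fetch data, deleting it when the callback is destroyed (whether or not it runs), and HandleManifestResults takes a raw pointer it must not delete. A minimal sketch of the base::Owned pattern, assuming the //base API of this era (base::Bind, base::Closure, base::WeakPtrFactory); Downloader, Data, MakeCallback, and OnDone are hypothetical stand-ins, not names from this file:

#include "base/bind.h"
#include "base/callback.h"
#include "base/memory/weak_ptr.h"

struct Data {
  int value;
};

class Downloader {
 public:
  Downloader() : weak_ptr_factory_(this) {}

  // base::Owned transfers ownership of |data| to the returned callback:
  // the callback deletes |data| when it is destroyed. If the WeakPtr is
  // invalidated first, Run() becomes a no-op but |data| is still freed,
  // so nothing leaks.
  base::Closure MakeCallback(Data* data) {
    return base::Bind(&Downloader::OnDone,
                      weak_ptr_factory_.GetWeakPtr(),
                      base::Owned(data));
  }

 private:
  // Receives a raw pointer; the callback, not this method, owns |data|.
  void OnDone(Data* data) { /* read |data|; do not delete it */ }

  base::WeakPtrFactory<Downloader> weak_ptr_factory_;
};

Tying the fetch data's lifetime to the bound callback rather than to the parser is what lets the SafeManifestParser constructor drop its fetch-data parameter in the first hunk, and it keeps the object alive exactly as long as any pending invocation can still reference it.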