OLD | NEW |
1 // Copyright 2014 The Chromium Authors. All rights reserved. | 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "extensions/browser/content_hash_fetcher.h" | 5 #include "extensions/browser/content_hash_fetcher.h" |
6 | 6 |
7 #include <stddef.h> | 7 #include <stddef.h> |
8 | 8 |
9 #include <algorithm> | 9 #include <algorithm> |
10 #include <memory> | 10 #include <memory> |
(...skipping 218 matching lines...)
229 LOG(WARNING) << "Failed to delete " << path.value(); | 229 LOG(WARNING) << "Failed to delete " << path.value(); |
230 return false; | 230 return false; |
231 } | 231 } |
232 return true; | 232 return true; |
233 } | 233 } |
234 | 234 |
235 void ContentHashFetcherJob::DoneCheckingForVerifiedContents(bool found) { | 235 void ContentHashFetcherJob::DoneCheckingForVerifiedContents(bool found) { |
236 if (IsCancelled()) | 236 if (IsCancelled()) |
237 return; | 237 return; |
238 if (found) { | 238 if (found) { |
239 VLOG(1) << "Found verified contents for " << extension_id_; | 239 LOG(WARNING) << "Found verified contents for " << extension_id_; |
240 DoneFetchingVerifiedContents(true); | 240 DoneFetchingVerifiedContents(true); |
241 } else { | 241 } else { |
242 VLOG(1) << "Missing verified contents for " << extension_id_ | 242 LOG(WARNING) << "Missing verified contents for " << extension_id_ |
243 << ", fetching..."; | 243 << ", fetching..."; |
244 url_fetcher_ = | 244 url_fetcher_ = |
245 net::URLFetcher::Create(fetch_url_, net::URLFetcher::GET, this); | 245 net::URLFetcher::Create(fetch_url_, net::URLFetcher::GET, this); |
246 url_fetcher_->SetRequestContext(request_context_); | 246 url_fetcher_->SetRequestContext(request_context_); |
247 url_fetcher_->SetLoadFlags(net::LOAD_DO_NOT_SEND_COOKIES | | 247 url_fetcher_->SetLoadFlags(net::LOAD_DO_NOT_SEND_COOKIES | |
248 net::LOAD_DO_NOT_SAVE_COOKIES | | 248 net::LOAD_DO_NOT_SAVE_COOKIES | |
249 net::LOAD_DISABLE_CACHE); | 249 net::LOAD_DISABLE_CACHE); |
250 url_fetcher_->SetAutomaticallyRetryOnNetworkChanges(3); | 250 url_fetcher_->SetAutomaticallyRetryOnNetworkChanges(3); |
251 url_fetcher_->Start(); | 251 url_fetcher_->Start(); |
252 } | 252 } |
253 } | 253 } |
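Note on the VLOG(1) -> LOG(WARNING) changes in this hunk: VLOG(1) output only appears when verbose logging is enabled (e.g. --v=1 or --vmodule=content_hash_fetcher=1), while LOG(WARNING) is emitted unconditionally at WARNING severity. A minimal illustration of the difference, using the standard base/logging.h macros:

#include "base/logging.h"

void LoggingSketch() {
  // Only emitted when run with --v=1 (or a matching --vmodule pattern).
  VLOG(1) << "verbose-only message";
  // Always emitted, regardless of verbosity flags.
  LOG(WARNING) << "always-on warning";
}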
254 | 254 |
255 // Helper function to let us pass ownership of a string via base::Bind with the | 255 // Helper function to let us pass ownership of a string via base::Bind with the |
256 // contents to be written into a file. Also ensures that the directory for | 256 // contents to be written into a file. Also ensures that the directory for |
257 // |path| exists, creating it if needed. | 257 // |path| exists, creating it if needed. |
258 static int WriteFileHelper(const base::FilePath& path, | 258 static int WriteFileHelper(const base::FilePath& path, |
259 std::unique_ptr<std::string> content) { | 259 std::unique_ptr<std::string> content) { |
260 base::FilePath dir = path.DirName(); | 260 base::FilePath dir = path.DirName(); |
261 if (!base::CreateDirectoryAndGetError(dir, nullptr)) | 261 if (!base::CreateDirectoryAndGetError(dir, nullptr)) |
262 return -1; | 262 return -1; |
263 return base::WriteFile(path, content->data(), content->size()); | 263 return base::WriteFile(path, content->data(), content->size()); |
264 } | 264 } |
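For reference, a minimal sketch of how a helper with this signature is driven from another thread; the real call appears in the next hunk, and OnWriteComplete here is a hypothetical reply method used only for illustration:

// Sketch only. base::Passed() moves the string into the bound callback so
// WriteFileHelper takes ownership on the blocking pool; the int it returns
// (bytes written, or -1 on error) is delivered to the reply callback on the
// calling thread. The job object is reference-counted, so binding |this| to
// the reply is safe.
std::unique_ptr<std::string> contents(new std::string("file body"));
base::PostTaskAndReplyWithResult(
    content::BrowserThread::GetBlockingPool(), FROM_HERE,
    base::Bind(&WriteFileHelper, destination, base::Passed(&contents)),
    base::Bind(&ContentHashFetcherJob::OnWriteComplete, this));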
265 | 265 |
266 void ContentHashFetcherJob::OnURLFetchComplete(const net::URLFetcher* source) { | 266 void ContentHashFetcherJob::OnURLFetchComplete(const net::URLFetcher* source) { |
267 VLOG(1) << "URLFetchComplete for " << extension_id_ | 267 LOG(WARNING) << "URLFetchComplete for " << extension_id_ |
268 << " is_success:" << url_fetcher_->GetStatus().is_success() << " " | 268 << " is_success:" << url_fetcher_->GetStatus().is_success() |
269 << fetch_url_.possibly_invalid_spec(); | 269 << " " << fetch_url_.possibly_invalid_spec(); |
270 if (IsCancelled()) | 270 if (IsCancelled()) |
271 return; | 271 return; |
272 std::unique_ptr<std::string> response(new std::string); | 272 std::unique_ptr<std::string> response(new std::string); |
273 if (!url_fetcher_->GetStatus().is_success() || | 273 if (!url_fetcher_->GetStatus().is_success() || |
274 !url_fetcher_->GetResponseAsString(response.get())) { | 274 !url_fetcher_->GetResponseAsString(response.get())) { |
275 DoneFetchingVerifiedContents(false); | 275 DoneFetchingVerifiedContents(false); |
276 return; | 276 return; |
277 } | 277 } |
278 | 278 |
279 // Parse the response to make sure it is valid json (on staging sometimes it | 279 // Parse the response to make sure it is valid json (on staging sometimes it |
280 // can be a login redirect html, xml file, etc. if you aren't logged in with | 280 // can be a login redirect html, xml file, etc. if you aren't logged in with |
281 // the right cookies). TODO(asargent) - It would be a nice enhancement to | 281 // the right cookies). TODO(asargent) - It would be a nice enhancement to |
282 // move to parsing this in a sandboxed helper (crbug.com/372878). | 282 // move to parsing this in a sandboxed helper (crbug.com/372878). |
283 std::unique_ptr<base::Value> parsed(base::JSONReader::Read(*response)); | 283 std::unique_ptr<base::Value> parsed(base::JSONReader::Read(*response)); |
284 if (parsed) { | 284 if (parsed) { |
285 VLOG(1) << "JSON parsed ok for " << extension_id_; | 285 LOG(WARNING) << "JSON parsed ok for " << extension_id_; |
286 | 286 |
287 parsed.reset(); // no longer needed | 287 parsed.reset(); // no longer needed |
288 base::FilePath destination = | 288 base::FilePath destination = |
289 file_util::GetVerifiedContentsPath(extension_path_); | 289 file_util::GetVerifiedContentsPath(extension_path_); |
290 size_t size = response->size(); | 290 size_t size = response->size(); |
291 base::PostTaskAndReplyWithResult( | 291 base::PostTaskAndReplyWithResult( |
292 content::BrowserThread::GetBlockingPool(), | 292 content::BrowserThread::GetBlockingPool(), |
293 FROM_HERE, | 293 FROM_HERE, |
294 base::Bind(&WriteFileHelper, destination, base::Passed(&response)), | 294 base::Bind(&WriteFileHelper, destination, base::Passed(&response)), |
295 base::Bind( | 295 base::Bind( |
(...skipping 98 matching lines...)
394 continue; | 394 continue; |
395 } | 395 } |
396 | 396 |
397 // Iterate through taking the hash of each block of size (block_size_) of | 397 // Iterate through taking the hash of each block of size (block_size_) of |
398 // the file. | 398 // the file. |
399 std::vector<std::string> hashes; | 399 std::vector<std::string> hashes; |
400 ComputedHashes::ComputeHashesForContent(contents, block_size_, &hashes); | 400 ComputedHashes::ComputeHashesForContent(contents, block_size_, &hashes); |
401 std::string root = | 401 std::string root = |
402 ComputeTreeHashRoot(hashes, block_size_ / crypto::kSHA256Length); | 402 ComputeTreeHashRoot(hashes, block_size_ / crypto::kSHA256Length); |
403 if (!verified_contents_->TreeHashRootEquals(relative_path, root)) { | 403 if (!verified_contents_->TreeHashRootEquals(relative_path, root)) { |
404 VLOG(1) << "content mismatch for " << relative_path.AsUTF8Unsafe(); | 404 LOG(WARNING) << "content mismatch for " << relative_path.AsUTF8Unsafe(); |
405 hash_mismatch_paths_.insert(relative_path); | 405 hash_mismatch_paths_.insert(relative_path); |
406 continue; | 406 continue; |
407 } | 407 } |
408 | 408 |
409 writer.AddHashes(relative_path, block_size_, hashes); | 409 writer.AddHashes(relative_path, block_size_, hashes); |
410 } | 410 } |
411 bool result = writer.WriteToFile(hashes_file); | 411 bool result = writer.WriteToFile(hashes_file); |
412 UMA_HISTOGRAM_TIMES("ExtensionContentHashFetcher.CreateHashesTime", | 412 UMA_HISTOGRAM_TIMES("ExtensionContentHashFetcher.CreateHashesTime", |
413 timer.Elapsed()); | 413 timer.Elapsed()); |
414 return result; | 414 return result; |
(...skipping 83 matching lines...)
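For readers unfamiliar with the hashing loop in the hunk above: each file is read, split into block_size_-byte blocks, each block is SHA-256 hashed, and the leaf hashes are folded into a single root with branch factor block_size_ / crypto::kSHA256Length, which is then compared against the expected root recorded in the fetched verified contents. A rough, illustrative sketch of that folding step (not the actual ComputeTreeHashRoot implementation):

#include <string>
#include <vector>
#include "crypto/sha2.h"

// Illustrative only: repeatedly hash the concatenation of up to
// |branch_factor| child hashes until a single root remains.
std::string TreeHashRootSketch(std::vector<std::string> hashes,
                               size_t branch_factor) {
  while (hashes.size() > 1) {
    std::vector<std::string> parents;
    for (size_t i = 0; i < hashes.size(); i += branch_factor) {
      std::string combined;
      for (size_t j = i; j < i + branch_factor && j < hashes.size(); ++j)
        combined += hashes[j];
      parents.push_back(crypto::SHA256HashString(combined));
    }
    hashes.swap(parents);
  }
  return hashes.empty() ? std::string() : hashes[0];
}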
498 | 498 |
499 for (JobMap::iterator i = jobs_.begin(); i != jobs_.end(); ++i) { | 499 for (JobMap::iterator i = jobs_.begin(); i != jobs_.end(); ++i) { |
500 if (i->second.get() == job) { | 500 if (i->second.get() == job) { |
501 jobs_.erase(i); | 501 jobs_.erase(i); |
502 break; | 502 break; |
503 } | 503 } |
504 } | 504 } |
505 } | 505 } |
506 | 506 |
507 } // namespace extensions | 507 } // namespace extensions |