OLD | NEW |
1 // Copyright 2014 The Chromium Authors. All rights reserved. | 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "extensions/browser/content_hash_fetcher.h" | 5 #include "extensions/browser/content_hash_fetcher.h" |
6 | 6 |
7 #include <stddef.h> | 7 #include <stddef.h> |
8 | 8 |
9 #include <algorithm> | 9 #include <algorithm> |
10 #include <memory> | 10 #include <memory> |
(...skipping 55 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
66 // Cancels this job, which will attempt to stop I/O operations sooner than | 66 // Cancels this job, which will attempt to stop I/O operations sooner than |
67 // just waiting for the entire job to complete. Safe to call from any thread. | 67 // just waiting for the entire job to complete. Safe to call from any thread. |
68 void Cancel(); | 68 void Cancel(); |
69 | 69 |
70 // Checks whether this job has been cancelled. Safe to call from any thread. | 70 // Checks whether this job has been cancelled. Safe to call from any thread. |
71 bool IsCancelled(); | 71 bool IsCancelled(); |
72 | 72 |
73 // Returns whether this job was successful (we have both verified contents | 73 // Returns whether this job was successful (we have both verified contents |
74 // and computed hashes). Even if the job was a success, there might have been | 74 // and computed hashes). Even if the job was a success, there might have been |
75 // files that were found to have contents not matching expectations; these | 75 // files that were found to have contents not matching expectations; these |
76 // are available by calling hash_mismatch_paths(). | 76 // are available by calling hash_mismatch_unix_paths(). |
77 bool success() { return success_; } | 77 bool success() { return success_; } |
78 | 78 |
79 bool force() { return force_; } | 79 bool force() { return force_; } |
80 | 80 |
81 const std::string& extension_id() { return extension_id_; } | 81 const std::string& extension_id() { return extension_id_; } |
82 | 82 |
83 // Returns the set of paths that had a hash mismatch. | 83 // Returns the set of paths (with unix style '/' separators) that had a hash |
84 const std::set<base::FilePath>& hash_mismatch_paths() { | 84 // mismatch. |
85 return hash_mismatch_paths_; | 85 const std::set<base::FilePath>& hash_mismatch_unix_paths() { |
| 86 return hash_mismatch_unix_paths_; |
86 } | 87 } |
87 | 88 |
88 private: | 89 private: |
89 friend class base::RefCountedThreadSafe<ContentHashFetcherJob>; | 90 friend class base::RefCountedThreadSafe<ContentHashFetcherJob>; |
90 ~ContentHashFetcherJob() override; | 91 ~ContentHashFetcherJob() override; |
91 | 92 |
92 // Tries to load a verified_contents.json file at |path|. On successfully | 93 // Tries to load a verified_contents.json file at |path|. On successfully |
93 // reading and validating the file, the verified_contents_ member variable will | 94 // reading and validating the file, the verified_contents_ member variable will |
94 // be set and this function will return true. If the file does not exist, or | 95 // be set and this function will return true. If the file does not exist, or |
95 // exists but is invalid, it will return false. Also, any invalid | 96 // exists but is invalid, it will return false. Also, any invalid |
(...skipping 50 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
146 ContentVerifierKey key_; | 147 ContentVerifierKey key_; |
147 | 148 |
148 // The parsed contents of the verified_contents.json file, either read from | 149 // The parsed contents of the verified_contents.json file, either read from |
149 // disk or fetched from the network and then written to disk. | 150 // disk or fetched from the network and then written to disk. |
150 std::unique_ptr<VerifiedContents> verified_contents_; | 151 std::unique_ptr<VerifiedContents> verified_contents_; |
151 | 152 |
152 // Whether this job succeeded. | 153 // Whether this job succeeded. |
153 bool success_; | 154 bool success_; |
154 | 155 |
155 // Paths that were found to have a mismatching hash. | 156 // Paths that were found to have a mismatching hash. |
156 std::set<base::FilePath> hash_mismatch_paths_; | 157 std::set<base::FilePath> hash_mismatch_unix_paths_; |
157 | 158 |
158 // The block size to use for hashing. | 159 // The block size to use for hashing. |
159 int block_size_; | 160 int block_size_; |
160 | 161 |
161 // Note: this may be accessed from multiple threads, so all access should | 162 // Note: this may be accessed from multiple threads, so all access should |
162 // be protected by |cancelled_lock_|. | 163 // be protected by |cancelled_lock_|. |
163 bool cancelled_; | 164 bool cancelled_; |
164 | 165 |
165 // A lock for synchronizing access to |cancelled_|. | 166 // A lock for synchronizing access to |cancelled_|. |
166 base::Lock cancelled_lock_; | 167 base::Lock cancelled_lock_; |
(...skipping 231 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
398 paths.insert(full_path); | 399 paths.insert(full_path); |
399 } | 400 } |
400 | 401 |
401 // Now iterate over all the paths in sorted order and compute the block hashes | 402 // Now iterate over all the paths in sorted order and compute the block hashes |
402 // for each one. | 403 // for each one. |
403 ComputedHashes::Writer writer; | 404 ComputedHashes::Writer writer; |
404 for (SortedFilePathSet::iterator i = paths.begin(); i != paths.end(); ++i) { | 405 for (SortedFilePathSet::iterator i = paths.begin(); i != paths.end(); ++i) { |
405 if (IsCancelled()) | 406 if (IsCancelled()) |
406 return false; | 407 return false; |
407 const base::FilePath& full_path = *i; | 408 const base::FilePath& full_path = *i; |
408 base::FilePath relative_path; | 409 base::FilePath relative_unix_path; |
409 extension_path_.AppendRelativePath(full_path, &relative_path); | 410 extension_path_.AppendRelativePath(full_path, &relative_unix_path); |
410 relative_path = relative_path.NormalizePathSeparatorsTo('/'); | 411 relative_unix_path = relative_unix_path.NormalizePathSeparatorsTo('/'); |
411 | 412 |
412 if (!verified_contents_->HasTreeHashRoot(relative_path)) | 413 if (!verified_contents_->HasTreeHashRoot(relative_unix_path)) |
413 continue; | 414 continue; |
414 | 415 |
415 std::string contents; | 416 std::string contents; |
416 if (!base::ReadFileToString(full_path, &contents)) { | 417 if (!base::ReadFileToString(full_path, &contents)) { |
417 LOG(ERROR) << "Could not read " << full_path.MaybeAsASCII(); | 418 LOG(ERROR) << "Could not read " << full_path.MaybeAsASCII(); |
418 continue; | 419 continue; |
419 } | 420 } |
420 | 421 |
421 // Iterate through taking the hash of each block of size (block_size_) of | 422 // Iterate through taking the hash of each block of size (block_size_) of |
422 // the file. | 423 // the file. |
423 std::vector<std::string> hashes; | 424 std::vector<std::string> hashes; |
424 ComputedHashes::ComputeHashesForContent(contents, block_size_, &hashes); | 425 ComputedHashes::ComputeHashesForContent(contents, block_size_, &hashes); |
425 std::string root = | 426 std::string root = |
426 ComputeTreeHashRoot(hashes, block_size_ / crypto::kSHA256Length); | 427 ComputeTreeHashRoot(hashes, block_size_ / crypto::kSHA256Length); |
427 if (!verified_contents_->TreeHashRootEquals(relative_path, root)) { | 428 if (!verified_contents_->TreeHashRootEquals(relative_unix_path, root)) { |
428 VLOG(1) << "content mismatch for " << relative_path.AsUTF8Unsafe(); | 429 VLOG(1) << "content mismatch for " << relative_unix_path.AsUTF8Unsafe(); |
429 hash_mismatch_paths_.insert(relative_path); | 430 hash_mismatch_unix_paths_.insert(relative_unix_path); |
430 continue; | 431 continue; |
431 } | 432 } |
432 | 433 |
433 writer.AddHashes(relative_path, block_size_, hashes); | 434 writer.AddHashes(relative_unix_path, block_size_, hashes); |
434 } | 435 } |
435 bool result = writer.WriteToFile(hashes_file); | 436 bool result = writer.WriteToFile(hashes_file); |
436 UMA_HISTOGRAM_TIMES("ExtensionContentHashFetcher.CreateHashesTime", | 437 UMA_HISTOGRAM_TIMES("ExtensionContentHashFetcher.CreateHashesTime", |
437 timer.Elapsed()); | 438 timer.Elapsed()); |
438 return result; | 439 return result; |
439 } | 440 } |
440 | 441 |
441 void ContentHashFetcherJob::DispatchCallback() { | 442 void ContentHashFetcherJob::DispatchCallback() { |
442 { | 443 { |
443 base::AutoLock autolock(cancelled_lock_); | 444 base::AutoLock autolock(cancelled_lock_); |
(...skipping 63 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
507 IdAndVersion key(extension->id(), extension->version()->GetString()); | 508 IdAndVersion key(extension->id(), extension->version()->GetString()); |
508 JobMap::iterator found = jobs_.find(key); | 509 JobMap::iterator found = jobs_.find(key); |
509 if (found != jobs_.end()) { | 510 if (found != jobs_.end()) { |
510 found->second->Cancel(); | 511 found->second->Cancel(); |
511 jobs_.erase(found); | 512 jobs_.erase(found); |
512 } | 513 } |
513 } | 514 } |
514 | 515 |
515 void ContentHashFetcher::JobFinished(ContentHashFetcherJob* job) { | 516 void ContentHashFetcher::JobFinished(ContentHashFetcherJob* job) { |
516 if (!job->IsCancelled()) { | 517 if (!job->IsCancelled()) { |
517 fetch_callback_.Run(job->extension_id(), | 518 fetch_callback_.Run(job->extension_id(), job->success(), job->force(), |
518 job->success(), | 519 job->hash_mismatch_unix_paths()); |
519 job->force(), | |
520 job->hash_mismatch_paths()); | |
521 } | 520 } |
522 | 521 |
523 for (JobMap::iterator i = jobs_.begin(); i != jobs_.end(); ++i) { | 522 for (JobMap::iterator i = jobs_.begin(); i != jobs_.end(); ++i) { |
524 if (i->second.get() == job) { | 523 if (i->second.get() == job) { |
525 jobs_.erase(i); | 524 jobs_.erase(i); |
526 break; | 525 break; |
527 } | 526 } |
528 } | 527 } |
529 } | 528 } |
530 | 529 |
531 } // namespace extensions | 530 } // namespace extensions |
OLD | NEW |