OLD | NEW |
1 // Copyright 2014 The Chromium Authors. All rights reserved. | 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "extensions/browser/content_hash_fetcher.h" | 5 #include "extensions/browser/content_hash_fetcher.h" |
6 | 6 |
7 #include <algorithm> | 7 #include <algorithm> |
8 | 8 |
9 #include "base/base64.h" | 9 #include "base/base64.h" |
10 #include "base/file_util.h" | 10 #include "base/file_util.h" |
11 #include "base/files/file_enumerator.h" | 11 #include "base/files/file_enumerator.h" |
12 #include "base/json/json_reader.h" | 12 #include "base/json/json_reader.h" |
13 #include "base/memory/ref_counted.h" | 13 #include "base/memory/ref_counted.h" |
14 #include "base/metrics/histogram.h" | 14 #include "base/metrics/histogram.h" |
15 #include "base/synchronization/lock.h" | 15 #include "base/synchronization/lock.h" |
16 #include "base/task_runner_util.h" | 16 #include "base/task_runner_util.h" |
17 #include "base/timer/elapsed_timer.h" | 17 #include "base/timer/elapsed_timer.h" |
18 #include "base/version.h" | 18 #include "base/version.h" |
19 #include "content/public/browser/browser_context.h" | 19 #include "content/public/browser/browser_context.h" |
20 #include "content/public/browser/browser_thread.h" | 20 #include "content/public/browser/browser_thread.h" |
21 #include "crypto/sha2.h" | 21 #include "crypto/sha2.h" |
22 #include "extensions/browser/computed_hashes.h" | 22 #include "extensions/browser/computed_hashes.h" |
23 #include "extensions/browser/content_hash_tree.h" | 23 #include "extensions/browser/content_hash_tree.h" |
24 #include "extensions/browser/content_verifier_delegate.h" | 24 #include "extensions/browser/content_verifier_delegate.h" |
25 #include "extensions/browser/extension_registry.h" | |
26 #include "extensions/browser/verified_contents.h" | 25 #include "extensions/browser/verified_contents.h" |
27 #include "extensions/common/constants.h" | 26 #include "extensions/common/constants.h" |
28 #include "extensions/common/extension.h" | 27 #include "extensions/common/extension.h" |
29 #include "extensions/common/file_util.h" | 28 #include "extensions/common/file_util.h" |
30 #include "net/base/load_flags.h" | 29 #include "net/base/load_flags.h" |
31 #include "net/url_request/url_fetcher.h" | 30 #include "net/url_request/url_fetcher.h" |
32 #include "net/url_request/url_fetcher_delegate.h" | 31 #include "net/url_request/url_fetcher_delegate.h" |
33 #include "net/url_request/url_request_status.h" | 32 #include "net/url_request/url_request_status.h" |
34 | 33 |
35 namespace { | 34 namespace { |
(...skipping 42 matching lines...)
78 | 77 |
79 // Returns the set of paths that had a hash mismatch. | 78 // Returns the set of paths that had a hash mismatch. |
80 const std::set<base::FilePath>& hash_mismatch_paths() { | 79 const std::set<base::FilePath>& hash_mismatch_paths() { |
81 return hash_mismatch_paths_; | 80 return hash_mismatch_paths_; |
82 } | 81 } |
83 | 82 |
84 private: | 83 private: |
85 friend class base::RefCountedThreadSafe<ContentHashFetcherJob>; | 84 friend class base::RefCountedThreadSafe<ContentHashFetcherJob>; |
86 virtual ~ContentHashFetcherJob(); | 85 virtual ~ContentHashFetcherJob(); |
87 | 86 |
 | 87 // Tries to load a verified_contents.json file at |path|. On successfully |
 | 88 // reading and validating the file, the verified_contents_ member variable |
 | 89 // will be set and this function will return true. If the file does not |
 | 90 // exist, or exists but is invalid, it will return false. Also, any invalid |
 | 91 // file will be removed from disk. |
| 92 bool LoadVerifiedContents(const base::FilePath& path); |
| 93 |
88 // Callback for when we're done doing file I/O to see if we already have | 94 // Callback for when we're done doing file I/O to see if we already have |
89 // a verified contents file. If we don't, this will kick off a network | 95 // a verified contents file. If we don't, this will kick off a network |
90 // request to get one. | 96 // request to get one. |
91 void DoneCheckingForVerifiedContents(bool found); | 97 void DoneCheckingForVerifiedContents(bool found); |
92 | 98 |
93 // URLFetcherDelegate interface | 99 // URLFetcherDelegate interface |
94 virtual void OnURLFetchComplete(const net::URLFetcher* source) OVERRIDE; | 100 virtual void OnURLFetchComplete(const net::URLFetcher* source) OVERRIDE; |
95 | 101 |
96 // Callback for when we're done ensuring we have verified contents, and are | 102 // Callback for when we're done ensuring we have verified contents, and are |
97 // ready to move on to MaybeCreateHashes. | 103 // ready to move on to MaybeCreateHashes. |
(...skipping 29 matching lines...)
127 | 133 |
128 CompletionCallback callback_; | 134 CompletionCallback callback_; |
129 content::BrowserThread::ID creation_thread_; | 135 content::BrowserThread::ID creation_thread_; |
130 | 136 |
131 // Used for fetching content signatures. | 137 // Used for fetching content signatures. |
132 scoped_ptr<net::URLFetcher> url_fetcher_; | 138 scoped_ptr<net::URLFetcher> url_fetcher_; |
133 | 139 |
134 // The key used to validate verified_contents.json. | 140 // The key used to validate verified_contents.json. |
135 ContentVerifierKey key_; | 141 ContentVerifierKey key_; |
136 | 142 |
| 143 // The parsed contents of the verified_contents.json file, either read from |
| 144 // disk or fetched from the network and then written to disk. |
| 145 scoped_ptr<VerifiedContents> verified_contents_; |
| 146 |
137 // Whether this job succeeded. | 147 // Whether this job succeeded. |
138 bool success_; | 148 bool success_; |
139 | 149 |
140 // Paths that were found to have a mismatching hash. | 150 // Paths that were found to have a mismatching hash. |
141 std::set<base::FilePath> hash_mismatch_paths_; | 151 std::set<base::FilePath> hash_mismatch_paths_; |
142 | 152 |
143 // The block size to use for hashing. | 153 // The block size to use for hashing. |
144 int block_size_; | 154 int block_size_; |
145 | 155 |
146 // Note: this may be accessed from multiple threads, so all access should | 156 // Note: this may be accessed from multiple threads, so all access should |
(...skipping 30 matching lines...)
177 content::BrowserThread::GetCurrentThreadIdentifier(&creation_thread_); | 187 content::BrowserThread::GetCurrentThreadIdentifier(&creation_thread_); |
178 DCHECK(got_id); | 188 DCHECK(got_id); |
179 } | 189 } |
180 | 190 |
181 void ContentHashFetcherJob::Start() { | 191 void ContentHashFetcherJob::Start() { |
182 base::FilePath verified_contents_path = | 192 base::FilePath verified_contents_path = |
183 file_util::GetVerifiedContentsPath(extension_path_); | 193 file_util::GetVerifiedContentsPath(extension_path_); |
184 base::PostTaskAndReplyWithResult( | 194 base::PostTaskAndReplyWithResult( |
185 content::BrowserThread::GetBlockingPool(), | 195 content::BrowserThread::GetBlockingPool(), |
186 FROM_HERE, | 196 FROM_HERE, |
187 base::Bind(&base::PathExists, verified_contents_path), | 197 base::Bind(&ContentHashFetcherJob::LoadVerifiedContents, |
| 198 this, |
| 199 verified_contents_path), |
188 base::Bind(&ContentHashFetcherJob::DoneCheckingForVerifiedContents, | 200 base::Bind(&ContentHashFetcherJob::DoneCheckingForVerifiedContents, |
189 this)); | 201 this)); |
190 } | 202 } |
191 | 203 |
192 void ContentHashFetcherJob::Cancel() { | 204 void ContentHashFetcherJob::Cancel() { |
193 base::AutoLock autolock(cancelled_lock_); | 205 base::AutoLock autolock(cancelled_lock_); |
194 cancelled_ = true; | 206 cancelled_ = true; |
195 } | 207 } |
196 | 208 |
197 bool ContentHashFetcherJob::IsCancelled() { | 209 bool ContentHashFetcherJob::IsCancelled() { |
198 base::AutoLock autolock(cancelled_lock_); | 210 base::AutoLock autolock(cancelled_lock_); |
199 bool result = cancelled_; | 211 bool result = cancelled_; |
200 return result; | 212 return result; |
201 } | 213 } |
202 | 214 |
203 ContentHashFetcherJob::~ContentHashFetcherJob() { | 215 ContentHashFetcherJob::~ContentHashFetcherJob() { |
204 } | 216 } |
205 | 217 |
| 218 bool ContentHashFetcherJob::LoadVerifiedContents(const base::FilePath& path) { |
| 219 if (!base::PathExists(path)) |
| 220 return false; |
| 221 verified_contents_.reset(new VerifiedContents(key_.data, key_.size)); |
| 222 if (!verified_contents_->InitFrom(path, false)) { |
| 223 verified_contents_.reset(); |
| 224 if (!base::DeleteFile(path, false)) |
| 225 LOG(WARNING) << "Failed to delete " << path.value(); |
| 226 return false; |
| 227 } |
| 228 return true; |
| 229 } |
| 230 |
206 void ContentHashFetcherJob::DoneCheckingForVerifiedContents(bool found) { | 231 void ContentHashFetcherJob::DoneCheckingForVerifiedContents(bool found) { |
207 if (IsCancelled()) | 232 if (IsCancelled()) |
208 return; | 233 return; |
209 if (found) { | 234 if (found) { |
210 VLOG(1) << "Found verified contents for " << extension_id_; | 235 VLOG(1) << "Found verified contents for " << extension_id_; |
211 DoneFetchingVerifiedContents(true); | 236 DoneFetchingVerifiedContents(true); |
212 } else { | 237 } else { |
213 VLOG(1) << "Missing verified contents for " << extension_id_ | 238 VLOG(1) << "Missing verified contents for " << extension_id_ |
214 << ", fetching..."; | 239 << ", fetching..."; |
215 url_fetcher_.reset( | 240 url_fetcher_.reset( |
(...skipping 96 matching lines...)
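The rest of the signature fetch setup sits in the collapsed region above. As orientation only, and not the patch's actual code, a net::URLFetcher setup of this era would look roughly like the sketch below; the fetch_url_ and request_context_ member names are assumptions, not taken from the visible diff.

// Sketch: how DoneCheckingForVerifiedContents() would plausibly start the
// request for verified_contents.json when none was found on disk. The class
// is the URLFetcherDelegate, so OnURLFetchComplete() receives the response.
url_fetcher_.reset(
    net::URLFetcher::Create(fetch_url_, net::URLFetcher::GET, this));
url_fetcher_->SetRequestContext(request_context_);
url_fetcher_->SetLoadFlags(net::LOAD_DO_NOT_SEND_COOKIES |
                           net::LOAD_DO_NOT_SAVE_COOKIES |
                           net::LOAD_DISABLE_CACHE);
url_fetcher_->Start();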
312 } | 337 } |
313 | 338 |
314 bool ContentHashFetcherJob::CreateHashes(const base::FilePath& hashes_file) { | 339 bool ContentHashFetcherJob::CreateHashes(const base::FilePath& hashes_file) { |
315 base::ElapsedTimer timer; | 340 base::ElapsedTimer timer; |
316 if (IsCancelled()) | 341 if (IsCancelled()) |
317 return false; | 342 return false; |
318 // Make sure the directory exists. | 343 // Make sure the directory exists. |
319 if (!base::CreateDirectoryAndGetError(hashes_file.DirName(), NULL)) | 344 if (!base::CreateDirectoryAndGetError(hashes_file.DirName(), NULL)) |
320 return false; | 345 return false; |
321 | 346 |
322 base::FilePath verified_contents_path = | 347 if (!verified_contents_.get()) { |
323 file_util::GetVerifiedContentsPath(extension_path_); | 348 base::FilePath verified_contents_path = |
324 VerifiedContents verified_contents(key_.data, key_.size); | 349 file_util::GetVerifiedContentsPath(extension_path_); |
325 if (!verified_contents.InitFrom(verified_contents_path, false)) | 350 verified_contents_.reset(new VerifiedContents(key_.data, key_.size)); |
326 return false; | 351 if (!verified_contents_->InitFrom(verified_contents_path, false)) { |
 | 352 verified_contents_.reset(); |
 | 353 return false; |
 | 354 } |
327 | 355 } |
328 base::FileEnumerator enumerator(extension_path_, | 356 base::FileEnumerator enumerator(extension_path_, |
329 true, /* recursive */ | 357 true, /* recursive */ |
330 base::FileEnumerator::FILES); | 358 base::FileEnumerator::FILES); |
331 // First discover all the file paths and put them in a sorted set. | 359 // First discover all the file paths and put them in a sorted set. |
332 SortedFilePathSet paths; | 360 SortedFilePathSet paths; |
333 for (;;) { | 361 for (;;) { |
334 if (IsCancelled()) | 362 if (IsCancelled()) |
335 return false; | 363 return false; |
336 | 364 |
337 base::FilePath full_path = enumerator.Next(); | 365 base::FilePath full_path = enumerator.Next(); |
338 if (full_path.empty()) | 366 if (full_path.empty()) |
339 break; | 367 break; |
340 paths.insert(full_path); | 368 paths.insert(full_path); |
341 } | 369 } |
342 | 370 |
343 // Now iterate over all the paths in sorted order and compute the block hashes | 371 // Now iterate over all the paths in sorted order and compute the block hashes |
344 // for each one. | 372 // for each one. |
345 ComputedHashes::Writer writer; | 373 ComputedHashes::Writer writer; |
346 for (SortedFilePathSet::iterator i = paths.begin(); i != paths.end(); ++i) { | 374 for (SortedFilePathSet::iterator i = paths.begin(); i != paths.end(); ++i) { |
347 if (IsCancelled()) | 375 if (IsCancelled()) |
348 return false; | 376 return false; |
349 const base::FilePath& full_path = *i; | 377 const base::FilePath& full_path = *i; |
350 base::FilePath relative_path; | 378 base::FilePath relative_path; |
351 extension_path_.AppendRelativePath(full_path, &relative_path); | 379 extension_path_.AppendRelativePath(full_path, &relative_path); |
352 relative_path = relative_path.NormalizePathSeparatorsTo('/'); | 380 relative_path = relative_path.NormalizePathSeparatorsTo('/'); |
353 | 381 |
354 const std::string* expected_root = | 382 const std::string* expected_root = |
355 verified_contents.GetTreeHashRoot(relative_path); | 383 verified_contents_->GetTreeHashRoot(relative_path); |
356 if (!expected_root) | 384 if (!expected_root) |
357 continue; | 385 continue; |
358 | 386 |
359 std::string contents; | 387 std::string contents; |
360 if (!base::ReadFileToString(full_path, &contents)) { | 388 if (!base::ReadFileToString(full_path, &contents)) { |
361 LOG(ERROR) << "Could not read " << full_path.MaybeAsASCII(); | 389 LOG(ERROR) << "Could not read " << full_path.MaybeAsASCII(); |
362 continue; | 390 continue; |
363 } | 391 } |
364 | 392 |
365 // Iterate through taking the hash of each block of size (block_size_) of | 393 // Iterate through taking the hash of each block of size (block_size_) of |
(...skipping 104 matching lines...)
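The per-block hashing itself falls inside the collapsed region above. Purely as orientation, and not the patch's actual code, a sketch of that step using helpers this file already includes (crypto/sha2.h, content_hash_tree.h, ComputedHashes::Writer) might look like the following; contents, block_size_, expected_root, relative_path, hash_mismatch_paths_ and writer all appear in the visible code, while the exact loop shape and the writer.AddHashes() call are assumptions.

// Sketch: hash each block_size_-sized chunk of |contents|, fold the block
// hashes into one root, and compare it to the signed root from
// verified_contents.json, recording any mismatching file.
std::vector<std::string> hashes;
size_t offset = 0;
while (offset < contents.size()) {
  size_t bytes_to_read =
      std::min(contents.size() - offset, static_cast<size_t>(block_size_));
  hashes.push_back(std::string(crypto::kSHA256Length, '\0'));
  crypto::SHA256HashString(
      base::StringPiece(contents.data() + offset, bytes_to_read),
      &hashes.back()[0], hashes.back().size());
  offset += bytes_to_read;
}
std::string root =
    ComputeTreeHashRoot(hashes, block_size_ / crypto::kSHA256Length);
if (root != *expected_root) {
  hash_mismatch_paths_.insert(relative_path);
  continue;
}
writer.AddHashes(relative_path, block_size_, hashes);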
470 | 498 |
471 for (JobMap::iterator i = jobs_.begin(); i != jobs_.end(); ++i) { | 499 for (JobMap::iterator i = jobs_.begin(); i != jobs_.end(); ++i) { |
472 if (i->second.get() == job) { | 500 if (i->second.get() == job) { |
473 jobs_.erase(i); | 501 jobs_.erase(i); |
474 break; | 502 break; |
475 } | 503 } |
476 } | 504 } |
477 } | 505 } |
478 | 506 |
479 } // namespace extensions | 507 } // namespace extensions |