OLD | NEW |
1 // Copyright 2014 The Chromium Authors. All rights reserved. | 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "extensions/browser/content_hash_fetcher.h" | 5 #include "extensions/browser/content_hash_fetcher.h" |
6 | 6 |
7 #include <algorithm> | 7 #include <algorithm> |
8 | 8 |
9 #include "base/base64.h" | 9 #include "base/base64.h" |
10 #include "base/file_util.h" | 10 #include "base/file_util.h" |
11 #include "base/files/file_enumerator.h" | 11 #include "base/files/file_enumerator.h" |
12 #include "base/json/json_reader.h" | 12 #include "base/json/json_reader.h" |
13 #include "base/memory/ref_counted.h" | 13 #include "base/memory/ref_counted.h" |
14 #include "base/stl_util.h" | 14 #include "base/stl_util.h" |
15 #include "base/synchronization/lock.h" | 15 #include "base/synchronization/lock.h" |
16 #include "base/task_runner_util.h" | 16 #include "base/task_runner_util.h" |
17 #include "base/version.h" | 17 #include "base/version.h" |
18 #include "content/public/browser/browser_context.h" | 18 #include "content/public/browser/browser_context.h" |
19 #include "content/public/browser/browser_thread.h" | 19 #include "content/public/browser/browser_thread.h" |
20 #include "crypto/secure_hash.h" | 20 #include "crypto/secure_hash.h" |
21 #include "crypto/sha2.h" | 21 #include "crypto/sha2.h" |
22 #include "extensions/browser/computed_hashes.h" | 22 #include "extensions/browser/computed_hashes.h" |
| 23 #include "extensions/browser/content_hash_tree.h" |
23 #include "extensions/browser/extension_registry.h" | 24 #include "extensions/browser/extension_registry.h" |
| 25 #include "extensions/browser/verified_contents.h" |
24 #include "extensions/common/constants.h" | 26 #include "extensions/common/constants.h" |
25 #include "extensions/common/extension.h" | 27 #include "extensions/common/extension.h" |
26 #include "extensions/common/file_util.h" | 28 #include "extensions/common/file_util.h" |
27 #include "net/base/load_flags.h" | 29 #include "net/base/load_flags.h" |
28 #include "net/url_request/url_fetcher.h" | 30 #include "net/url_request/url_fetcher.h" |
29 #include "net/url_request/url_fetcher_delegate.h" | 31 #include "net/url_request/url_fetcher_delegate.h" |
30 #include "net/url_request/url_request_status.h" | 32 #include "net/url_request/url_request_status.h" |
31 | 33 |
32 namespace { | 34 namespace { |
33 | 35 |
34 typedef std::set<base::FilePath> SortedFilePathSet; | 36 typedef std::set<base::FilePath> SortedFilePathSet; |
35 | 37 |
36 } // namespace | 38 } // namespace |
37 | 39 |
38 namespace extensions { | 40 namespace extensions { |
39 | 41 |
40 // This class does the disk and network I/O work needed to ensure we have | 42 // This class does the disk and network I/O work needed to ensure we have |
41 // both a verified_contents.json file from the webstore and a | 43 // both a verified_contents.json file from the webstore and a |
42 // computed_hashes.json file computed over the files in an extension's | 44 // computed_hashes.json file computed over the files in an extension's |
43 // directory. | 45 // directory. |
44 class ContentHashFetcherJob | 46 class ContentHashFetcherJob |
45 : public base::RefCountedThreadSafe<ContentHashFetcherJob>, | 47 : public base::RefCountedThreadSafe<ContentHashFetcherJob>, |
46 public net::URLFetcherDelegate { | 48 public net::URLFetcherDelegate { |
47 public: | 49 public: |
48 typedef base::Callback<void(ContentHashFetcherJob*)> CompletionCallback; | 50 typedef base::Callback<void(ContentHashFetcherJob*)> CompletionCallback; |
49 ContentHashFetcherJob(net::URLRequestContextGetter* request_context, | 51 ContentHashFetcherJob(net::URLRequestContextGetter* request_context, |
| 52 ContentVerifierKey key, |
50 const std::string& extension_id, | 53 const std::string& extension_id, |
51 const base::FilePath& extension_path, | 54 const base::FilePath& extension_path, |
52 const GURL& fetch_url, | 55 const GURL& fetch_url, |
| 56 bool force, |
53 const CompletionCallback& callback); | 57 const CompletionCallback& callback); |
54 | 58 |
55 void Start(); | 59 void Start(); |
56 | 60 |
57 // Cancels this job, which will attempt to stop I/O operations sooner than | 61 // Cancels this job, which will attempt to stop I/O operations sooner than |
58 // just waiting for the entire job to complete. Safe to call from any thread. | 62 // just waiting for the entire job to complete. Safe to call from any thread. |
59 void Cancel(); | 63 void Cancel(); |
60 | 64 |
61 // Returns whether this job was completely successful (we have both verified | 65 // Checks whether this job has been cancelled. Safe to call from any thread. |
62 // contents and computed hashes). | 66 bool IsCancelled(); |
| 67 |
| 68 // Returns whether this job was successful (we have both verified contents |
| 69 // and computed hashes). Even if the job was a success, there might have been |
| 70 // files that were found to have contents not matching expectations; these |
| 71 // are available by calling hash_mismatch_paths(). |
63 bool success() { return success_; } | 72 bool success() { return success_; } |
64 | 73 |
65 // Do we have a verified_contents.json file? | 74 bool force() { return force_; } |
66 bool have_verified_contents() { return have_verified_contents_; } | 75 |
| 76 const std::string& extension_id() { return extension_id_; } |
| 77 |
| 78 // Returns the set of paths that had a hash mismatch. |
| 79 const std::set<base::FilePath>& hash_mismatch_paths() { |
| 80 return hash_mismatch_paths_; |
| 81 } |
67 | 82 |
68 private: | 83 private: |
69 friend class base::RefCountedThreadSafe<ContentHashFetcherJob>; | 84 friend class base::RefCountedThreadSafe<ContentHashFetcherJob>; |
70 virtual ~ContentHashFetcherJob(); | 85 virtual ~ContentHashFetcherJob(); |
71 | 86 |
72 // Checks whether this job has been cancelled. Safe to call from any thread. | |
73 bool IsCancelled(); | |
74 | |
75 // Callback for when we're done doing file I/O to see if we already have | 87 // Callback for when we're done doing file I/O to see if we already have |
76 // a verified contents file. If we don't, this will kick off a network | 88 // a verified contents file. If we don't, this will kick off a network |
77 // request to get one. | 89 // request to get one. |
78 void DoneCheckingForVerifiedContents(bool found); | 90 void DoneCheckingForVerifiedContents(bool found); |
79 | 91 |
80 // URLFetcherDelegate interface | 92 // URLFetcherDelegate interface |
81 virtual void OnURLFetchComplete(const net::URLFetcher* source) OVERRIDE; | 93 virtual void OnURLFetchComplete(const net::URLFetcher* source) OVERRIDE; |
82 | 94 |
83 // Callback for when we're done ensuring we have verified contents, and are | 95 // Callback for when we're done ensuring we have verified contents, and are |
84 // ready to move on to MaybeCreateHashes. | 96 // ready to move on to MaybeCreateHashes. |
(...skipping 18 matching lines...) |
103 // Will call the callback, if we haven't been cancelled. | 115 // Will call the callback, if we haven't been cancelled. |
104 void DispatchCallback(); | 116 void DispatchCallback(); |
105 | 117 |
106 net::URLRequestContextGetter* request_context_; | 118 net::URLRequestContextGetter* request_context_; |
107 std::string extension_id_; | 119 std::string extension_id_; |
108 base::FilePath extension_path_; | 120 base::FilePath extension_path_; |
109 | 121 |
110 // The URL we'll use to fetch a verified_contents.json file. | 122 // The URL we'll use to fetch a verified_contents.json file. |
111 GURL fetch_url_; | 123 GURL fetch_url_; |
112 | 124 |
| 125 bool force_; |
| 126 |
113 CompletionCallback callback_; | 127 CompletionCallback callback_; |
114 content::BrowserThread::ID creation_thread_; | 128 content::BrowserThread::ID creation_thread_; |
115 | 129 |
116 // Used for fetching content signatures. | 130 // Used for fetching content signatures. |
117 scoped_ptr<net::URLFetcher> url_fetcher_; | 131 scoped_ptr<net::URLFetcher> url_fetcher_; |
118 | 132 |
| 133 // The key used to validate verified_contents.json. |
| 134 ContentVerifierKey key_; |
| 135 |
119 // Whether this job succeeded. | 136 // Whether this job succeeded. |
120 bool success_; | 137 bool success_; |
121 | 138 |
122 // Whether we either found a verified contents file, or were successful in | 139 // Paths that were found to have a mismatching hash. |
123 // fetching one and saving it to disk. | 140 std::set<base::FilePath> hash_mismatch_paths_; |
124 bool have_verified_contents_; | |
125 | 141 |
126 // The block size to use for hashing. | 142 // The block size to use for hashing. |
127 int block_size_; | 143 int block_size_; |
128 | 144 |
129 // Note: this may be accessed from multiple threads, so all access should | 145 // Note: this may be accessed from multiple threads, so all access should |
130 // be protected by |cancelled_lock_|. | 146 // be protected by |cancelled_lock_|. |
131 bool cancelled_; | 147 bool cancelled_; |
132 | 148 |
133 // A lock for synchronizing access to |cancelled_|. | 149 // A lock for synchronizing access to |cancelled_|. |
134 base::Lock cancelled_lock_; | 150 base::Lock cancelled_lock_; |
| 151 |
| 152 DISALLOW_COPY_AND_ASSIGN(ContentHashFetcherJob); |
135 }; | 153 }; |
136 | 154 |
137 ContentHashFetcherJob::ContentHashFetcherJob( | 155 ContentHashFetcherJob::ContentHashFetcherJob( |
138 net::URLRequestContextGetter* request_context, | 156 net::URLRequestContextGetter* request_context, |
| 157 ContentVerifierKey key, |
139 const std::string& extension_id, | 158 const std::string& extension_id, |
140 const base::FilePath& extension_path, | 159 const base::FilePath& extension_path, |
141 const GURL& fetch_url, | 160 const GURL& fetch_url, |
| 161 bool force, |
142 const CompletionCallback& callback) | 162 const CompletionCallback& callback) |
143 : request_context_(request_context), | 163 : request_context_(request_context), |
144 extension_id_(extension_id), | 164 extension_id_(extension_id), |
145 extension_path_(extension_path), | 165 extension_path_(extension_path), |
146 fetch_url_(fetch_url), | 166 fetch_url_(fetch_url), |
| 167 force_(force), |
147 callback_(callback), | 168 callback_(callback), |
| 169 key_(key), |
148 success_(false), | 170 success_(false), |
149 have_verified_contents_(false), | |
150 // TODO(asargent) - use the value from verified_contents.json for each | 171 // TODO(asargent) - use the value from verified_contents.json for each |
151 // file, instead of using a constant. | 172 // file, instead of using a constant. |
152 block_size_(4096), | 173 block_size_(4096), |
153 cancelled_(false) { | 174 cancelled_(false) { |
154 bool got_id = | 175 bool got_id = |
155 content::BrowserThread::GetCurrentThreadIdentifier(&creation_thread_); | 176 content::BrowserThread::GetCurrentThreadIdentifier(&creation_thread_); |
156 DCHECK(got_id); | 177 DCHECK(got_id); |
157 } | 178 } |
158 | 179 |
159 void ContentHashFetcherJob::Start() { | 180 void ContentHashFetcherJob::Start() { |
160 base::FilePath verified_contents_path = | 181 base::FilePath verified_contents_path = |
161 file_util::GetVerifiedContentsPath(extension_path_); | 182 file_util::GetVerifiedContentsPath(extension_path_); |
162 base::PostTaskAndReplyWithResult( | 183 base::PostTaskAndReplyWithResult( |
163 content::BrowserThread::GetBlockingPool(), | 184 content::BrowserThread::GetBlockingPool(), |
164 FROM_HERE, | 185 FROM_HERE, |
165 base::Bind(&base::PathExists, verified_contents_path), | 186 base::Bind(&base::PathExists, verified_contents_path), |
166 base::Bind(&ContentHashFetcherJob::DoneCheckingForVerifiedContents, | 187 base::Bind(&ContentHashFetcherJob::DoneCheckingForVerifiedContents, |
167 this)); | 188 this)); |
168 } | 189 } |
169 | 190 |
170 void ContentHashFetcherJob::Cancel() { | 191 void ContentHashFetcherJob::Cancel() { |
171 base::AutoLock autolock(cancelled_lock_); | 192 base::AutoLock autolock(cancelled_lock_); |
172 cancelled_ = true; | 193 cancelled_ = true; |
173 } | 194 } |
174 | 195 |
175 ContentHashFetcherJob::~ContentHashFetcherJob() { | |
176 } | |
177 | |
178 bool ContentHashFetcherJob::IsCancelled() { | 196 bool ContentHashFetcherJob::IsCancelled() { |
179 base::AutoLock autolock(cancelled_lock_); | 197 base::AutoLock autolock(cancelled_lock_); |
180 bool result = cancelled_; | 198 bool result = cancelled_; |
181 return result; | 199 return result; |
182 } | 200 } |
183 | 201 |
| 202 ContentHashFetcherJob::~ContentHashFetcherJob() { |
| 203 } |
| 204 |
184 void ContentHashFetcherJob::DoneCheckingForVerifiedContents(bool found) { | 205 void ContentHashFetcherJob::DoneCheckingForVerifiedContents(bool found) { |
185 if (IsCancelled()) | 206 if (IsCancelled()) |
186 return; | 207 return; |
187 if (found) { | 208 if (found) { |
| 209 VLOG(1) << "Found verified contents for " << extension_id_; |
188 DoneFetchingVerifiedContents(true); | 210 DoneFetchingVerifiedContents(true); |
189 } else { | 211 } else { |
| 212 VLOG(1) << "Missing verified contents for " << extension_id_ |
| 213 << ", fetching..."; |
190 url_fetcher_.reset( | 214 url_fetcher_.reset( |
191 net::URLFetcher::Create(fetch_url_, net::URLFetcher::GET, this)); | 215 net::URLFetcher::Create(fetch_url_, net::URLFetcher::GET, this)); |
192 url_fetcher_->SetRequestContext(request_context_); | 216 url_fetcher_->SetRequestContext(request_context_); |
193 url_fetcher_->SetLoadFlags(net::LOAD_DO_NOT_SEND_COOKIES | | 217 url_fetcher_->SetLoadFlags(net::LOAD_DO_NOT_SEND_COOKIES | |
194 net::LOAD_DO_NOT_SAVE_COOKIES | | 218 net::LOAD_DO_NOT_SAVE_COOKIES | |
195 net::LOAD_DISABLE_CACHE); | 219 net::LOAD_DISABLE_CACHE); |
196 url_fetcher_->SetAutomaticallyRetryOnNetworkChanges(3); | 220 url_fetcher_->SetAutomaticallyRetryOnNetworkChanges(3); |
197 url_fetcher_->Start(); | 221 url_fetcher_->Start(); |
198 } | 222 } |
199 } | 223 } |
200 | 224 |
201 // Helper function that lets us pass ownership of the string contents to be | 225 // Helper function that lets us pass ownership of the string contents to be |
202 // written into a file via base::Bind. Also ensures that the directory for | 226 // written into a file via base::Bind. Also ensures that the directory for |
203 // |path| exists, creating it if needed. | 227 // |path| exists, creating it if needed. |
204 static int WriteFileHelper(const base::FilePath& path, | 228 static int WriteFileHelper(const base::FilePath& path, |
205 scoped_ptr<std::string> content) { | 229 scoped_ptr<std::string> content) { |
206 base::FilePath dir = path.DirName(); | 230 base::FilePath dir = path.DirName(); |
207 return (base::CreateDirectoryAndGetError(dir, NULL) && | 231 return (base::CreateDirectoryAndGetError(dir, NULL) && |
208 base::WriteFile(path, content->data(), content->size())); | 232 base::WriteFile(path, content->data(), content->size())); |
209 } | 233 } |
210 | 234 |
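For reference, the ownership transfer used with WriteFileHelper relies on base::Passed(), which moves a scoped_ptr into the bound callback so the response string reaches the blocking pool without a copy. A minimal sketch of the mechanics, assuming a hypothetical TakeString() callee that is not part of this patch:

#include <string>

#include "base/bind.h"
#include "base/callback.h"
#include "base/memory/scoped_ptr.h"

// Hypothetical callee; mirrors WriteFileHelper's scoped_ptr parameter.
void TakeString(scoped_ptr<std::string> s) {
  // |s| owns the string here and frees it when it goes out of scope.
}

void Example() {
  scoped_ptr<std::string> data(new std::string("payload"));
  // base::Passed() moves |data| into the callback object itself.
  base::Closure task = base::Bind(&TakeString, base::Passed(&data));
  // |data| is now NULL; the string is owned by |task| until it runs (or is
  // destroyed without running, which also frees the string).
  task.Run();
}

Note that a callback bound with base::Passed() can only be run once, since running it consumes the bound scoped_ptr.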
211 void ContentHashFetcherJob::OnURLFetchComplete(const net::URLFetcher* source) { | 235 void ContentHashFetcherJob::OnURLFetchComplete(const net::URLFetcher* source) { |
| 236 VLOG(1) << "URLFetchComplete for " << extension_id_ |
| 237 << " is_success:" << url_fetcher_->GetStatus().is_success() << " " |
| 238 << fetch_url_.possibly_invalid_spec(); |
212 if (IsCancelled()) | 239 if (IsCancelled()) |
213 return; | 240 return; |
214 scoped_ptr<std::string> response(new std::string); | 241 scoped_ptr<std::string> response(new std::string); |
215 if (!url_fetcher_->GetStatus().is_success() || | 242 if (!url_fetcher_->GetStatus().is_success() || |
216 !url_fetcher_->GetResponseAsString(response.get())) { | 243 !url_fetcher_->GetResponseAsString(response.get())) { |
217 DoneFetchingVerifiedContents(false); | 244 DoneFetchingVerifiedContents(false); |
218 return; | 245 return; |
219 } | 246 } |
220 | 247 |
221 // Parse the response to make sure it is valid JSON (on staging it can | 248 // Parse the response to make sure it is valid JSON (on staging it can |
222 // sometimes be a login-redirect HTML page, an XML file, etc. if you aren't | 249 // sometimes be a login-redirect HTML page, an XML file, etc. if you aren't |
223 // logged in with the right cookies). TODO(asargent) - It would be a nice | 250 // logged in with the right cookies). TODO(asargent) - It would be a nice |
224 // enhancement to move to parsing this in a sandboxed helper (crbug.com/372878). | 251 // enhancement to move to parsing this in a sandboxed helper (crbug.com/372878). |
225 scoped_ptr<base::Value> parsed(base::JSONReader::Read(*response)); | 252 scoped_ptr<base::Value> parsed(base::JSONReader::Read(*response)); |
226 if (parsed) { | 253 if (parsed) { |
| 254 VLOG(1) << "JSON parsed ok for " << extension_id_; |
| 255 |
227 parsed.reset(); // no longer needed | 256 parsed.reset(); // no longer needed |
228 base::FilePath destination = | 257 base::FilePath destination = |
229 file_util::GetVerifiedContentsPath(extension_path_); | 258 file_util::GetVerifiedContentsPath(extension_path_); |
230 size_t size = response->size(); | 259 size_t size = response->size(); |
231 base::PostTaskAndReplyWithResult( | 260 base::PostTaskAndReplyWithResult( |
232 content::BrowserThread::GetBlockingPool(), | 261 content::BrowserThread::GetBlockingPool(), |
233 FROM_HERE, | 262 FROM_HERE, |
234 base::Bind(&WriteFileHelper, destination, base::Passed(&response)), | 263 base::Bind(&WriteFileHelper, destination, base::Passed(&response)), |
235 base::Bind( | 264 base::Bind( |
236 &ContentHashFetcherJob::OnVerifiedContentsWritten, this, size)); | 265 &ContentHashFetcherJob::OnVerifiedContentsWritten, this, size)); |
237 } else { | 266 } else { |
238 DoneFetchingVerifiedContents(false); | 267 DoneFetchingVerifiedContents(false); |
239 } | 268 } |
240 } | 269 } |
241 | 270 |
242 void ContentHashFetcherJob::OnVerifiedContentsWritten(size_t expected_size, | 271 void ContentHashFetcherJob::OnVerifiedContentsWritten(size_t expected_size, |
243 int write_result) { | 272 int write_result) { |
244 bool success = | 273 bool success = |
245 (write_result >= 0 && static_cast<size_t>(write_result) == expected_size); | 274 (write_result >= 0 && static_cast<size_t>(write_result) == expected_size); |
246 DoneFetchingVerifiedContents(success); | 275 DoneFetchingVerifiedContents(success); |
247 } | 276 } |
248 | 277 |
249 void ContentHashFetcherJob::DoneFetchingVerifiedContents(bool success) { | 278 void ContentHashFetcherJob::DoneFetchingVerifiedContents(bool success) { |
250 have_verified_contents_ = success; | |
251 | |
252 if (IsCancelled()) | 279 if (IsCancelled()) |
253 return; | 280 return; |
254 | 281 |
255 // TODO(asargent) - eventually we should abort here on !success, but for | 282 if (!success) { |
256 // testing purposes it's actually still helpful to continue on to create the | 283 DispatchCallback(); |
257 // computed hashes. | 284 return; |
| 285 } |
258 | 286 |
259 content::BrowserThread::PostBlockingPoolSequencedTask( | 287 content::BrowserThread::PostBlockingPoolSequencedTask( |
260 "ContentHashFetcher", | 288 "ContentHashFetcher", |
261 FROM_HERE, | 289 FROM_HERE, |
262 base::Bind(&ContentHashFetcherJob::MaybeCreateHashes, this)); | 290 base::Bind(&ContentHashFetcherJob::MaybeCreateHashes, this)); |
263 } | 291 } |
264 | 292 |
265 void ContentHashFetcherJob::MaybeCreateHashes() { | 293 void ContentHashFetcherJob::MaybeCreateHashes() { |
266 if (IsCancelled()) | 294 if (IsCancelled()) |
267 return; | 295 return; |
268 base::FilePath hashes_file = | 296 base::FilePath hashes_file = |
269 file_util::GetComputedHashesPath(extension_path_); | 297 file_util::GetComputedHashesPath(extension_path_); |
270 | 298 |
271 if (base::PathExists(hashes_file)) | 299 if (!force_ && base::PathExists(hashes_file)) { |
272 success_ = true; | 300 success_ = true; |
273 else | 301 } else { |
| 302 if (force_) |
| 303 base::DeleteFile(hashes_file, false /* recursive */); |
274 success_ = CreateHashes(hashes_file); | 304 success_ = CreateHashes(hashes_file); |
| 305 } |
275 | 306 |
276 content::BrowserThread::PostTask( | 307 content::BrowserThread::PostTask( |
277 creation_thread_, | 308 creation_thread_, |
278 FROM_HERE, | 309 FROM_HERE, |
279 base::Bind(&ContentHashFetcherJob::DispatchCallback, this)); | 310 base::Bind(&ContentHashFetcherJob::DispatchCallback, this)); |
280 } | 311 } |
281 | 312 |
282 bool ContentHashFetcherJob::CreateHashes(const base::FilePath& hashes_file) { | 313 bool ContentHashFetcherJob::CreateHashes(const base::FilePath& hashes_file) { |
283 if (IsCancelled()) | 314 if (IsCancelled()) |
284 return false; | 315 return false; |
285 // Make sure the directory exists. | 316 // Make sure the directory exists. |
286 if (!base::CreateDirectoryAndGetError(hashes_file.DirName(), NULL)) | 317 if (!base::CreateDirectoryAndGetError(hashes_file.DirName(), NULL)) |
287 return false; | 318 return false; |
288 | 319 |
| 320 base::FilePath verified_contents_path = |
| 321 file_util::GetVerifiedContentsPath(extension_path_); |
| 322 VerifiedContents verified_contents(key_.data, key_.size); |
| 323 if (!verified_contents.InitFrom(verified_contents_path, false)) |
| 324 return false; |
| 325 |
289 base::FileEnumerator enumerator(extension_path_, | 326 base::FileEnumerator enumerator(extension_path_, |
290 true, /* recursive */ | 327 true, /* recursive */ |
291 base::FileEnumerator::FILES); | 328 base::FileEnumerator::FILES); |
292 // First discover all the file paths and put them in a sorted set. | 329 // First discover all the file paths and put them in a sorted set. |
293 SortedFilePathSet paths; | 330 SortedFilePathSet paths; |
294 for (;;) { | 331 for (;;) { |
295 if (IsCancelled()) | 332 if (IsCancelled()) |
296 return false; | 333 return false; |
297 | 334 |
298 base::FilePath full_path = enumerator.Next(); | 335 base::FilePath full_path = enumerator.Next(); |
299 if (full_path.empty()) | 336 if (full_path.empty()) |
300 break; | 337 break; |
301 paths.insert(full_path); | 338 paths.insert(full_path); |
302 } | 339 } |
303 | 340 |
304 // Now iterate over all the paths in sorted order and compute the block hashes | 341 // Now iterate over all the paths in sorted order and compute the block hashes |
305 // for each one. | 342 // for each one. |
306 ComputedHashes::Writer writer; | 343 ComputedHashes::Writer writer; |
307 for (SortedFilePathSet::iterator i = paths.begin(); i != paths.end(); ++i) { | 344 for (SortedFilePathSet::iterator i = paths.begin(); i != paths.end(); ++i) { |
308 if (IsCancelled()) | 345 if (IsCancelled()) |
309 return false; | 346 return false; |
310 const base::FilePath& full_path = *i; | 347 const base::FilePath& full_path = *i; |
311 base::FilePath relative_path; | 348 base::FilePath relative_path; |
312 extension_path_.AppendRelativePath(full_path, &relative_path); | 349 extension_path_.AppendRelativePath(full_path, &relative_path); |
| 350 |
| 351 const std::string* expected_root = |
| 352 verified_contents.GetTreeHashRoot(relative_path); |
| 353 if (!expected_root) |
| 354 continue; |
| 355 |
313 std::string contents; | 356 std::string contents; |
314 if (!base::ReadFileToString(full_path, &contents)) { | 357 if (!base::ReadFileToString(full_path, &contents)) { |
315 LOG(ERROR) << "Could not read " << full_path.MaybeAsASCII(); | 358 LOG(ERROR) << "Could not read " << full_path.MaybeAsASCII(); |
316 continue; | 359 continue; |
317 } | 360 } |
318 | 361 |
319 // Iterate through the file, taking the SHA-256 hash of each block of | 362 // Iterate through the file, taking the SHA-256 hash of each block of |
320 // (block_size_) bytes. | 363 // (block_size_) bytes. |
321 std::vector<std::string> hashes; | 364 std::vector<std::string> hashes; |
322 size_t offset = 0; | 365 size_t offset = 0; |
323 while (offset < contents.size()) { | 366 while (offset < contents.size()) { |
324 if (IsCancelled()) | 367 if (IsCancelled()) |
325 return false; | 368 return false; |
326 const char* block_start = contents.data() + offset; | 369 const char* block_start = contents.data() + offset; |
327 size_t bytes_to_read = | 370 size_t bytes_to_read = |
328 std::min(contents.size() - offset, static_cast<size_t>(block_size_)); | 371 std::min(contents.size() - offset, static_cast<size_t>(block_size_)); |
329 DCHECK(bytes_to_read > 0); | 372 DCHECK(bytes_to_read > 0); |
330 scoped_ptr<crypto::SecureHash> hash( | 373 scoped_ptr<crypto::SecureHash> hash( |
331 crypto::SecureHash::Create(crypto::SecureHash::SHA256)); | 374 crypto::SecureHash::Create(crypto::SecureHash::SHA256)); |
332 hash->Update(block_start, bytes_to_read); | 375 hash->Update(block_start, bytes_to_read); |
333 | 376 |
334 hashes.push_back(std::string()); | 377 hashes.push_back(std::string()); |
335 std::string* buffer = &hashes.back(); | 378 std::string* buffer = &hashes.back(); |
336 buffer->resize(crypto::kSHA256Length); | 379 buffer->resize(crypto::kSHA256Length); |
337 hash->Finish(string_as_array(buffer), buffer->size()); | 380 hash->Finish(string_as_array(buffer), buffer->size()); |
338 | 381 |
339 // Get ready for next iteration. | 382 // Get ready for next iteration. |
340 offset += bytes_to_read; | 383 offset += bytes_to_read; |
341 } | 384 } |
| 385 std::string root = |
| 386 ComputeTreeHashRoot(hashes, block_size_ / crypto::kSHA256Length); |
| 387 if (expected_root && *expected_root != root) { |
| 388 VLOG(1) << "content mismatch for " << relative_path.AsUTF8Unsafe(); |
| 389 hash_mismatch_paths_.insert(relative_path); |
| 390 continue; |
| 391 } |
| 392 |
342 writer.AddHashes(relative_path, block_size_, hashes); | 393 writer.AddHashes(relative_path, block_size_, hashes); |
343 } | 394 } |
344 return writer.WriteToFile(hashes_file); | 395 return writer.WriteToFile(hashes_file); |
345 } | 396 } |
346 | 397 |
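For reference, ComputeTreeHashRoot() used above comes from the newly included extensions/browser/content_hash_tree.h, called with a branch factor of block_size_ / crypto::kSHA256Length (4096 / 32 = 128). A minimal sketch of the shape of such a tree-hash root computation, assuming each tree level hashes the concatenation of up to branch_factor child hashes (the canonical implementation lives in content_hash_tree.cc):

#include <string>
#include <vector>

#include "base/memory/scoped_ptr.h"
#include "base/stl_util.h"
#include "crypto/secure_hash.h"
#include "crypto/sha2.h"

// Sketch only: folds one level of hashes at a time until a single root
// remains. A lone leaf hash is its own root.
std::string SketchTreeHashRoot(std::vector<std::string> level,
                               size_t branch_factor) {
  if (level.empty() || branch_factor < 2)
    return std::string();
  while (level.size() > 1) {
    std::vector<std::string> parents;
    for (size_t i = 0; i < level.size(); i += branch_factor) {
      // Each parent is the SHA-256 of the concatenation of up to
      // |branch_factor| consecutive child hashes.
      scoped_ptr<crypto::SecureHash> hash(
          crypto::SecureHash::Create(crypto::SecureHash::SHA256));
      for (size_t j = i; j < level.size() && j < i + branch_factor; ++j)
        hash->Update(level[j].data(), level[j].size());
      std::string parent(crypto::kSHA256Length, 0);
      hash->Finish(string_as_array(&parent), parent.size());
      parents.push_back(parent);
    }
    level.swap(parents);
  }
  return level[0];
}

With 4096-byte blocks and a branch factor of 128, any file up to 512 KB (128 blocks) yields its root after at most one level of combining.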
347 void ContentHashFetcherJob::DispatchCallback() { | 398 void ContentHashFetcherJob::DispatchCallback() { |
348 { | 399 { |
349 base::AutoLock autolock(cancelled_lock_); | 400 base::AutoLock autolock(cancelled_lock_); |
350 if (cancelled_) | 401 if (cancelled_) |
351 return; | 402 return; |
352 } | 403 } |
353 callback_.Run(this); | 404 callback_.Run(this); |
354 } | 405 } |
355 | 406 |
356 // ---- | 407 // ---- |
357 | 408 |
358 ContentHashFetcher::ContentHashFetcher(content::BrowserContext* context, | 409 ContentHashFetcher::ContentHashFetcher(content::BrowserContext* context, |
359 ContentVerifierDelegate* delegate) | 410 ContentVerifierDelegate* delegate, |
| 411 const FetchCallback& callback) |
360 : context_(context), | 412 : context_(context), |
361 delegate_(delegate), | 413 delegate_(delegate), |
| 414 fetch_callback_(callback), |
362 observer_(this), | 415 observer_(this), |
363 weak_ptr_factory_(this) { | 416 weak_ptr_factory_(this) { |
364 } | 417 } |
365 | 418 |
366 ContentHashFetcher::~ContentHashFetcher() { | 419 ContentHashFetcher::~ContentHashFetcher() { |
367 for (JobMap::iterator i = jobs_.begin(); i != jobs_.end(); ++i) { | 420 for (JobMap::iterator i = jobs_.begin(); i != jobs_.end(); ++i) { |
368 i->second->Cancel(); | 421 i->second->Cancel(); |
369 } | 422 } |
370 } | 423 } |
371 | 424 |
372 void ContentHashFetcher::Start() { | 425 void ContentHashFetcher::Start() { |
373 ExtensionRegistry* registry = ExtensionRegistry::Get(context_); | 426 ExtensionRegistry* registry = ExtensionRegistry::Get(context_); |
374 observer_.Add(registry); | 427 observer_.Add(registry); |
375 } | 428 } |
376 | 429 |
377 void ContentHashFetcher::DoFetch(const Extension* extension) { | 430 void ContentHashFetcher::DoFetch(const Extension* extension, bool force) { |
378 if (!extension || !delegate_->ShouldBeVerified(*extension)) | 431 if (!extension || !delegate_->ShouldBeVerified(*extension)) |
379 return; | 432 return; |
380 | 433 |
381 IdAndVersion key(extension->id(), extension->version()->GetString()); | 434 IdAndVersion key(extension->id(), extension->version()->GetString()); |
382 if (ContainsKey(jobs_, key)) | 435 JobMap::iterator found = jobs_.find(key); |
383 return; | 436 if (found != jobs_.end()) { |
| 437 if (!force || found->second->force()) { |
| 438 // Just let the existing job keep running. |
| 439 return; |
| 440 } else { |
| 441 // Kill the existing non-force job, so we can start a new one below. |
| 442 found->second->Cancel(); |
| 443 jobs_.erase(found); |
| 444 } |
| 445 } |
384 | 446 |
385 // TODO(asargent) - we should do something here to remember recent attempts | 447 // TODO(asargent) - we should do something here to remember recent attempts |
386 // to fetch signatures by extension id, and use exponential backoff to avoid | 448 // to fetch signatures by extension id, and use exponential backoff to avoid |
387 // hammering the server when we aren't successful in getting them. | 449 // hammering the server when we aren't successful in getting them. |
388 // crbug.com/373397 | 450 // crbug.com/373397 |
389 | 451 |
390 DCHECK(extension->version()); | 452 DCHECK(extension->version()); |
391 GURL url = | 453 GURL url = |
392 delegate_->GetSignatureFetchUrl(extension->id(), *extension->version()); | 454 delegate_->GetSignatureFetchUrl(extension->id(), *extension->version()); |
393 ContentHashFetcherJob* job = | 455 ContentHashFetcherJob* job = |
394 new ContentHashFetcherJob(context_->GetRequestContext(), | 456 new ContentHashFetcherJob(context_->GetRequestContext(), |
| 457 delegate_->PublicKey(), |
395 extension->id(), | 458 extension->id(), |
396 extension->path(), | 459 extension->path(), |
397 url, | 460 url, |
| 461 force, |
398 base::Bind(&ContentHashFetcher::JobFinished, | 462 base::Bind(&ContentHashFetcher::JobFinished, |
399 weak_ptr_factory_.GetWeakPtr())); | 463 weak_ptr_factory_.GetWeakPtr())); |
400 jobs_.insert(std::make_pair(key, job)); | 464 jobs_.insert(std::make_pair(key, job)); |
401 job->Start(); | 465 job->Start(); |
402 } | 466 } |
403 | 467 |
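The TODO in DoFetch() above mentions exponential backoff for repeated fetch failures. One possible shape for that, sketched with net/base/backoff_entry.h; the policy values here are illustrative placeholders, not part of this patch:

#include "net/base/backoff_entry.h"

// Illustrative policy: start at 10 seconds, double per failure with 10%
// jitter, and cap at one hour.
const net::BackoffEntry::Policy kFetchBackoffPolicy = {
    0,               // num_errors_to_ignore
    10 * 1000,       // initial_delay_ms
    2.0,             // multiply_factor
    0.1,             // jitter_factor
    60 * 60 * 1000,  // maximum_backoff_ms
    -1,              // entry_lifetime_ms (never discard)
    false,           // always_use_initial_delay
};

// Hypothetical guard consulted before starting a job for one extension.
bool ShouldStartFetch(net::BackoffEntry* entry) {
  return !entry->ShouldRejectRequest();
}

// Hypothetical bookkeeping once a job finishes.
void RecordFetchResult(net::BackoffEntry* entry, bool succeeded) {
  entry->InformOfRequest(succeeded);
}

A real version would presumably keep one BackoffEntry per extension id, e.g. in a map keyed the same way as jobs_.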
404 void ContentHashFetcher::OnExtensionLoaded( | 468 void ContentHashFetcher::OnExtensionLoaded( |
405 content::BrowserContext* browser_context, | 469 content::BrowserContext* browser_context, |
406 const Extension* extension) { | 470 const Extension* extension) { |
407 CHECK(extension); | 471 CHECK(extension); |
408 DoFetch(extension); | 472 DoFetch(extension, false); |
409 } | 473 } |
410 | 474 |
411 void ContentHashFetcher::OnExtensionUnloaded( | 475 void ContentHashFetcher::OnExtensionUnloaded( |
412 content::BrowserContext* browser_context, | 476 content::BrowserContext* browser_context, |
413 const Extension* extension, | 477 const Extension* extension, |
414 UnloadedExtensionInfo::Reason reason) { | 478 UnloadedExtensionInfo::Reason reason) { |
415 CHECK(extension); | 479 CHECK(extension); |
416 IdAndVersion key(extension->id(), extension->version()->GetString()); | 480 IdAndVersion key(extension->id(), extension->version()->GetString()); |
417 JobMap::iterator found = jobs_.find(key); | 481 JobMap::iterator found = jobs_.find(key); |
418 if (found != jobs_.end()) | 482 if (found != jobs_.end()) { |
| 483 found->second->Cancel(); |
419 jobs_.erase(found); | 484 jobs_.erase(found); |
| 485 } |
420 } | 486 } |
421 | 487 |
422 void ContentHashFetcher::JobFinished(ContentHashFetcherJob* job) { | 488 void ContentHashFetcher::JobFinished(ContentHashFetcherJob* job) { |
| 489 if (!job->IsCancelled()) { |
| 490 fetch_callback_.Run(job->extension_id(), |
| 491 job->success(), |
| 492 job->force(), |
| 493 job->hash_mismatch_paths()); |
| 494 } |
| 495 |
423 for (JobMap::iterator i = jobs_.begin(); i != jobs_.end(); ++i) { | 496 for (JobMap::iterator i = jobs_.begin(); i != jobs_.end(); ++i) { |
424 if (i->second.get() == job) { | 497 if (i->second.get() == job) { |
425 jobs_.erase(i); | 498 jobs_.erase(i); |
426 break; | 499 break; |
427 } | 500 } |
428 } | 501 } |
429 } | 502 } |
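For reference, the FetchCallback type behind fetch_callback_ is declared in content_hash_fetcher.h rather than in this file; from the Run() call in JobFinished() it presumably has a shape like this sketch (parameter names are illustrative):

// Inferred from the arguments passed in JobFinished() above.
typedef base::Callback<void(const std::string& extension_id,
                            bool success,
                            bool was_force_check,
                            const std::set<base::FilePath>& hash_mismatch_paths)>
    FetchCallback;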
430 | 503 |
431 } // namespace extensions | 504 } // namespace extensions |