OLD | NEW |
---|---|
1 # Copyright 2014 The Chromium Authors. All rights reserved. | 1 # Copyright 2014 The Chromium Authors. All rights reserved. |
2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
4 | 4 |
5 """Wrappers for gsutil, for basic interaction with Google Cloud Storage.""" | 5 """Wrappers for gsutil, for basic interaction with Google Cloud Storage.""" |
6 | 6 |
7 import contextlib | 7 import contextlib |
8 import cStringIO | 8 import cStringIO |
9 import hashlib | 9 import hashlib |
10 import logging | 10 import logging |
11 import os | 11 import os |
12 import subprocess | 12 import subprocess |
13 import sys | 13 import sys |
14 import tarfile | 14 import tarfile |
15 import urllib2 | 15 import urllib2 |
16 | 16 |
17 from telemetry import decorators | |
17 from telemetry.core import util | 18 from telemetry.core import util |
18 from telemetry.util import path | 19 from telemetry.util import path |
19 | 20 |
20 | 21 |
21 PUBLIC_BUCKET = 'chromium-telemetry' | 22 PUBLIC_BUCKET = 'chromium-telemetry' |
22 PARTNER_BUCKET = 'chrome-partner-telemetry' | 23 PARTNER_BUCKET = 'chrome-partner-telemetry' |
23 INTERNAL_BUCKET = 'chrome-telemetry' | 24 INTERNAL_BUCKET = 'chrome-telemetry' |
24 | 25 |
25 | 26 |
26 BUCKET_ALIASES = { | 27 BUCKET_ALIASES = { |
(...skipping 183 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
210 command_and_args += ['-a', 'public-read'] | 211 command_and_args += ['-a', 'public-read'] |
211 extra_info = ' (publicly readable)' | 212 extra_info = ' (publicly readable)' |
212 command_and_args += [local_path, url] | 213 command_and_args += [local_path, url] |
213 logging.info('Uploading %s to %s%s' % (local_path, url, extra_info)) | 214 logging.info('Uploading %s to %s%s' % (local_path, url, extra_info)) |
214 _RunCommand(command_and_args) | 215 _RunCommand(command_and_args) |
215 return 'https://console.developers.google.com/m/cloudstorage/b/%s/o/%s' % ( | 216 return 'https://console.developers.google.com/m/cloudstorage/b/%s/o/%s' % ( |
216 bucket, remote_path) | 217 bucket, remote_path) |
217 | 218 |
218 | 219 |
def GetIfChanged(file_path, bucket):
  """Fetches file_path from cloud storage when the local copy is stale.

  The file is considered stale when a matching .sha1 hash file exists but
  the local file is either missing or its SHA-1 digest differs from the
  digest recorded in the hash file. Without a hash file, nothing is done.

  Returns:
    True if the binary was changed.
  Raises:
    CredentialsError if the user has no configured credentials.
    PermissionError if the user does not have permission to access the bucket.
    NotFoundError if the file is not in the given bucket in cloud_storage.
  """
  hash_path = file_path + '.sha1'
  if not os.path.exists(hash_path):
    # No hash file means this path is not managed by cloud storage.
    logging.warning('Hash file not found: %s' % hash_path)
    return False

  expected_hash = ReadHash(hash_path)
  local_copy_is_current = (os.path.exists(file_path) and
                           CalculateHash(file_path) == expected_hash)
  if local_copy_is_current:
    return False

  Get(bucket, expected_hash, file_path)
  return True
243 | 242 |
# TODO(aiolos): remove @decorators.Cache for http://crbug.com/459787
@decorators.Cache
def GetFilesInDirectoryIfChanged(directory, bucket):
  """Scans directory for .sha1 files and downloads their targets if changed.

  For every <name>.sha1 file found under directory (recursively), downloads
  <name> from the given cloud storage bucket if there is no local copy of
  <name> or its hash does not match the hash recorded in the .sha1 file.

  Args:
    directory: Local directory tree to scan for .sha1 hash files.
    bucket: Name of the cloud storage bucket to download from.

  Raises:
    ValueError: If directory resolves to the filesystem root, which must
        never be served from the HTTP server.
  """
  # Normalize first so paths that merely resolve to the root (e.g. '/foo/..',
  # '/./') cannot slip past the guard; also accept the Windows separator.
  if os.path.splitdrive(os.path.normpath(directory))[1] in ('/', '\\'):
    raise ValueError('Trying to serve root directory from HTTP server.')
  for dirpath, _, filenames in os.walk(directory):
    for filename in filenames:
      path_name, extension = os.path.splitext(os.path.join(dirpath, filename))
      if extension != '.sha1':
        continue
      # path_name is the hash file minus its .sha1 suffix: the real target.
      GetIfChanged(path_name, bucket)
244 | 259 |
def CalculateHash(file_path):
  """Calculates and returns the hash of the file at file_path."""
  digest = hashlib.sha1()
  with open(file_path, 'rb') as stream:
    # Consume the file in 1mb chunks so large binaries never have to be
    # resident in memory all at once; iter() stops at the empty sentinel (EOF).
    for chunk in iter(lambda: stream.read(1024 * 1024), b''):
      digest.update(chunk)
  return digest.hexdigest()
256 | 271 |
257 | 272 |
def ReadHash(hash_path):
  """Returns the hash stored in the file at hash_path, sans trailing space."""
  with open(hash_path, 'rb') as hash_file:
    # A SHA-1 hex digest is 40 characters; 1kb bounds the read comfortably.
    return hash_file.read(1024).rstrip()
OLD | NEW |