Chromium Code Reviews

Unified Diff: download_from_google_storage.py

Issue 807463005: Add support for tar.gz archive files to download_from_google_storage (Closed)
Base URL: http://src.chromium.org/svn/trunk/tools/depot_tools/
Patch Set: address comments Created 5 years, 6 months ago
 #!/usr/bin/env python
 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.

 """Download files from Google Storage based on SHA1 sums."""


 import hashlib
 import optparse
 import os
 import Queue
 import re
+import shutil
 import stat
 import sys
+import tarfile
 import threading
 import time

 import subprocess2


 GSUTIL_DEFAULT_PATH = os.path.join(
     os.path.dirname(os.path.abspath(__file__)), 'gsutil.py')
 # Maps sys.platform to what we actually want to call them.
 PLATFORM_MAPPING = {
(...skipping 16 matching lines...)
   pass


 def GetNormalizedPlatform():
   """Returns the result of sys.platform accounting for cygwin.
   Under cygwin, this will always return "win32" like the native Python."""
   if sys.platform == 'cygwin':
     return 'win32'
   return sys.platform

-
 # Common utilities
 class Gsutil(object):
   """Call gsutil with some predefined settings. This is a convenience object,
   and is also immutable."""
   def __init__(self, path, boto_path=None, timeout=None, version='4.7'):
     if not os.path.exists(path):
       raise FileNotFoundError('GSUtil not found in %s' % path)
     self.path = path
     self.timeout = timeout
     self.boto_path = boto_path
(...skipping 116 matching lines...)
         work_queue.put(
             (sha1_match.groups(1)[0], full_path.replace('.sha1', '')))
         work_queue_size += 1
       else:
         if not ignore_errors:
           raise InvalidFileError('No sha1 sum found in %s.' % filename)
         print >> sys.stderr, 'No sha1 sum found in %s.' % filename
   return work_queue_size


+def _validate_tar_file(tar, prefix):
+  def _validate(tarinfo):
+    """Returns false if the tarinfo is something we explicitly forbid."""
+    if tarinfo.issym() or tarinfo.islnk():
+      return False
+    if '..' in tarinfo.name or not tarinfo.name.startswith(prefix):
+      return False
+    return True
+  return all(map(_validate, tar.getmembers()))
+
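Not part of the patch: a reviewer-side sketch, with hypothetical file and member names, of how the new _validate_tar_file helper is meant to behave. For an archive named foo.tar.gz the caller passes the prefix 'foo', so only regular entries that stay under that prefix and contain no '..' survive the check.

import tarfile

# Hypothetical wrapper; 'foo.tar.gz' and its members are invented for this sketch.
def check_archive(archive_path, prefix):
  with tarfile.open(archive_path, 'r:gz') as tar:
    return _validate_tar_file(tar, prefix)

# True for members such as 'foo/data.bin' or 'foo/sub/readme.txt'.
# False if any member is a symlink or hard link, contains '..', or does not
# start with 'foo' (e.g. 'bar/escape.txt').
print check_archive('foo.tar.gz', 'foo')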
 def _downloader_worker_thread(thread_num, q, force, base_url,
-                              gsutil, out_q, ret_codes, verbose):
+                              gsutil, out_q, ret_codes, verbose, extract,
+                              delete=True):
   while True:
     input_sha1_sum, output_filename = q.get()
     if input_sha1_sum is None:
       return
     if os.path.exists(output_filename) and not force:
       if get_sha1(output_filename) == input_sha1_sum:
         if verbose:
           out_q.put(
               '%d> File %s exists and SHA1 matches. Skipping.' % (
                   thread_num, output_filename))
(...skipping 10 matching lines...)
       else:
         # Other error, probably auth related (bad ~/.boto, etc).
         out_q.put('%d> Failed to fetch file %s for %s, skipping. [Err: %s]' % (
             thread_num, file_url, output_filename, err))
         ret_codes.put((1, 'Failed to fetch file %s for %s. [Err: %s]' % (
             file_url, output_filename, err)))
       continue
     # Fetch the file.
     out_q.put('%d> Downloading %s...' % (thread_num, output_filename))
     try:
-      os.remove(output_filename)  # Delete the file if it exists already.
+      if delete:
+        os.remove(output_filename)  # Delete the file if it exists already.
     except OSError:
       if os.path.exists(output_filename):
         out_q.put('%d> Warning: deleting %s failed.' % (
             thread_num, output_filename))
     code, _, err = gsutil.check_call('cp', file_url, output_filename)
     if code != 0:
       out_q.put('%d> %s' % (thread_num, err))
       ret_codes.put((code, err))

+    if extract:
+      if (not tarfile.is_tarfile(output_filename)
+          or not output_filename.endswith('.tar.gz')):
+        out_q.put('%d> Error: %s is not a tar.gz archive.' % (
+            thread_num, output_filename))
+        ret_codes.put((1, '%s is not a tar.gz archive.' % (output_filename)))
+        continue
+      with tarfile.open(output_filename, 'r:gz') as tar:
+        dirname = os.path.dirname(os.path.abspath(output_filename))
+        extract_dir = output_filename[0:len(output_filename)-7]
+        if not _validate_tar_file(tar, os.path.basename(extract_dir)):
+          out_q.put('%d> Error: %s contains files outside %s.' % (
+              thread_num, output_filename, extract_dir))
+          ret_codes.put((1, '%s contains invalid entries.' % (output_filename)))
+          continue
+        if os.path.exists(extract_dir):
+          try:
+            shutil.rmtree(extract_dir)
+            out_q.put('%d> Removed %s...' % (thread_num, extract_dir))
+          except OSError:
+            out_q.put('%d> Warning: Can\'t delete: %s' % (
+                thread_num, extract_dir))
+            ret_codes.put((1, 'Can\'t delete %s.' % (extract_dir)))
+            continue
+        out_q.put('%d> Extracting %d entries from %s to %s' %
+                  (thread_num, len(tar.getmembers()), output_filename,
+                   extract_dir))
+        tar.extractall(path=dirname)
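Not part of the patch: a quick sketch, with an invented path, of how dirname and extract_dir relate in the block above and why extraction stays confined once validation has passed.

import os

# Hypothetical archive path for illustration only.
output_filename = '/out/Release/test_data.tar.gz'
dirname = os.path.dirname(os.path.abspath(output_filename))   # '/out/Release'
extract_dir = output_filename[0:len(output_filename) - 7]     # strips '.tar.gz'
print dirname       # /out/Release
print extract_dir   # /out/Release/test_data
# _validate_tar_file(tar, 'test_data') has already required every member name
# to start with 'test_data' (and to contain no '..' or link entries), so
# tar.extractall(path=dirname) can only write beneath extract_dir.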
     # Set executable bit.
     if sys.platform == 'cygwin':
       # Under cygwin, mark all files as executable. The executable flag in
       # Google Storage will not be set when uploading from Windows, so if
       # this script is running under cygwin and we're downloading an
       # executable, it will be unrunnable from inside cygwin without this.
       st = os.stat(output_filename)
       os.chmod(output_filename, st.st_mode | stat.S_IEXEC)
     elif sys.platform != 'win32':
       # On non-Windows platforms, key off of the custom header
(...skipping 10 matching lines...)
   while True:
     line = output_queue.get()
     # It's plausible we want to print empty lines.
     if line is None:
       break
     print line


 def download_from_google_storage(
     input_filename, base_url, gsutil, num_threads, directory, recursive,
-    force, output, ignore_errors, sha1_file, verbose, auto_platform):
+    force, output, ignore_errors, sha1_file, verbose, auto_platform, extract):
   # Start up all the worker threads.
   all_threads = []
   download_start = time.time()
   stdout_queue = Queue.Queue()
   work_queue = Queue.Queue()
   ret_codes = Queue.Queue()
   ret_codes.put((0, None))
   for thread_num in range(num_threads):
     t = threading.Thread(
         target=_downloader_worker_thread,
         args=[thread_num, work_queue, force, base_url,
-              gsutil, stdout_queue, ret_codes, verbose])
+              gsutil, stdout_queue, ret_codes, verbose, extract])
     t.daemon = True
     t.start()
     all_threads.append(t)
   printer_thread = threading.Thread(target=printer_worker, args=[stdout_queue])
   printer_thread.daemon = True
   printer_thread.start()

   # Enumerate our work queue.
   work_queue_size = enumerate_work_queue(
       input_filename, work_queue, directory, recursive,
(...skipping 67 matching lines...)
   parser.add_option('-p', '--platform',
                     help='A regular expression that is compared against '
                          'Python\'s sys.platform. If this option is specified, '
                          'the download will happen only if there is a match.')
   parser.add_option('-a', '--auto_platform',
                     action='store_true',
                     help='Detects if any parent folder of the target matches '
                          '(linux|mac|win). If so, the script will only '
                          'process files that are in the paths that '
                          'match the current platform.')
+  parser.add_option('-u', '--extract',
+                    action='store_true',
+                    help='Extract a downloaded tar.gz file. '
+                         'Leaves the tar.gz file around for sha1 verification. '
+                         'If a directory with the same name as the tar.gz '
+                         'file already exists, it is deleted (to get a '
+                         'clean state in case of update).')
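Not part of the patch: per the help text above, a sketch of the expected on-disk result after running the script with the new -u/--extract flag against a hypothetical base/test_data.tar.gz.sha1 entry.

import os

# Hypothetical paths; the actual layout depends on where the .sha1 file lives.
assert os.path.isfile('base/test_data.tar.gz')   # archive kept for SHA1 checks
assert os.path.isdir('base/test_data')           # freshly extracted directory
# A pre-existing base/test_data directory is deleted before extraction, so
# files from an older archive revision do not linger after an update.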
   parser.add_option('-v', '--verbose', action='store_true',
                     help='Output extra diagnostic and progress information.')

   (options, args) = parser.parse_args()

   # Make sure we should run at all based on platform matching.
   if options.platform:
     if options.auto_platform:
       parser.error('--platform can not be specified with --auto_platform')
     if not re.match(options.platform, GetNormalizedPlatform()):
(...skipping 73 matching lines...)
   if not options.directory and not options.force and not options.no_resume:
     if os.path.exists(options.output):
       parser.error('Output file %s exists and --no_resume is specified.'
                    % options.output)

   base_url = 'gs://%s' % options.bucket

   return download_from_google_storage(
       input_filename, base_url, gsutil, options.num_threads, options.directory,
       options.recursive, options.force, options.output, options.ignore_errors,
-      options.sha1_file, options.verbose, options.auto_platform)
+      options.sha1_file, options.verbose, options.auto_platform,
+      options.extract)


 if __name__ == '__main__':
   sys.exit(main(sys.argv))