Chromium Code Reviews

Side by Side Diff: build/upload_to_google_storage.py

Issue 11664024: Scripts to download files from google storage based on sha1 sums (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src
Patch Set: newline fixes Created 7 years, 12 months ago
1 #!/usr/bin/env python
2 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5 """Script to download files from Google Storage."""
cmp 2013/01/04 17:42:04 insert an empty line before line 5
Ryan Tseng 2013/01/14 21:37:12 Done.
6 import os
7 import zipfile
8 import tempfile
9 import subprocess
10 import hashlib
11 import Queue
12 import sys
13 import time
14 import re
15 import threading
16 import zipfile
17 from optparse import OptionParser
18 GSUTIL_DEFAULT_PATH = os.path.join(os.path.dirname(os.path.normpath(__file__)),
19                                    '..', '..', 'third_party', 'gsutil', 'gsutil')
cmp 2013/01/04 17:42:04 similar comments here as what I wrote in download_
20 class Gsutil():
21   def __init__(self, path):
22     if os.path.exists(path):
23       self.path = path
24     else:
25       raise IOError('GSUtil not found in %s' % path)
26   stdout = None
27   stderr = None
28   def call_interactive(self, *args):
29     p = subprocess.Popen(('python', self.path) + args, stdout=sys.stdout,
30                          stderr=sys.stderr, stdin=sys.stdin)
31     return p.wait()
32   def call(self, *args):
33     p = subprocess.Popen(('python', self.path) + args, stdout=subprocess.PIPE,
34                          stderr=subprocess.PIPE)
35     out, err = p.communicate()
36     code = p.returncode
37     self.stdout = out
38     self.stderr = err
39
40     if code == 0:
41       return 0
42     else:
43       status_code_match = re.search('status=([0-9]+)', err)
44       if status_code_match:
45         return int(status_code_match.group(1))
46       elif ('You are attempting to access protected data with '
47             'no configured credentials.' in err):
48         return 403
49       elif 'No such object' in err:
50         return 404
51       else:
52         return code
53 def check_sha1(sha1_sum, filename):
54   sha1 = hashlib.sha1()
55   sha1.update(open(filename, 'rb').read())
56   return sha1_sum == sha1.hexdigest()
57 def _upload_worker(thread_num, q, base_url, gsutil, options, md5_lock):
58   while True:
59     try:
60       filename, sha1_sum = q.get_nowait()
61       file_url = '%s/%s' % (base_url, sha1_sum)
62       if gsutil.call('ls', file_url) == 0 and not options.force:
63         # File exists, check MD5 hash.
64         gsutil.call('ls', '-L', file_url)
65         etag_match = re.search('ETag:\s+([a-z0-9]{32})', gsutil.stdout)
66         if etag_match:
67           remote_md5 = etag_match.groups()[0]
68           md5_calculator = hashlib.md5()
69           with md5_lock:
70             md5_calculator.update(open(filename, 'rb').read())
71           local_md5 = md5_calculator.hexdigest()
72           if local_md5 == remote_md5:
73             print ('File already exists at %s and MD5 matches, exiting' %
74                    file_url)
75             continue
76       print 'Uploading %s to %s' % (filename, file_url)
77       code = gsutil.call_interactive('cp', '-q', filename, file_url)
78       if code != 0:
79         print >>sys.stderr, gsutil.stderr
80         continue
81     except Queue.Empty:
82       return
83 def main(args):
84   parser = OptionParser()
85   parser.add_option('-d', '--delete', action='store_true', default=False,
86                     help='Deletes the target file after upload.')
87   parser.add_option('-b', '--bucket', default='chrome-artifacts',
88                     help='Google Storage bucket to fetch from.')
89   parser.add_option('-f', '--force', action='store_true', default=False,
90                     help='Force upload even if remote file exists.')
91   parser.add_option('-g', '--gsutil_path', default=GSUTIL_DEFAULT_PATH,
92                     help='Path to the gsutil script.')
93   parser.add_option('-t', '--num_threads', default=1, type='int',
94                     help='Number of uploader threads to run.')
95   parser.add_option('-s', '--skip_hashing', action='store_true', default=False,
96                     help='Skip hashing if .sha1 file exists.')
97   (options, args) = parser.parse_args()
98   if len(args) < 1:
99     print >>sys.stderr, 'Missing target.'
100     return 1
101   else:
102     input_filename = args[0]
103   base_url = 'gs://%s' % options.bucket
104   if os.path.exists(options.gsutil_path):
105     gsutil = Gsutil(options.gsutil_path)
106   else:
107     for path in os.environ["PATH"].split(os.pathsep):
108       if os.path.exists(path) and 'gsutil' in os.listdir(path):
109         gsutil = Gsutil(os.path.join(path, 'gsutil'))
110   # Check if we have permissions.
111   code = gsutil.call('ls', base_url)
112   if code == 403:
113     code = gsutil.call_interactive('config')
114     if code != 0:
115       print >>sys.stderr, 'Error while authenticating to %s, exiting' % base_url
116       return 403
117   elif code == 404:
118     print >>sys.stderr, '%s not found.' % base_url
119     return 404
120   elif code != 0:
121     print >>sys.stderr, gsutil.stderr
122     return code
123   # Enumerate the list of file(s) we want to transfer over.
124   hash_queue = []
125   if input_filename == '-':
126     # Take stdin as a newline-separated list of files.
127     for line in sys.stdin.readlines():
128       hash_queue.append(line.strip())
129   else:
130     hash_queue.append(input_filename)
131   # We want to hash everything in a single thread since it's faster.
132   # The bottleneck is in disk IO, not CPU.
133   upload_queue = Queue.Queue()
134   hash_timer = time.time()
135   for filename in hash_queue:
136     if os.path.exists('%s.sha1' % filename) and options.skip_hashing:
137       print 'Found hash for %s, skipping.' % filename
138       upload_queue.put((filename, open('%s.sha1' % filename).read()))
139       continue
140     print 'Calculating hash for %s...' % filename,
141     sha1_calculator = hashlib.sha1()
142     sha1_calculator.update(open(filename, 'rb').read())
143     sha1_sum = sha1_calculator.hexdigest()
144     with open(filename + '.sha1', 'w') as f:
145       f.write(sha1_sum)
146     print 'done'
147     upload_queue.put((filename, sha1_sum))
148   hash_time = time.time() - hash_timer
149   # Start up all the worker threads.
150   all_threads = []
151   # We only want one MD5 calculation happening at a time.
152   md5_lock = threading.Lock()
153   upload_timer = time.time()
154   for thread_num in range(options.num_threads):
155     t = threading.Thread(target=_upload_worker, args=[thread_num,
156         upload_queue, base_url, gsutil, options, md5_lock])
157     t.daemon = True
158     t.start()
159     all_threads.append(t)
160   def _wait_thread(threads, done):
161     for t in threads:
162       t.join()
163     print 'Now we\'re done'
164     done.set()
165   # Have a thread set a flag when all the tasks are done.
166   done = threading.Event()
167   done_thread = threading.Thread(target=_wait_thread, args=[all_threads, done])
168   done_thread.daemon = True
169   done_thread.start()
170   while not done.is_set():
171     time.sleep(1)  # Do a sleep loop so we can ctrl + c out of this anytime.
172   print 'Hashing %s took %.1f seconds' % (len(hash_queue), hash_time)
173   print 'Uploading took %.1f seconds' % (time.time() - upload_timer)
174 if __name__ == '__main__':
175   sys.exit(main(sys.argv))
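
For context (not part of this patch): the script writes a <filename>.sha1 sidecar next to each input file and names the uploaded object in the bucket after that hash, so a consumer can later verify a downloaded file the same way check_sha1() above does. Below is a minimal standalone sketch of that verification; the filename 'somefile.bin' is a hypothetical example, not something defined by this change.

#!/usr/bin/env python
# Sketch only: verify a downloaded file against its .sha1 sidecar,
# mirroring the check_sha1() logic in upload_to_google_storage.py above.
import hashlib
import sys

def verify(filename):
  # The sidecar is assumed to sit next to the file, as written by the
  # upload script (a bare hex digest, no trailing newline required).
  expected = open('%s.sha1' % filename).read().strip()
  actual = hashlib.sha1(open(filename, 'rb').read()).hexdigest()
  return expected == actual

if __name__ == '__main__':
  sys.exit(0 if verify('somefile.bin') else 1)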