Chromium Code Reviews

Diff: tests/isolateserver_archive_test.py

Issue 14455006: Do not retry uploading to blobstore on HTTP 500; regenerate the upload URL first. (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/tools/swarm_client
Patch Set: Rebase of 14383005 Created 7 years, 8 months ago
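For context, the behavior this patch introduces can be sketched as follows. This is a minimal illustration, not the patched isolateserver_archive.py itself: it assumes a url_open(url, **kwargs) helper that returns a file-like object on success and None on failure, takes the multipart encoder as a parameter, and mirrors the retry_50x=False keyword visible in the test expectations below, so that a 50x response surfaces here instead of being retried against the same, possibly consumed, blobstore URL.

# Sketch only: hypothetical names, not the actual implementation under review.
MAX_UPLOAD_ATTEMPTS = 5  # assumed bound; the real retry count is not shown here.


def upload_to_blobstore_sketch(generate_url, data, hash_key, content, url_open,
                               encode_multipart_formdata):
  """Uploads |content| to the blobstore, regenerating the upload URL on 50x."""
  content_type, body = encode_multipart_formdata(
      data[:], [('content', hash_key, content)])
  for _ in range(MAX_UPLOAD_ATTEMPTS):
    # Ask the server for a fresh one-shot upload URL on every attempt.
    url_file = url_open(generate_url, data=data[:])
    if not url_file:
      continue
    upload_url = url_file.read()
    # retry_50x=False: an HTTP 500 from the blobstore must not be retried
    # against the same URL, since that URL may already have been consumed.
    result = url_open(upload_url, data=body, content_type=content_type,
                      retry_50x=False)
    if result:
      return result.read()
  return None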
 #!/usr/bin/env python
 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.

 import binascii
 import random
 import hashlib
 import logging
 import os
(...skipping 127 matching lines...)
         {'data': sha1encoded, 'content_type': 'application/octet-stream'},
         StringIO.StringIO('\0'),
       ),
       (
         path + 'content/generate_blobstore_url/default-gzip/%s' % s,
         {'data': [('token', 'foo bar')]},
         StringIO.StringIO('an_url/'),
       ),
       (
         'an_url/',
-        {'data': body, 'content_type': content_type},
+        {'data': body, 'content_type': content_type, 'retry_50x': False},
         StringIO.StringIO('ok'),
       ),
     ]

     old_read_and_compress = isolateserver_archive.read_and_compress
     try:
       isolateserver_archive.read_and_compress = lambda x, y: compressed
       result = isolateserver_archive.upload_sha1_tree(
           base_url=path,
           indir=os.getcwd(),
(...skipping 25 matching lines...)
         self.assertEqual('FakeUrl', url)
         self.assertEqual(self.fail, upload_func)
         actual.extend(items)

       isolateserver_archive.update_files_to_upload = process
       isolateserver_archive.process_items('FakeUrl', items, self.fail)
       self.assertEqual(expected, actual)
     finally:
       isolateserver_archive.update_files_to_upload = old

+  def test_upload_blobstore_simple(self):
+    content = 'blob_content'
+    s = hashlib.sha1(content).hexdigest()
+    path = 'http://example.com:80/'
+    data = [('token', 'foo bar')]
+    content_type, body = isolateserver_archive.encode_multipart_formdata(
+        data[:], [('content', s, 'blob_content')])
+    self._requests = [
+      (
+        path + 'gen_url?foo#bar',
+        {'data': data[:]},
+        StringIO.StringIO('an_url/'),
+      ),
+      (
+        'an_url/',
+        {'data': body, 'content_type': content_type, 'retry_50x': False},
+        StringIO.StringIO('ok42'),
+      ),
+    ]
+    result = isolateserver_archive.upload_hash_content_to_blobstore(
+        path + 'gen_url?foo#bar', data[:], s, content)
+    self.assertEqual('ok42', result)
+
+  def test_upload_blobstore_retry_500(self):
+    content = 'blob_content'
+    s = hashlib.sha1(content).hexdigest()
+    path = 'http://example.com:80/'
+    data = [('token', 'foo bar')]
+    content_type, body = isolateserver_archive.encode_multipart_formdata(
+        data[:], [('content', s, 'blob_content')])
+    self._requests = [
+      (
+        path + 'gen_url?foo#bar',
+        {'data': data[:]},
+        StringIO.StringIO('an_url/'),
+      ),
+      (
+        'an_url/',
+        {'data': body, 'content_type': content_type, 'retry_50x': False},
+        # Let's say an HTTP 500 was returned.
+        None,
+      ),
+      # In that case, a new url must be generated since the last one may have
+      # been "consumed".
+      (
+        path + 'gen_url?foo#bar',
+        {'data': data[:]},
+        StringIO.StringIO('an_url/'),
+      ),
+      (
+        'an_url/',
+        {'data': body, 'content_type': content_type, 'retry_50x': False},
+        StringIO.StringIO('ok42'),
+      ),
+    ]
+    result = isolateserver_archive.upload_hash_content_to_blobstore(
+        path + 'gen_url?foo#bar', data[:], s, content)
+    self.assertEqual('ok42', result)
+

 if __name__ == '__main__':
   logging.basicConfig(
       level=(logging.DEBUG if '-v' in sys.argv else logging.ERROR))
   unittest.main()
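The tests above feed self._requests to a mocked HTTP layer that lives in the skipped part of the file. A hypothetical sketch of such a mock (names assumed, not taken from this patch): it pops the expected (url, kwargs, response) tuples in order, verifies each call, and returns the canned response, where None stands in for a permanently failed request such as an HTTP 500.

# Hypothetical url_open replacement installed by the test fixture; not part of
# the change under review.
def _url_open_mock(self, url, **kwargs):
  self.assertTrue(self._requests, 'Unexpected request to %s' % url)
  expected_url, expected_kwargs, response = self._requests.pop(0)
  self.assertEqual(expected_url, url)
  self.assertEqual(expected_kwargs, kwargs)
  # A None response simulates a failed request (e.g. HTTP 500).
  return response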