Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(1253)

Side by Side Diff: client/tests/run_isolated_smoke_test.py

Issue 2484133002: luci-py/isolateserver.py: Add support for tar archives when downloading. (Closed) Base URL: https://github.com/luci/luci-py.git@master
Patch Set: Rebase onto master. Created 4 years, 1 month ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « client/tests/isolateserver_test.py ('k') | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 #!/usr/bin/env python 1 #!/usr/bin/env python
2 # Copyright 2012 The LUCI Authors. All rights reserved. 2 # Copyright 2012 The LUCI Authors. All rights reserved.
3 # Use of this source code is governed under the Apache License, Version 2.0 3 # Use of this source code is governed under the Apache License, Version 2.0
4 # that can be found in the LICENSE file. 4 # that can be found in the LICENSE file.
5 5
6 import json 6 import json
7 import logging 7 import logging
8 import os 8 import os
9 import subprocess 9 import subprocess
10 import sys 10 import sys
(...skipping 49 matching lines...) Expand 10 before | Expand all | Expand 10 after
60 import os, sys 60 import os, sys
61 prefix = u'\\\\\\\\?\\\\' if sys.platform == 'win32' else u'' 61 prefix = u'\\\\\\\\?\\\\' if sys.platform == 'win32' else u''
62 path = os.path.join(os.getcwd().decode( 62 path = os.path.join(os.getcwd().decode(
63 sys.getfilesystemencoding()), 'a' * 200, 'b' * 200) 63 sys.getfilesystemencoding()), 'a' * 200, 'b' * 200)
64 with open(prefix + path, 'rb') as f: 64 with open(prefix + path, 'rb') as f:
65 actual = f.read() 65 actual = f.read()
66 if actual != 'File1\\n': 66 if actual != 'File1\\n':
67 print >> sys.stderr, 'Unexpected content: %s' % actual 67 print >> sys.stderr, 'Unexpected content: %s' % actual
68 sys.exit(1) 68 sys.exit(1)
69 print('Success')""", 69 print('Success')""",
70 'archive': ( 70 'ar_archive': (
71 '!<arch>\n' 71 '!<arch>\n'
72 '#1/5 ' 72 '#1/5 '
73 '1447140471 1000 1000 100640 ' 73 '1447140471 1000 1000 100640 '
74 '12 ' 74 '12 '
75 '\x60\n' 75 '\x60\n'
76 'a/foo' 76 'a/foo'
77 'Content' 77 'Content'
78 'b ' 78 'b '
79 '1447140471 1000 1000 100640 ' 79 '1447140471 1000 1000 100640 '
80 '12 ' 80 '12 '
81 '\x60\n' 81 '\x60\n'
82 'More content'), 82 'More content'),
83 'tar_archive': open(os.path.join(ROOT_DIR, 'tests', 'archive.tar')).read(),
83 'archive_files.py': """if True: 84 'archive_files.py': """if True:
84 import os, sys 85 import os, sys
85 ROOT_DIR = os.path.dirname(os.path.abspath( 86 ROOT_DIR = os.path.dirname(os.path.abspath(
86 __file__.decode(sys.getfilesystemencoding()))) 87 __file__.decode(sys.getfilesystemencoding())))
87 expected = ['a', 'archive_files.py', 'b'] 88 expected = ['a', 'archive_files.py', 'b']
88 actual = sorted(os.listdir(ROOT_DIR)) 89 actual = sorted(os.listdir(ROOT_DIR))
89 if expected != actual: 90 if expected != actual:
90 print >> sys.stderr, 'Expected list doesn\\'t match:' 91 print >> sys.stderr, 'Expected list doesn\\'t match:'
91 print >> sys.stderr, '%s\\n%s' % (','.join(expected), ','.join(actual)) 92 print >> sys.stderr, '%s\\n%s' % (','.join(expected), ','.join(actual))
92 sys.exit(1) 93 sys.exit(1)
(...skipping 52 matching lines...) Expand 10 before | Expand all | Expand 10 after
145 146
146 CONTENTS['manifest2.isolated'] = json.dumps( 147 CONTENTS['manifest2.isolated'] = json.dumps(
147 { 148 {
148 'files': {'file2.txt': file_meta('file2.txt')}, 149 'files': {'file2.txt': file_meta('file2.txt')},
149 'includes': [ 150 'includes': [
150 isolateserver_mock.hash_content(CONTENTS['manifest1.isolated']), 151 isolateserver_mock.hash_content(CONTENTS['manifest1.isolated']),
151 ], 152 ],
152 }) 153 })
153 154
154 155
155 CONTENTS['archive.isolated'] = json.dumps( 156 CONTENTS['ar_archive.isolated'] = json.dumps(
156 { 157 {
157 'command': ['python', 'archive_files.py'], 158 'command': ['python', 'archive_files.py'],
158 'files': { 159 'files': {
159 'archive': { 160 'archive': {
160 'h': isolateserver_mock.hash_content(CONTENTS['archive']), 161 'h': isolateserver_mock.hash_content(CONTENTS['ar_archive']),
161 's': len(CONTENTS['archive']), 162 's': len(CONTENTS['ar_archive']),
162 't': 'ar', 163 't': 'ar',
163 }, 164 },
164 'archive_files.py': file_meta('archive_files.py'), 165 'archive_files.py': file_meta('archive_files.py'),
165 }, 166 },
166 }) 167 })
167 168
169 CONTENTS['tar_archive.isolated'] = json.dumps(
170 {
171 'command': ['python', 'archive_files.py'],
172 'files': {
173 'archive': {
174 'h': isolateserver_mock.hash_content(CONTENTS['tar_archive']),
175 's': len(CONTENTS['tar_archive']),
176 't': 'tar',
177 },
178 'archive_files.py': file_meta('archive_files.py'),
179 },
180 })
181
168 182
169 CONTENTS['max_path.isolated'] = json.dumps( 183 CONTENTS['max_path.isolated'] = json.dumps(
170 { 184 {
171 'command': ['python', 'max_path.py'], 185 'command': ['python', 'max_path.py'],
172 'files': { 186 'files': {
173 'a' * 200 + '/' + 'b' * 200: file_meta('file1.txt'), 187 'a' * 200 + '/' + 'b' * 200: file_meta('file1.txt'),
174 'max_path.py': file_meta('max_path.py'), 188 'max_path.py': file_meta('max_path.py'),
175 }, 189 },
176 }) 190 })
177 191
(...skipping 204 matching lines...) Expand 10 before | Expand all | Expand 10 after
382 self._store('repeated_files.py'), 396 self._store('repeated_files.py'),
383 self._store('repeated_files.isolated'), 397 self._store('repeated_files.isolated'),
384 ] 398 ]
385 out, err, returncode = self._run(self._cmd_args(isolated_hash)) 399 out, err, returncode = self._run(self._cmd_args(isolated_hash))
386 self.assertEqual('', err) 400 self.assertEqual('', err)
387 self.assertEqual('Success\n', out) 401 self.assertEqual('Success\n', out)
388 self.assertEqual(0, returncode) 402 self.assertEqual(0, returncode)
389 actual = list_files_tree(self.cache) 403 actual = list_files_tree(self.cache)
390 self.assertEqual(sorted(expected), actual) 404 self.assertEqual(sorted(expected), actual)
391 405
392 def test_archive(self): 406 def test_ar_archive(self):
393 # Loads an .isolated that includes an ar archive. 407 # Loads an .isolated that includes an ar archive.
394 isolated_hash = self._store('archive.isolated') 408 isolated_hash = self._store('ar_archive.isolated')
395 expected = [ 409 expected = [
396 'state.json', 410 'state.json',
397 isolated_hash, 411 isolated_hash,
398 self._store('archive'), 412 self._store('ar_archive'),
399 self._store('archive_files.py'), 413 self._store('archive_files.py'),
400 ] 414 ]
401 out, err, returncode = self._run(self._cmd_args(isolated_hash)) 415 out, err, returncode = self._run(self._cmd_args(isolated_hash))
416 self.assertEqual('', err)
417 self.assertEqual('Success\n', out)
418 self.assertEqual(0, returncode)
419 actual = list_files_tree(self.cache)
420 self.assertEqual(sorted(expected), actual)
421
422 def test_tar_archive(self):
 423     # Loads an .isolated that includes a tar archive.
424 isolated_hash = self._store('tar_archive.isolated')
425 expected = [
426 'state.json',
427 isolated_hash,
428 self._store('tar_archive'),
429 self._store('archive_files.py'),
430 ]
431 out, err, returncode = self._run(self._cmd_args(isolated_hash))
402 self.assertEqual('', err) 432 self.assertEqual('', err)
403 self.assertEqual('Success\n', out) 433 self.assertEqual('Success\n', out)
404 self.assertEqual(0, returncode) 434 self.assertEqual(0, returncode)
405 actual = list_files_tree(self.cache) 435 actual = list_files_tree(self.cache)
406 self.assertEqual(sorted(expected), actual) 436 self.assertEqual(sorted(expected), actual)
407 437
408 def _test_corruption_common(self, new_content): 438 def _test_corruption_common(self, new_content):
409 isolated_hash = self._store('file_with_size.isolated') 439 isolated_hash = self._store('file_with_size.isolated')
410 file1_hash = self._store('file1.txt') 440 file1_hash = self._store('file1.txt')
411 441
(...skipping 46 matching lines...) Expand 10 before | Expand all | Expand 10 after
458 cached_file_path = self._test_corruption_common( 488 cached_file_path = self._test_corruption_common(
459 CONTENTS['file1.txt'][:-1] + ' ') 489 CONTENTS['file1.txt'][:-1] + ' ')
460 # TODO(maruel): This corruption is NOT detected. 490 # TODO(maruel): This corruption is NOT detected.
461 # This needs to be fixed. 491 # This needs to be fixed.
462 self.assertNotEqual(CONTENTS['file1.txt'], read_content(cached_file_path)) 492 self.assertNotEqual(CONTENTS['file1.txt'], read_content(cached_file_path))
463 493
464 494
465 if __name__ == '__main__': 495 if __name__ == '__main__':
466 fix_encoding.fix_encoding() 496 fix_encoding.fix_encoding()
467 test_utils.main() 497 test_utils.main()
OLDNEW
« no previous file with comments | « client/tests/isolateserver_test.py ('k') | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698