Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(42)

Side by Side Diff: client/tests/isolateserver_test.py

Issue 2186263002: luci-py: Refactor file writing code to allow file objects. (Closed) Base URL: https://github.com/luci/luci-py.git@master
Patch Set: Rebase Created 4 years, 4 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « client/run_isolated.py ('k') | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 #!/usr/bin/env python 1 #!/usr/bin/env python
2 # Copyright 2013 The LUCI Authors. All rights reserved. 2 # Copyright 2013 The LUCI Authors. All rights reserved.
3 # Use of this source code is governed under the Apache License, Version 2.0 3 # Use of this source code is governed under the Apache License, Version 2.0
4 # that can be found in the LICENSE file. 4 # that can be found in the LICENSE file.
5 5
6 # pylint: disable=W0212,W0223,W0231,W0613 6 # pylint: disable=W0212,W0223,W0231,W0613
7 7
8 import base64 8 import base64
9 import collections 9 import collections
10 import hashlib 10 import hashlib
11 import json 11 import json
12 import logging 12 import logging
13 import io
13 import os 14 import os
14 import StringIO 15 import StringIO
15 import sys 16 import sys
16 import tempfile 17 import tempfile
17 import unittest 18 import unittest
18 import urllib 19 import urllib
19 import zlib 20 import zlib
20 21
21 # net_utils adjusts sys.path. 22 # net_utils adjusts sys.path.
22 import net_utils 23 import net_utils
(...skipping 121 matching lines...) Expand 10 before | Expand all | Expand 10 after
144 145
145 def contains(self, items): 146 def contains(self, items):
146 self.contains_calls.append(items) 147 self.contains_calls.append(items)
147 missing = {} 148 missing = {}
148 for item in items: 149 for item in items:
149 if item.digest in self.missing_hashes: 150 if item.digest in self.missing_hashes:
150 missing[item] = self.missing_hashes[item.digest] 151 missing[item] = self.missing_hashes[item.digest]
151 return missing 152 return missing
152 153
153 154
155 class UtilsTest(TestCase):
156 """Tests for helper methods in isolateserver file."""
157
158 def assertFile(self, path, contents):
159 self.assertTrue(fs.exists(path), 'File %s doesn\'t exist!' % path)
160 self.assertMultiLineEqual(contents, fs.open(path, 'rb').read())
161
162 def test_file_read(self):
163 # TODO(maruel): Write test for file_read generator (or remove it).
164 pass
165
166 def test_file_write(self):
167 # TODO(maruel): Write test for file_write generator (or remove it).
168 pass
169
170 def test_fileobj_path(self):
171 # No path on in-memory objects
172 self.assertIs(None, isolateserver.fileobj_path(io.BytesIO('hello')))
173
174 # Path on opened files
175 thisfile = os.path.abspath(__file__.decode(sys.getfilesystemencoding()))
176 f = fs.open(thisfile)
177 result = isolateserver.fileobj_path(f)
178 self.assertIsInstance(result, unicode)
179 self.assertSequenceEqual(result, thisfile)
180
181 # Path on temporary files
182 tf = tempfile.NamedTemporaryFile()
183 result = isolateserver.fileobj_path(tf)
184 self.assertIsInstance(result, unicode)
185 self.assertSequenceEqual(result, tf.name)
186
187 # No path on files which are no longer on the file system
188 tf = tempfile.NamedTemporaryFile(delete=False)
189 fs.unlink(tf.name.decode(sys.getfilesystemencoding()))
190 self.assertIs(None, isolateserver.fileobj_path(tf))
191
192 def test_fileobj_copy_simple(self):
193 inobj = io.BytesIO('hello')
194 outobj = io.BytesIO()
195
196 isolateserver.fileobj_copy(outobj, inobj)
197 self.assertEqual('hello', outobj.getvalue())
198
199 def test_fileobj_copy_partial(self):
200 inobj = io.BytesIO('adatab')
201 outobj = io.BytesIO()
202 inobj.read(1)
203
204 isolateserver.fileobj_copy(outobj, inobj, size=4)
205 self.assertEqual('data', outobj.getvalue())
206
207 def test_fileobj_copy_partial_file_no_size(self):
208 with self.assertRaises(IOError):
209 inobj = io.BytesIO('hello')
210 outobj = io.BytesIO()
211
212 inobj.read(1)
213 isolateserver.fileobj_copy(outobj, inobj)
214
215 def test_fileobj_copy_size_but_file_short(self):
216 with self.assertRaises(IOError):
217 inobj = io.BytesIO('hello')
218 outobj = io.BytesIO()
219
220 isolateserver.fileobj_copy(outobj, inobj, size=10)
221
222 def test_putfile(self):
223 tmpoutdir = None
224 tmpindir = None
225
226 try:
227 tmpindir = tempfile.mkdtemp(prefix='isolateserver_test')
228 infile = os.path.join(tmpindir, u'in')
229 with fs.open(infile, 'wb') as f:
230 f.write('data')
231
232 tmpoutdir = tempfile.mkdtemp(prefix='isolateserver_test')
233
234 # Copy as fileobj
235 fo = os.path.join(tmpoutdir, u'fo')
236 isolateserver.putfile(io.BytesIO('data'), fo)
237 self.assertEqual(True, fs.exists(fo))
238 self.assertEqual(False, fs.islink(fo))
239 self.assertFile(fo, 'data')
240
241 # Copy with partial fileobj
242 pfo = os.path.join(tmpoutdir, u'pfo')
243 fobj = io.BytesIO('adatab')
244 fobj.read(1) # Read the 'a'
245 isolateserver.putfile(fobj, pfo, size=4)
246 self.assertEqual(True, fs.exists(pfo))
247 self.assertEqual(False, fs.islink(pfo))
248 self.assertEqual('b', fobj.read())
249 self.assertFile(pfo, 'data')
250
251 # Copy as not readonly
252 cp = os.path.join(tmpoutdir, u'cp')
253 with fs.open(infile, 'rb') as f:
254 isolateserver.putfile(f, cp, file_mode=0755)
255 self.assertEqual(True, fs.exists(cp))
256 self.assertEqual(False, fs.islink(cp))
257 self.assertFile(cp, 'data')
258
259 # Use hardlink
260 hl = os.path.join(tmpoutdir, u'hl')
261 with fs.open(infile, 'rb') as f:
262 isolateserver.putfile(f, hl, use_symlink=False)
263 self.assertEqual(True, fs.exists(hl))
264 self.assertEqual(False, fs.islink(hl))
265 self.assertFile(hl, 'data')
266
267 # Use symlink
268 sl = os.path.join(tmpoutdir, u'sl')
269 with fs.open(infile, 'rb') as f:
270 isolateserver.putfile(f, sl, use_symlink=True)
271 self.assertEqual(True, fs.exists(sl))
272 self.assertEqual(True, fs.islink(sl))
273 self.assertEqual('data', fs.open(sl, 'rb').read())
274 self.assertFile(sl, 'data')
275
276 finally:
277 if tmpindir:
278 file_path.rmtree(tmpindir)
279 if tmpoutdir:
280 file_path.rmtree(tmpoutdir)
281
282
154 class StorageTest(TestCase): 283 class StorageTest(TestCase):
155 """Tests for Storage methods.""" 284 """Tests for Storage methods."""
156 285
157 def assertEqualIgnoringOrder(self, a, b): 286 def assertEqualIgnoringOrder(self, a, b):
158 """Asserts that containers |a| and |b| contain same items.""" 287 """Asserts that containers |a| and |b| contain same items."""
159 self.assertEqual(len(a), len(b)) 288 self.assertEqual(len(a), len(b))
160 self.assertEqual(set(a), set(b)) 289 self.assertEqual(set(a), set(b))
161 290
162 def get_push_state(self, storage, item): 291 def get_push_state(self, storage, item):
163 missing = list(storage.get_missing_items([item])) 292 missing = list(storage.get_missing_items([item]))
(...skipping 552 matching lines...) Expand 10 before | Expand all | Expand 10 after
716 for item in items: 845 for item in items:
717 pending.add(item.digest) 846 pending.add(item.digest)
718 queue.add(item.digest) 847 queue.add(item.digest)
719 848
720 # Wait for fetch to complete. 849 # Wait for fetch to complete.
721 while pending: 850 while pending:
722 fetched = queue.wait(pending) 851 fetched = queue.wait(pending)
723 pending.discard(fetched) 852 pending.discard(fetched)
724 853
725 # Ensure fetched same data as was pushed. 854 # Ensure fetched same data as was pushed.
726 self.assertEqual( 855 actual = []
727 [i.buffer for i in items], 856 for i in items:
728 [cache.read(i.digest) for i in items]) 857 with cache.getfileobj(i.digest) as f:
858 actual.append(f.read())
859
860 self.assertEqual([i.buffer for i in items], actual)
729 861
  def test_push_and_fetch(self):
    # Round-trips items through the 'default' namespace.
    self.run_push_and_fetch_test('default')
732 864
  def test_push_and_fetch_gzip(self):
    # Same round-trip as test_push_and_fetch, but through the
    # 'default-gzip' namespace.
    self.run_push_and_fetch_test('default-gzip')
735 867
736 if sys.maxsize == (2**31) - 1: 868 if sys.maxsize == (2**31) - 1:
737 def test_archive_multiple_huge_file(self): 869 def test_archive_multiple_huge_file(self):
738 self.server.discard_content() 870 self.server.discard_content()
(...skipping 93 matching lines...) Expand 10 before | Expand all | Expand 10 after
832 '--file', 'sha-1', 'path/to/a', 964 '--file', 'sha-1', 'path/to/a',
833 '--file', 'sha-2', 'path/to/b', 965 '--file', 'sha-2', 'path/to/b',
834 ] 966 ]
835 self.assertEqual(0, isolateserver.main(cmd)) 967 self.assertEqual(0, isolateserver.main(cmd))
836 expected = { 968 expected = {
837 os.path.join(net_utils.ROOT_DIR, 'path/to/a'): 'Coucou', 969 os.path.join(net_utils.ROOT_DIR, 'path/to/a'): 'Coucou',
838 os.path.join(net_utils.ROOT_DIR, 'path/to/b'): 'Bye Bye', 970 os.path.join(net_utils.ROOT_DIR, 'path/to/b'): 'Bye Bye',
839 } 971 }
840 self.assertEqual(expected, actual) 972 self.assertEqual(expected, actual)
841 973
842 def test_download_isolated(self): 974 def test_download_isolated_simple(self):
843 # Test downloading an isolated tree. 975 # Test downloading an isolated tree.
844 actual = {} 976 actual = {}
845 def file_write_mock(key, generator): 977 def putfile_mock(
846 actual[key] = ''.join(generator) 978 srcfileobj, dstpath, file_mode=None, size=-1, use_symlink=False):
847 self.mock(isolateserver, 'file_write', file_write_mock) 979 actual[dstpath] = srcfileobj.read()
980 self.mock(isolateserver, 'putfile', putfile_mock)
848 self.mock(os, 'makedirs', lambda _: None) 981 self.mock(os, 'makedirs', lambda _: None)
849 server = 'http://example.com' 982 server = 'http://example.com'
850 files = { 983 files = {
851 os.path.join('a', 'foo'): 'Content', 984 os.path.join('a', 'foo'): 'Content',
852 'b': 'More content', 985 'b': 'More content',
853 } 986 }
854 isolated = { 987 isolated = {
855 'command': ['Absurb', 'command'], 988 'command': ['Absurb', 'command'],
856 'relative_cwd': 'a', 989 'relative_cwd': 'a',
857 'files': dict( 990 'files': dict(
858 (k, {'h': isolateserver_mock.hash_content(v), 's': len(v)}) 991 (k, {'h': isolateserver_mock.hash_content(v), 's': len(v)})
859 for k, v in files.iteritems()), 992 for k, v in files.iteritems()),
860 'version': isolated_format.ISOLATED_FILE_VERSION, 993 'version': isolated_format.ISOLATED_FILE_VERSION,
861 } 994 }
862 isolated_data = json.dumps(isolated, sort_keys=True, separators=(',',':')) 995 isolated_data = json.dumps(isolated, sort_keys=True, separators=(',',':'))
863 isolated_hash = isolateserver_mock.hash_content(isolated_data) 996 isolated_hash = isolateserver_mock.hash_content(isolated_data)
(...skipping 152 matching lines...) Expand 10 before | Expand all | Expand 10 after
1016 return isolateserver.DiskCache(self.tempdir, self._policies, self._algo) 1149 return isolateserver.DiskCache(self.tempdir, self._policies, self._algo)
1017 1150
1018 def to_hash(self, content): 1151 def to_hash(self, content):
1019 return self._algo(content).hexdigest(), content 1152 return self._algo(content).hexdigest(), content
1020 1153
  def test_read_evict(self):
    # Enough free disk for the write policy to accept the item.
    self._free_disk = 1100
    h_a = self.to_hash('a')[0]
    # Write then read back through a file object.
    with self.get_cache() as cache:
      cache.write(h_a, 'a')
      with cache.getfileobj(h_a) as f:
        self.assertEqual('a', f.read())

    # After eviction, fetching the same digest must be a cache miss.
    with self.get_cache() as cache:
      cache.evict(h_a)
      with self.assertRaises(isolateserver.CacheMiss):
        cache.getfileobj(h_a)
1032
1033 def test_link(self):
1034 self._free_disk = 1100
1035 cache = self.get_cache()
1036 h_a = self.to_hash('a')[0]
1037 cache.write(h_a, 'a')
1038 mapped = tempfile.mkdtemp(prefix='isolateserver_test')
1039 try:
1040 cache.link(h_a, os.path.join(mapped, u'hl'), False, False)
1041 cache.link(h_a, os.path.join(mapped, u'sl'), False, True)
1042 self.assertEqual(sorted(['hl', 'sl']), sorted(os.listdir(mapped)))
1043 self.assertEqual(False, fs.islink(os.path.join(mapped, u'hl')))
1044 self.assertEqual(True, fs.islink(os.path.join(mapped, u'sl')))
1045 finally:
1046 file_path.rmtree(mapped)
1047 1166
  def test_policies_free_disk(self):
    # With the default (insufficient) _free_disk, a write must be rejected.
    with self.assertRaises(isolateserver.Error):
      self.get_cache().write(*self.to_hash('a'))
1051 1170
  def test_policies_fit(self):
    # 1100 bytes free is enough for a 100-byte item; the write must succeed.
    self._free_disk = 1100
    self.get_cache().write(*self.to_hash('a'*100))
1055 1174
1056 def test_policies_too_much(self): 1175 def test_policies_too_much(self):
(...skipping 81 matching lines...) Expand 10 before | Expand all | Expand 10 after
1138 1257
1139 1258
if __name__ == '__main__':
  fix_encoding.fix_encoding()
  # '-v' enables full assertion diffs and debug-level logging.
  if '-v' in sys.argv:
    unittest.TestCase.maxDiff = None
  logging.basicConfig(
      level=(logging.DEBUG if '-v' in sys.argv else logging.CRITICAL))
  clear_env_vars()
  unittest.main()
OLDNEW
« no previous file with comments | « client/run_isolated.py ('k') | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698