Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(109)

Side by Side Diff: client/tests/isolateserver_test.py

Issue 2186263002: luci-py: Refactor file writing code to allow file objects. (Closed) Base URL: https://github.com/luci/luci-py.git@master
Patch Set: Fixes for review. Created 4 years, 4 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « client/run_isolated.py ('k') | no next file » | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 #!/usr/bin/env python 1 #!/usr/bin/env python
2 # Copyright 2013 The LUCI Authors. All rights reserved. 2 # Copyright 2013 The LUCI Authors. All rights reserved.
3 # Use of this source code is governed under the Apache License, Version 2.0 3 # Use of this source code is governed under the Apache License, Version 2.0
4 # that can be found in the LICENSE file. 4 # that can be found in the LICENSE file.
5 5
6 # pylint: disable=W0212,W0223,W0231,W0613 6 # pylint: disable=W0212,W0223,W0231,W0613
7 7
8 import base64 8 import base64
9 import collections 9 import collections
10 import hashlib 10 import hashlib
11 import json 11 import json
12 import logging 12 import logging
13 import io
13 import os 14 import os
14 import StringIO 15 import StringIO
15 import sys 16 import sys
16 import tempfile 17 import tempfile
17 import unittest 18 import unittest
18 import urllib 19 import urllib
19 import zlib 20 import zlib
20 21
21 # net_utils adjusts sys.path. 22 # net_utils adjusts sys.path.
22 import net_utils 23 import net_utils
(...skipping 121 matching lines...) Expand 10 before | Expand all | Expand 10 after
144 145
def contains(self, items):
  """Records the lookup and returns the missing subset of |items|.

  Appends |items| to self.contains_calls, then maps every item whose
  digest is present in self.missing_hashes to its recorded value.
  """
  self.contains_calls.append(items)
  return {
      entry: self.missing_hashes[entry.digest]
      for entry in items
      if entry.digest in self.missing_hashes
  }
152 153
153 154
class UtilsTest(TestCase):
  """Tests for helper methods in isolateserver file."""

  def assertFile(self, path, contents):
    """Asserts that the file at |path| exists and holds exactly |contents|."""
    self.assertTrue(fs.exists(path), 'File %s doesn\'t exist!' % path)
    # Close the handle deterministically instead of leaking it.
    with fs.open(path, 'rb') as f:
      self.assertMultiLineEqual(contents, f.read())

  def test_file_read(self):
    # TODO(maruel): Write test for file_read generator (or remove it).
    pass

  def test_file_write(self):
    # TODO(maruel): Write test for file_write generator (or remove it).
    pass

  def test_fileobj_path(self):
    # No path on in-memory objects.
    self.assertIs(None, isolateserver.fileobj_path(io.BytesIO('hello')))

    # Path on opened files. Use a context manager so the handle on this very
    # source file is not leaked.
    thisfile = os.path.abspath(__file__.decode(sys.getfilesystemencoding()))
    with fs.open(thisfile) as f:
      result = isolateserver.fileobj_path(f)
      self.assertIsInstance(result, unicode)
      self.assertSequenceEqual(result, thisfile)

    # Path on temporary files.
    tf = tempfile.NamedTemporaryFile()
    result = isolateserver.fileobj_path(tf)
    self.assertIsInstance(result, unicode)
    self.assertSequenceEqual(result, tf.name)

    # No path on files which are no longer on the file system.
    tf = tempfile.NamedTemporaryFile(delete=False)
    fs.unlink(tf.name.decode(sys.getfilesystemencoding()))
    self.assertIs(None, isolateserver.fileobj_path(tf))

  def test_fileobj_copy_simple(self):
    inobj = io.BytesIO('hello')
    outobj = io.BytesIO()

    isolateserver.fileobj_copy(outobj, inobj)
    self.assertEqual('hello', outobj.getvalue())

  def test_fileobj_copy_partial(self):
    # Copying honors |size| and the source's current position.
    inobj = io.BytesIO('adatab')
    outobj = io.BytesIO()
    inobj.read(1)

    isolateserver.fileobj_copy(outobj, inobj, size=4)
    self.assertEqual('data', outobj.getvalue())

  def test_fileobj_copy_partial_file_no_size(self):
    # Copying a partially-consumed source without an explicit size must fail.
    with self.assertRaises(IOError):
      inobj = io.BytesIO('hello')
      outobj = io.BytesIO()

      inobj.read(1)
      isolateserver.fileobj_copy(outobj, inobj)

  def test_fileobj_copy_size_but_file_short(self):
    # Asking for more bytes than the source holds must fail.
    with self.assertRaises(IOError):
      inobj = io.BytesIO('hello')
      outobj = io.BytesIO()

      isolateserver.fileobj_copy(outobj, inobj, size=10)

  def test_putfile(self):
    tmpoutdir = None
    tmpindir = None

    try:
      tmpindir = tempfile.mkdtemp(prefix='isolateserver_test')
      infile = os.path.join(tmpindir, u'in')
      with fs.open(infile, 'wb') as f:
        f.write('data')

      tmpoutdir = tempfile.mkdtemp(prefix='isolateserver_test')

      # Copy as fileobj.
      fo = os.path.join(tmpoutdir, u'fo')
      isolateserver.putfile(io.BytesIO('data'), fo)
      self.assertEqual(True, fs.exists(fo))
      self.assertEqual(False, fs.islink(fo))
      self.assertFile(fo, 'data')

      # Copy with partial fileobj.
      pfo = os.path.join(tmpoutdir, u'pfo')
      fobj = io.BytesIO('adatab')
      fobj.read(1)  # Read the 'a'
      isolateserver.putfile(fobj, pfo, size=4)
      self.assertEqual(True, fs.exists(pfo))
      self.assertEqual(False, fs.islink(pfo))
      self.assertEqual('b', fobj.read())
      self.assertFile(pfo, 'data')

      # Copy as not readonly. Use a context manager so the source handle is
      # closed promptly instead of leaking until GC.
      cp = os.path.join(tmpoutdir, u'cp')
      with fs.open(infile, 'rb') as f:
        isolateserver.putfile(f, cp, file_mode=0o755)
      self.assertEqual(True, fs.exists(cp))
      self.assertEqual(False, fs.islink(cp))
      self.assertFile(cp, 'data')

      # Use hardlink.
      hl = os.path.join(tmpoutdir, u'hl')
      with fs.open(infile, 'rb') as f:
        isolateserver.putfile(f, hl, use_symlink=False)
      self.assertEqual(True, fs.exists(hl))
      self.assertEqual(False, fs.islink(hl))
      self.assertFile(hl, 'data')

      # Use symlink.
      sl = os.path.join(tmpoutdir, u'sl')
      with fs.open(infile, 'rb') as f:
        isolateserver.putfile(f, sl, use_symlink=True)
      self.assertEqual(True, fs.exists(sl))
      self.assertEqual(True, fs.islink(sl))
      with fs.open(sl, 'rb') as f:
        self.assertEqual('data', f.read())
      self.assertFile(sl, 'data')

    finally:
      if tmpindir:
        file_path.rmtree(tmpindir)
      if tmpoutdir:
        file_path.rmtree(tmpoutdir)
279
154 class StorageTest(TestCase): 280 class StorageTest(TestCase):
155 """Tests for Storage methods.""" 281 """Tests for Storage methods."""
156 282
def assertEqualIgnoringOrder(self, a, b):
  """Asserts that containers |a| and |b| contain the same items.

  Order is ignored; lengths and the sets of elements must both match.
  """
  self.assertEqual(len(a), len(b))
  self.assertEqual(set(a), set(b))
161 287
162 def get_push_state(self, storage, item): 288 def get_push_state(self, storage, item):
163 missing = list(storage.get_missing_items([item])) 289 missing = list(storage.get_missing_items([item]))
(...skipping 552 matching lines...) Expand 10 before | Expand all | Expand 10 after
716 for item in items: 842 for item in items:
717 pending.add(item.digest) 843 pending.add(item.digest)
718 queue.add(item.digest) 844 queue.add(item.digest)
719 845
720 # Wait for fetch to complete. 846 # Wait for fetch to complete.
721 while pending: 847 while pending:
722 fetched = queue.wait(pending) 848 fetched = queue.wait(pending)
723 pending.discard(fetched) 849 pending.discard(fetched)
724 850
725 # Ensure fetched same data as was pushed. 851 # Ensure fetched same data as was pushed.
726 self.assertEqual( 852 actual = []
727 [i.buffer for i in items], 853 for i in items:
728 [cache.read(i.digest) for i in items]) 854 with cache.getfileobj(i.digest) as f:
855 actual.append(f.read())
856
857 self.assertEqual([i.buffer for i in items], actual)
729 858
def test_push_and_fetch(self):
  # Round-trip items through the plain (uncompressed) namespace.
  self.run_push_and_fetch_test('default')

def test_push_and_fetch_gzip(self):
  # Round-trip items through the gzip-compressed namespace.
  self.run_push_and_fetch_test('default-gzip')
735 864
736 if sys.maxsize == (2**31) - 1: 865 if sys.maxsize == (2**31) - 1:
737 def test_archive_multiple_huge_file(self): 866 def test_archive_multiple_huge_file(self):
738 self.server.discard_content() 867 self.server.discard_content()
(...skipping 93 matching lines...) Expand 10 before | Expand all | Expand 10 after
832 '--file', 'sha-1', 'path/to/a', 961 '--file', 'sha-1', 'path/to/a',
833 '--file', 'sha-2', 'path/to/b', 962 '--file', 'sha-2', 'path/to/b',
834 ] 963 ]
835 self.assertEqual(0, isolateserver.main(cmd)) 964 self.assertEqual(0, isolateserver.main(cmd))
836 expected = { 965 expected = {
837 os.path.join(net_utils.ROOT_DIR, 'path/to/a'): 'Coucou', 966 os.path.join(net_utils.ROOT_DIR, 'path/to/a'): 'Coucou',
838 os.path.join(net_utils.ROOT_DIR, 'path/to/b'): 'Bye Bye', 967 os.path.join(net_utils.ROOT_DIR, 'path/to/b'): 'Bye Bye',
839 } 968 }
840 self.assertEqual(expected, actual) 969 self.assertEqual(expected, actual)
841 970
842 def test_download_isolated(self): 971 def test_download_isolated_simple(self):
843 # Test downloading an isolated tree. 972 # Test downloading an isolated tree.
844 actual = {} 973 actual = {}
845 def file_write_mock(key, generator): 974 def putfile_mock(
846 actual[key] = ''.join(generator) 975 srcfileobj, dstpath, file_mode=None, size=-1, use_symlink=False):
847 self.mock(isolateserver, 'file_write', file_write_mock) 976 actual[dstpath] = srcfileobj.read()
977 self.mock(isolateserver, 'putfile', putfile_mock)
848 self.mock(os, 'makedirs', lambda _: None) 978 self.mock(os, 'makedirs', lambda _: None)
849 server = 'http://example.com' 979 server = 'http://example.com'
850 files = { 980 files = {
851 os.path.join('a', 'foo'): 'Content', 981 os.path.join('a', 'foo'): 'Content',
852 'b': 'More content', 982 'b': 'More content',
853 } 983 }
854 isolated = { 984 isolated = {
855 'command': ['Absurb', 'command'], 985 'command': ['Absurb', 'command'],
856 'relative_cwd': 'a', 986 'relative_cwd': 'a',
857 'files': dict( 987 'files': dict(
858 (k, {'h': isolateserver_mock.hash_content(v), 's': len(v)}) 988 (k, {'h': isolateserver_mock.hash_content(v), 's': len(v)})
859 for k, v in files.iteritems()), 989 for k, v in files.iteritems()),
860 'version': isolated_format.ISOLATED_FILE_VERSION, 990 'version': isolated_format.ISOLATED_FILE_VERSION,
861 } 991 }
862 isolated_data = json.dumps(isolated, sort_keys=True, separators=(',',':')) 992 isolated_data = json.dumps(isolated, sort_keys=True, separators=(',',':'))
863 isolated_hash = isolateserver_mock.hash_content(isolated_data) 993 isolated_hash = isolateserver_mock.hash_content(isolated_data)
(...skipping 152 matching lines...) Expand 10 before | Expand all | Expand 10 after
1016 return isolateserver.DiskCache(self.tempdir, self._policies, self._algo) 1146 return isolateserver.DiskCache(self.tempdir, self._policies, self._algo)
1017 1147
def to_hash(self, content):
  """Returns (hex digest of |content| under the test's algo, |content|)."""
  digest = self._algo(content).hexdigest()
  return digest, content
1020 1150
def test_read_evict(self):
  """A written entry is readable; after evict, getfileobj raises CacheMiss."""
  self._free_disk = 1100
  digest = self.to_hash('a')[0]
  with self.get_cache() as cache:
    cache.write(digest, 'a')
    with cache.getfileobj(digest) as f:
      self.assertEqual('a', f.read())

  with self.get_cache() as cache:
    cache.evict(digest)
    with self.assertRaises(isolateserver.CacheMiss):
      cache.getfileobj(digest)
1032
def test_link(self):
  """Linking a cached entry produces a hardlink or a symlink as requested."""
  self._free_disk = 1100
  cache = self.get_cache()
  digest = self.to_hash('a')[0]
  cache.write(digest, 'a')
  mapped = tempfile.mkdtemp(prefix='isolateserver_test')
  try:
    cache.link(digest, os.path.join(mapped, u'hl'), False, False)
    cache.link(digest, os.path.join(mapped, u'sl'), False, True)
    self.assertEqual(sorted(['hl', 'sl']), sorted(os.listdir(mapped)))
    # Only the second link request asked for a symlink.
    self.assertEqual(False, fs.islink(os.path.join(mapped, u'hl')))
    self.assertEqual(True, fs.islink(os.path.join(mapped, u'sl')))
  finally:
    file_path.rmtree(mapped)
1047 1163
def test_policies_free_disk(self):
  # With no free disk configured, any write must be rejected.
  digest, content = self.to_hash('a')
  with self.assertRaises(isolateserver.Error):
    self.get_cache().write(digest, content)

def test_policies_fit(self):
  # With enough free disk the write is accepted.
  self._free_disk = 1100
  digest, content = self.to_hash('a' * 100)
  self.get_cache().write(digest, content)
1055 1171
1056 def test_policies_too_much(self): 1172 def test_policies_too_much(self):
(...skipping 81 matching lines...) Expand 10 before | Expand all | Expand 10 after
1138 1254
1139 1255
if __name__ == '__main__':
  fix_encoding.fix_encoding()
  # '-v' turns on full diffs and debug-level logging.
  verbose = '-v' in sys.argv
  if verbose:
    unittest.TestCase.maxDiff = None
  logging.basicConfig(
      level=(logging.DEBUG if verbose else logging.CRITICAL))
  clear_env_vars()
  unittest.main()
OLDNEW
« no previous file with comments | « client/run_isolated.py ('k') | no next file » | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698