Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(403)

Side by Side Diff: client/tests/isolateserver_test.py

Issue 2060983006: luci-py/isolateserver.py: Add archive support when downloading. (Closed) Base URL: https://github.com/luci/luci-py.git@master
Patch Set: Rebase Created 4 years, 6 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 #!/usr/bin/env python 1 #!/usr/bin/env python
2 # Copyright 2013 The LUCI Authors. All rights reserved. 2 # Copyright 2013 The LUCI Authors. All rights reserved.
3 # Use of this source code is governed under the Apache License, Version 2.0 3 # Use of this source code is governed under the Apache License, Version 2.0
4 # that can be found in the LICENSE file. 4 # that can be found in the LICENSE file.
5 5
6 # pylint: disable=W0212,W0223,W0231,W0613 6 # pylint: disable=W0212,W0223,W0231,W0613
7 7
8 import base64 8 import base64
9 import collections 9 import collections
10 import hashlib 10 import hashlib
11 import json 11 import json
12 import logging 12 import logging
13 import io
13 import os 14 import os
14 import StringIO 15 import StringIO
15 import sys 16 import sys
16 import tempfile 17 import tempfile
17 import unittest 18 import unittest
18 import urllib 19 import urllib
19 import zlib 20 import zlib
20 21
21 # net_utils adjusts sys.path. 22 # net_utils adjusts sys.path.
22 import net_utils 23 import net_utils
(...skipping 692 matching lines...) Expand 10 before | Expand all | Expand 10 after
715 for item in items: 716 for item in items:
716 pending.add(item.digest) 717 pending.add(item.digest)
717 queue.add(item.digest) 718 queue.add(item.digest)
718 719
719 # Wait for fetch to complete. 720 # Wait for fetch to complete.
720 while pending: 721 while pending:
721 fetched = queue.wait(pending) 722 fetched = queue.wait(pending)
722 pending.discard(fetched) 723 pending.discard(fetched)
723 724
724 # Ensure fetched same data as was pushed. 725 # Ensure fetched same data as was pushed.
725 self.assertEqual( 726 actual = []
726 [i.buffer for i in items], 727 for i in items:
727 [cache.read(i.digest) for i in items]) 728 with cache.getfileobj(i.digest) as f:
729 actual.append(f.read())
730
731 self.assertEqual([i.buffer for i in items], actual)
728 732
729 def test_push_and_fetch(self): 733 def test_push_and_fetch(self):
730 self.run_push_and_fetch_test('default') 734 self.run_push_and_fetch_test('default')
731 735
732 def test_push_and_fetch_gzip(self): 736 def test_push_and_fetch_gzip(self):
733 self.run_push_and_fetch_test('default-gzip') 737 self.run_push_and_fetch_test('default-gzip')
734 738
735 if sys.maxsize == (2**31) - 1: 739 if sys.maxsize == (2**31) - 1:
736 def test_archive_multiple_huge_file(self): 740 def test_archive_multiple_huge_file(self):
737 self.server.discard_content() 741 self.server.discard_content()
(...skipping 93 matching lines...) Expand 10 before | Expand all | Expand 10 after
831 '--file', 'sha-1', 'path/to/a', 835 '--file', 'sha-1', 'path/to/a',
832 '--file', 'sha-2', 'path/to/b', 836 '--file', 'sha-2', 'path/to/b',
833 ] 837 ]
834 self.assertEqual(0, isolateserver.main(cmd)) 838 self.assertEqual(0, isolateserver.main(cmd))
835 expected = { 839 expected = {
836 os.path.join(net_utils.ROOT_DIR, 'path/to/a'): 'Coucou', 840 os.path.join(net_utils.ROOT_DIR, 'path/to/a'): 'Coucou',
837 os.path.join(net_utils.ROOT_DIR, 'path/to/b'): 'Bye Bye', 841 os.path.join(net_utils.ROOT_DIR, 'path/to/b'): 'Bye Bye',
838 } 842 }
839 self.assertEqual(expected, actual) 843 self.assertEqual(expected, actual)
840 844
841 def test_download_isolated(self): 845 def test_download_isolated_simple(self):
842 # Test downloading an isolated tree. 846 # Test downloading an isolated tree.
843 actual = {} 847 actual = {}
844 def file_write_mock(key, generator): 848 def putfile_mock(srcfileobj, dstpath, file_mode=None):
845 actual[key] = ''.join(generator) 849 actual[dstpath] = srcfileobj.read()
846 self.mock(isolateserver, 'file_write', file_write_mock) 850 self.mock(isolateserver, 'putfile', putfile_mock)
847 self.mock(os, 'makedirs', lambda _: None) 851 self.mock(os, 'makedirs', lambda _: None)
848 server = 'http://example.com' 852 server = 'http://example.com'
849 files = { 853 files = {
850 os.path.join('a', 'foo'): 'Content', 854 os.path.join('a', 'foo'): 'Content',
851 'b': 'More content', 855 'b': 'More content',
852 } 856 }
853 isolated = { 857 isolated = {
854 'command': ['Absurb', 'command'], 858 'command': ['Absurb', 'command'],
855 'relative_cwd': 'a', 859 'relative_cwd': 'a',
856 'files': dict( 860 'files': dict(
857 (k, {'h': isolateserver_mock.hash_content(v), 's': len(v)}) 861 (k, {'h': isolateserver_mock.hash_content(v), 's': len(v)})
858 for k, v in files.iteritems()), 862 for k, v in files.iteritems()),
859 'version': isolated_format.ISOLATED_FILE_VERSION, 863 'version': isolated_format.ISOLATED_FILE_VERSION,
860 } 864 }
861 isolated_data = json.dumps(isolated, sort_keys=True, separators=(',',':')) 865 isolated_data = json.dumps(isolated, sort_keys=True, separators=(',',':'))
862 isolated_hash = isolateserver_mock.hash_content(isolated_data) 866 isolated_hash = isolateserver_mock.hash_content(isolated_data)
(...skipping 26 matching lines...) Expand all
889 self.expected_requests(requests) 893 self.expected_requests(requests)
890 self.assertEqual(0, isolateserver.main(cmd)) 894 self.assertEqual(0, isolateserver.main(cmd))
891 expected = dict( 895 expected = dict(
892 (os.path.join(self.tempdir, k), v) for k, v in files.iteritems()) 896 (os.path.join(self.tempdir, k), v) for k, v in files.iteritems())
893 self.assertEqual(expected, actual) 897 self.assertEqual(expected, actual)
894 expected_stdout = ( 898 expected_stdout = (
895 'To run this test please run from the directory %s:\n Absurb command\n' 899 'To run this test please run from the directory %s:\n Absurb command\n'
896 % os.path.join(self.tempdir, 'a')) 900 % os.path.join(self.tempdir, 'a'))
897 self.checkOutput(expected_stdout, '') 901 self.checkOutput(expected_stdout, '')
898 902
903 def test_download_isolated_archive(self):
904 # Test downloading an isolated tree that contains a small-files archive.
905 actual = {}
906 def putfile_mock(srcfileobj, dstpath, file_mode=None, size=-1):
907 actual[dstpath] = srcfileobj.read(size)
908 self.mock(isolateserver, 'putfile', putfile_mock)
909 self.mock(os, 'makedirs', lambda _: None)
910 server = 'http://example.com'
911
912 files = {
913 os.path.join('a', 'foo'): 'Content',
914 'b': 'More content',
915 'c': 'Even more content!',
916 }
917
918 archive = (
919 # ar file header
920 '!<arch>\n'
921 # File 1 -------------------------
922 # (16 bytes) filename len
923 '#1/5 '
924 # file metadata
925 '1447140471 1000 1000 100640 '
926 # (10 bytes) Data size
927 '12 '
928 # (2 bytes) File magic
929 '\x60\n'
930 # (5 bytes) File name
931 'a/foo'
932 # (7 bytes) File data
933 'Content'
934 # File 2 -------------------------
935 # (16 bytes) filename
936 'b '
937 # file metadata
938 '1447140471 1000 1000 100640 '
939 # (10 bytes) Data size
940 '12 '
941 # (2 bytes) File magic
942 '\x60\n'
943 # (12 bytes) File data
944 'More content'
945 '')
946
947 isolated = {
948 'command': ['Absurb', 'command'],
949 'relative_cwd': 'a',
950 'files': {
951 'archive1': {
952 'h': isolateserver_mock.hash_content(archive),
953 's': len(archive),
954 't': 'smallfiles-archive',
955 },
956 'c': {
957 'h': isolateserver_mock.hash_content(files['c']),
958 's': len(files['c']),
959 },
960 },
961 'version': isolated_format.ISOLATED_FILE_VERSION,
962 }
963 isolated_data = json.dumps(isolated, sort_keys=True, separators=(',',':'))
964 isolated_hash = isolateserver_mock.hash_content(isolated_data)
965 requests = [
966 (isolated['files']['archive1']['h'], archive),
967 (isolated['files']['c']['h'], files['c']),
968 ]
969 requests.append((isolated_hash, isolated_data))
970 requests = [
971 (
972 server + '/_ah/api/isolateservice/v1/retrieve',
973 {
974 'data': {
975 'digest': h.encode('utf-8'),
976 'namespace': {
977 'namespace': 'default-gzip',
978 'digest_hash': 'sha-1',
979 'compression': 'flate',
980 },
981 'offset': 0,
982 },
983 'read_timeout': 60,
984 },
985 {'content': base64.b64encode(zlib.compress(v))},
986 ) for h, v in requests
987 ]
988 cmd = [
989 'download',
990 '--isolate-server', server,
991 '--target', self.tempdir,
992 '--isolated', isolated_hash,
993 ]
994 self.expected_requests(requests)
995 self.assertEqual(0, isolateserver.main(cmd))
996 expected = dict(
997 (os.path.join(self.tempdir, k), v) for k, v in files.iteritems())
998 self.assertEqual(expected, actual)
999 expected_stdout = (
1000 'To run this test please run from the directory %s:\n Absurb command\n'
1001 % os.path.join(self.tempdir, 'a'))
1002 self.checkOutput(expected_stdout, '')
1003
899 1004
1005
900 def get_storage(_isolate_server, namespace): 1006 def get_storage(_isolate_server, namespace):
901 class StorageFake(object): 1007 class StorageFake(object):
902 def __enter__(self, *_): 1008 def __enter__(self, *_):
903 return self 1009 return self
904 1010
905 def __exit__(self, *_): 1011 def __exit__(self, *_):
906 pass 1012 pass
907 1013
908 @property 1014 @property
909 def hash_algo(self): # pylint: disable=R0201 1015 def hash_algo(self): # pylint: disable=R0201
(...skipping 171 matching lines...) Expand 10 before | Expand all | Expand 10 after
1081 1187
1082 1188
1083 if __name__ == '__main__': 1189 if __name__ == '__main__':
1084 fix_encoding.fix_encoding() 1190 fix_encoding.fix_encoding()
1085 if '-v' in sys.argv: 1191 if '-v' in sys.argv:
1086 unittest.TestCase.maxDiff = None 1192 unittest.TestCase.maxDiff = None
1087 logging.basicConfig( 1193 logging.basicConfig(
1088 level=(logging.DEBUG if '-v' in sys.argv else logging.CRITICAL)) 1194 level=(logging.DEBUG if '-v' in sys.argv else logging.CRITICAL))
1089 clear_env_vars() 1195 clear_env_vars()
1090 unittest.main() 1196 unittest.main()
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698