OLD | NEW |
| (Empty) |
1 #!/usr/bin/env python | |
2 # Copyright (c) 2012 The Chromium Authors. All rights reserved. | |
3 # Use of this source code is governed by a BSD-style license that can be | |
4 # found in the LICENSE file. | |
5 | |
6 import hashlib | |
7 import json | |
8 import logging | |
9 import os | |
10 import shutil | |
11 import subprocess | |
12 import sys | |
13 import unittest | |
14 | |
15 ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) | |
16 sys.path.insert(0, ROOT_DIR) | |
17 | |
18 import isolateserver | |
19 import run_isolated | |
20 | |
21 VERBOSE = False | |
22 | |
23 ALGO = hashlib.sha1 | |
24 | |
25 | |
class CalledProcessError(subprocess.CalledProcessError):
  """Makes 2.6 version act like 2.7"""
  def __init__(self, returncode, cmd, output, stderr, cwd):
    super(CalledProcessError, self).__init__(returncode, cmd)
    self.output = output
    self.stderr = stderr
    self.cwd = cwd

  def __str__(self):
    # Append the captured context (cwd, stdout, stderr, command line) to the
    # base class message so failures are debuggable from the log alone.
    base_message = super(CalledProcessError, self).__str__()
    details = '\ncwd=%s\n%s\n%s\n%s' % (
        self.cwd, self.output, self.stderr, ' '.join(self.cmd))
    return base_message + details
42 | |
43 | |
def list_files_tree(directory):
  """Returns the list of all the files in a tree."""
  # Paths are returned relative to |directory| (the +1 strips the separator).
  prefix_length = len(directory) + 1
  found = []
  for dirpath, _subdirs, filenames in os.walk(directory):
    for filename in filenames:
      found.append(os.path.join(dirpath, filename)[prefix_length:])
  return sorted(found)
50 | |
51 | |
def write_content(filepath, content):
  """Writes |content| to |filepath| as raw bytes."""
  f = open(filepath, 'wb')
  try:
    f.write(content)
  finally:
    f.close()
55 | |
56 | |
def write_json(filepath, data):
  """Dumps |data| to |filepath| as deterministic (sorted, indented) JSON.

  The file is opened in text mode: json.dump() produces str output, so a
  binary-mode file raises TypeError on Python 3. On Python 2 POSIX this is
  behaviorally identical to the previous 'wb' mode.
  """
  with open(filepath, 'w') as f:
    json.dump(data, f, sort_keys=True, indent=2)
60 | |
61 | |
class RunSwarmStep(unittest.TestCase):
  """Smoke tests running the zipped run_isolated package as a subprocess.

  setUp() builds a scratch directory containing:
    - run_isolated.zip: the self-contained executable package under test.
    - table/: a directory of content-addressed files (named by their hash)
      that is passed as --isolate-server, acting as a local fake server.
    - cache/: the slave-side cache that run_isolated populates; the tests
      assert its exact contents after each run.
  """

  def setUp(self):
    self.tempdir = run_isolated.make_temp_dir(
        'run_isolated_smoke_test', ROOT_DIR)
    logging.debug(self.tempdir)
    # run_isolated.zip executable package.
    self.run_isolated_zip = os.path.join(self.tempdir, 'run_isolated.zip')
    run_isolated.get_as_zip_package().zip_into_file(
        self.run_isolated_zip, compress=False)
    # The "source" hash table.
    self.table = os.path.join(self.tempdir, 'table')
    os.mkdir(self.table)
    # The slave-side cache.
    self.cache = os.path.join(self.tempdir, 'cache')

    # Checked-in test data: .isolated files and the files they map.
    self.data_dir = os.path.join(ROOT_DIR, 'tests', 'run_isolated')

  def tearDown(self):
    shutil.rmtree(self.tempdir)

  def _result_tree(self):
    """Returns the sorted relative paths of every file under the tempdir."""
    return list_files_tree(self.tempdir)

  def _run(self, args):
    """Runs the zipped run_isolated with |args| in a child process.

    Returns (stdout, stderr, returncode). When VERBOSE, the child inherits
    this process' streams, so stdout/stderr come back as None.
    """
    cmd = [sys.executable, self.run_isolated_zip]
    cmd.extend(args)
    if VERBOSE:
      cmd.extend(['-v'] * 2)
      pipe = None
    else:
      pipe = subprocess.PIPE
    logging.debug(' '.join(cmd))
    proc = subprocess.Popen(
        cmd,
        stdout=pipe,
        stderr=pipe,
        universal_newlines=True,
        cwd=self.tempdir)
    out, err = proc.communicate()
    return out, err, proc.returncode

  def _store_result(self, result_data):
    """Stores a .isolated file in the hash table."""
    # Serialize deterministically so equal data always produces the same
    # hash, then store the content under its own hash like a real server.
    result_text = json.dumps(result_data, sort_keys=True, indent=2)
    result_hash = ALGO(result_text).hexdigest()
    write_content(os.path.join(self.table, result_hash), result_text)
    return result_hash

  def _store(self, filename):
    """Stores a test data file in the table.

    Returns its sha-1 hash.
    """
    filepath = os.path.join(self.data_dir, filename)
    h = isolateserver.hash_file(filepath, ALGO)
    shutil.copyfile(filepath, os.path.join(self.table, h))
    return h

  def _generate_args_with_isolated(self, isolated):
    """Generates the standard arguments used with isolated as the isolated file.

    Returns a list of the required arguments.
    """
    return [
      '--isolated', isolated,
      '--cache', self.cache,
      '--isolate-server', self.table,
      '--namespace', 'default',
    ]

  def _generate_args_with_hash(self, hash_value):
    """Generates the standard arguments used with |hash_value| as the hash.

    Returns a list of the required arguments.
    """
    return [
      '--hash', hash_value,
      '--cache', self.cache,
      '--isolate-server', self.table,
      '--namespace', 'default',
    ]

  def test_result(self):
    # Loads an arbitrary .isolated on the file system.
    isolated = os.path.join(self.data_dir, 'repeated_files.isolated')
    # The cache must end up containing exactly the state file, the mapped
    # files and the .isolated itself.
    expected = [
      'state.json',
      self._store('file1.txt'),
      self._store('file1_copy.txt'),
      self._store('repeated_files.py'),
      isolateserver.hash_file(isolated, ALGO),
    ]
    out, err, returncode = self._run(
        self._generate_args_with_isolated(isolated))
    if not VERBOSE:
      self.assertEqual('Success\n', out, (out, err))
    self.assertEqual(0, returncode)
    actual = list_files_tree(self.cache)
    # set(): file1.txt and file1_copy.txt may share a hash.
    self.assertEqual(sorted(set(expected)), actual)

  def test_hash(self):
    # Loads the .isolated from the store as a hash.
    result_hash = self._store('repeated_files.isolated')
    expected = [
      'state.json',
      self._store('file1.txt'),
      self._store('file1_copy.txt'),
      self._store('repeated_files.py'),
      result_hash,
    ]

    out, err, returncode = self._run(self._generate_args_with_hash(result_hash))
    if not VERBOSE:
      self.assertEqual('', err)
      self.assertEqual('Success\n', out, out)
    self.assertEqual(0, returncode)
    actual = list_files_tree(self.cache)
    self.assertEqual(sorted(set(expected)), actual)

  def test_fail_empty_isolated(self):
    # An .isolated without a command must fail with returncode 1, but its
    # content is still fetched into the cache.
    result_hash = self._store_result({})
    expected = [
      'state.json',
      result_hash,
    ]
    out, err, returncode = self._run(self._generate_args_with_hash(result_hash))
    if not VERBOSE:
      self.assertEqual('', out)
      self.assertIn('No command to run\n', err)
    self.assertEqual(1, returncode)
    actual = list_files_tree(self.cache)
    self.assertEqual(sorted(expected), actual)

  def test_includes(self):
    # Loads an .isolated that includes another one.

    # References manifest2.isolated and repeated_files.isolated. Maps file3.txt
    # as file2.txt.
    result_hash = self._store('check_files.isolated')
    expected = [
      'state.json',
      self._store('check_files.py'),
      self._store('file1.txt'),
      self._store('file3.txt'),
      # Maps file1.txt.
      self._store('manifest1.isolated'),
      # References manifest1.isolated. Maps file2.txt but it is overridden.
      self._store('manifest2.isolated'),
      result_hash,
      self._store('repeated_files.py'),
      self._store('repeated_files.isolated'),
    ]
    out, err, returncode = self._run(self._generate_args_with_hash(result_hash))
    if not VERBOSE:
      self.assertEqual('', err)
      self.assertEqual('Success\n', out)
    self.assertEqual(0, returncode)
    actual = list_files_tree(self.cache)
    self.assertEqual(sorted(expected), actual)

  def test_link_all_hash_instances(self):
    # Load an isolated file with the same file (same sha-1 hash), listed under
    # two different names and ensure both are created.
    result_hash = self._store('repeated_files.isolated')
    expected = [
      'state.json',
      result_hash,
      self._store('file1.txt'),
      self._store('repeated_files.py')
    ]

    out, err, returncode = self._run(self._generate_args_with_hash(result_hash))
    if not VERBOSE:
      self.assertEqual('', err)
      self.assertEqual('Success\n', out)
    self.assertEqual(0, returncode)
    actual = list_files_tree(self.cache)
    self.assertEqual(sorted(expected), actual)

  def test_delete_invalid_cache_entry(self):
    isolated_file = os.path.join(self.data_dir, 'file_with_size.isolated')
    file1_hash = self._store('file1.txt')

    # Run the test once to generate the cache.
    out, err, returncode = self._run(self._generate_args_with_isolated(
        isolated_file))
    if VERBOSE:
      print out
      print err
    self.assertEqual(0, returncode)

    # Modify one of the files in the cache to be invalid.
    cached_file_path = os.path.join(self.cache, file1_hash)
    with open(cached_file_path, 'w') as f:
      f.write('invalid size')
    logging.info('Modified %s', cached_file_path)
    # Ensure that the cache has an invalid file.
    self.assertNotEqual(
        os.stat(os.path.join(self.data_dir, 'file1.txt')).st_size,
        os.stat(cached_file_path).st_size)

    # Rerun the test and make sure the cache contains the right file afterwards.
    # run_isolated is expected to detect the size mismatch and re-fetch.
    out, err, returncode = self._run(self._generate_args_with_isolated(
        isolated_file))
    if VERBOSE:
      print out
      print err
    self.assertEqual(0, returncode)

    self.assertEqual(os.stat(os.path.join(self.data_dir, 'file1.txt')).st_size,
                     os.stat(cached_file_path).st_size)
273 | |
274 | |
if __name__ == '__main__':
  # '-v' turns on child-process verbosity and debug logging here; the flag is
  # left in sys.argv for unittest.main() to parse as well.
  VERBOSE = '-v' in sys.argv
  log_level = logging.DEBUG if VERBOSE else logging.ERROR
  logging.basicConfig(level=log_level)
  unittest.main()
OLD | NEW |