Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(563)

Side by Side Diff: swarm_client/tests/isolateserver_test.py

Issue 69143004: Delete swarm_client. (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/tools/
Patch Set: Created 7 years, 1 month ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
« no previous file with comments | « swarm_client/tests/isolateserver_smoke_test.py ('k') | swarm_client/tests/lru_test.py » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
(Empty)
1 #!/usr/bin/env python
2 # Copyright 2013 The Chromium Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
5
6 # pylint: disable=W0212
7 # pylint: disable=W0223
8 # pylint: disable=W0231
9
10 import hashlib
11 import json
12 import logging
13 import os
14 import shutil
15 import StringIO
16 import sys
17 import tempfile
18 import threading
19 import unittest
20 import urllib
21 import zlib
22
# Directory containing this test file.
BASE_PATH = os.path.dirname(os.path.abspath(__file__))
# Parent directory (the swarm_client checkout root).
ROOT_DIR = os.path.dirname(BASE_PATH)
# Make isolateserver, auto_stub and utils importable from the checkout root.
sys.path.insert(0, ROOT_DIR)
26
27 import auto_stub
28 import isolateserver
29
30 from utils import threading_utils
31
32
33 ALGO = hashlib.sha1
34
35
class TestCase(auto_stub.TestCase):
  """Base test case that replays canned HTTP exchanges.

  Tests populate self._requests with (url, expected_kwargs, response) tuples;
  the mocked net.url_open consumes them as matching calls arrive. tearDown
  asserts every canned request was consumed.
  """

  def setUp(self):
    super(TestCase, self).setUp()
    self.mock(isolateserver.net, 'url_open', self._url_open)
    self.mock(isolateserver.net, 'sleep_before_retry', lambda *_: None)
    self._lock = threading.Lock()
    self._requests = []

  def tearDown(self):
    try:
      # Every expected request must have been made by the test.
      self.assertEqual([], self._requests)
    finally:
      super(TestCase, self).tearDown()

  def _url_open(self, url, **kwargs):
    """Replacement for net.url_open that serves canned responses."""
    logging.warn('url_open(%s, %s)', url[:500], str(kwargs)[:500])
    with self._lock:
      if not self._requests:
        return None
      # Ignore 'stream' argument, it's not important for these tests.
      kwargs.pop('stream', None)
      for index, pending in enumerate(self._requests):
        if pending[0] != url:
          continue
        _, expected_kwargs, result = self._requests.pop(index)
        self.assertEqual(expected_kwargs, kwargs)
        if result is None:
          return None
        return isolateserver.net.HttpResponse.get_fake_response(result, url)
      self.fail('Unknown request %s' % url)
65
66
class TestZipCompression(TestCase):
  """Test zip_compress and zip_decompress generators."""

  def test_compress_and_decompress(self):
    """Round trip: decompress(compress(data)) must equal data."""
    chunks = [str(value) for value in xrange(0, 1000)]
    compressed = isolateserver.zip_compress(chunks)
    restored = isolateserver.zip_decompress(compressed)
    self.assertEqual(''.join(chunks), ''.join(restored))

  def test_zip_bomb(self):
    """Verify zip_decompress always returns small chunks."""
    payload = '\x00' * 100000
    bomb = ''.join(isolateserver.zip_compress(payload))
    chunk_size = 1000
    pieces = []
    # Feed the whole compressed blob at once; output must still be chunked.
    for piece in isolateserver.zip_decompress([bomb], chunk_size):
      self.assertLessEqual(len(piece), chunk_size)
      pieces.append(piece)
    self.assertEqual(payload, ''.join(pieces))

  def test_bad_zip_file(self):
    """Verify decompressing broken file raises IOError."""
    with self.assertRaises(IOError):
      ''.join(isolateserver.zip_decompress(['Im not a zip file']))
92
93
class FakeItem(isolateserver.Item):
  """Item backed by an in-memory string instead of a file."""

  def __init__(self, data, is_isolated=False):
    digest = ALGO(data).hexdigest()
    super(FakeItem, self).__init__(digest, len(data), is_isolated)
    self.data = data

  def content(self, _chunk_size):
    # The chunk size hint is ignored; content is returned in one piece.
    return [self.data]

  @property
  def zipped(self):
    """Data compressed at the item's own compression level."""
    return zlib.compress(self.data, self.compression_level)
106
107
class StorageTest(TestCase):
  """Tests for Storage methods."""

  @staticmethod
  def mock_push(side_effect=None):
    """Returns StorageApi subclass with mocked 'push' method."""
    class MockedStorageApi(isolateserver.StorageApi):
      def __init__(self):
        # Records each push() call as an (item, joined_content) pair.
        self.pushed = []
      def push(self, item, content):
        self.pushed.append((item, ''.join(content)))
        # Optionally simulate a failure after recording the attempt.
        if side_effect:
          side_effect()
    return MockedStorageApi()

  def assertEqualIgnoringOrder(self, a, b):
    """Asserts that containers |a| and |b| contain same items."""
    self.assertEqual(len(a), len(b))
    self.assertEqual(set(a), set(b))

  def test_batch_items_for_check(self):
    # Expects items to come back in a single batch, sorted by decreasing size.
    items = [
        isolateserver.Item('foo', 12),
        isolateserver.Item('blow', 0),
        isolateserver.Item('bizz', 1222),
        isolateserver.Item('buzz', 1223),
    ]
    expected = [
        [items[3], items[2], items[0], items[1]],
    ]
    batches = list(isolateserver.Storage.batch_items_for_check(items))
    self.assertEqual(batches, expected)

  def test_get_missing_items(self):
    items = [
        isolateserver.Item('foo', 12),
        isolateserver.Item('blow', 0),
        isolateserver.Item('bizz', 1222),
        isolateserver.Item('buzz', 1223),
    ]
    missing = [
        [items[2], items[3]],
    ]

    class MockedStorageApi(isolateserver.StorageApi):
      def contains(self, _items):
        # Pretend the server reported |missing| regardless of the query.
        return missing
    storage = isolateserver.Storage(MockedStorageApi(), use_zip=False)

    # 'get_missing_items' is a generator, materialize its result in a list.
    result = list(storage.get_missing_items(items))
    self.assertEqual(missing, result)

  def test_async_push(self):
    # Run the same scenario both with and without zip compression.
    for use_zip in (False, True):
      item = FakeItem('1234567')
      storage_api = self.mock_push()
      storage = isolateserver.Storage(storage_api, use_zip)
      channel = threading_utils.TaskChannel()
      storage.async_push(channel, 0, item)
      # Wait for push to finish.
      pushed_item = channel.pull()
      self.assertEqual(item, pushed_item)
      # StorageApi.push was called with correct arguments.
      self.assertEqual(
          [(item, item.zipped if use_zip else item.data)], storage_api.pushed)

  def test_async_push_generator_errors(self):
    class FakeException(Exception):
      pass

    def faulty_generator(_chunk_size):
      # Yield something first so the failure happens mid-stream.
      yield 'Hi!'
      raise FakeException('fake exception')

    for use_zip in (False, True):
      item = FakeItem('')
      self.mock(item, 'content', faulty_generator)
      storage_api = self.mock_push()
      storage = isolateserver.Storage(storage_api, use_zip)
      channel = threading_utils.TaskChannel()
      storage.async_push(channel, 0, item)
      # The generator's exception must propagate through the channel.
      with self.assertRaises(FakeException):
        channel.pull()
      # StorageApi's push should never complete when data can not be read.
      self.assertEqual(0, len(storage_api.pushed))

  def test_async_push_upload_errors(self):
    chunk = 'data_chunk'

    def _generator(_chunk_size):
      yield chunk

    def push_side_effect():
      # Simulate a network error on every upload attempt.
      raise IOError('Nope')

    # TODO(vadimsh): Retrying push when fetching data from a generator is
    # broken now (it reuses same generator instance when retrying).
    content_sources = (
        # generator(),
        lambda _chunk_size: [chunk],
    )

    for use_zip in (False, True):
      for source in content_sources:
        item = FakeItem(chunk)
        self.mock(item, 'content', source)
        storage_api = self.mock_push(push_side_effect)
        storage = isolateserver.Storage(storage_api, use_zip)
        channel = threading_utils.TaskChannel()
        storage.async_push(channel, 0, item)
        with self.assertRaises(IOError):
          channel.pull()
        # First initial attempt + all retries.
        attempts = 1 + isolateserver.WorkerPool.RETRIES
        # Single push attempt parameters.
        expected_push = (item, item.zipped if use_zip else item.data)
        # Ensure all pushes are attempted.
        self.assertEqual(
            [expected_push] * attempts, storage_api.pushed)

  def test_upload_tree(self):
    root = 'root'
    # Mapping of file name -> metadata ('s' size, 'h' hash). 'a_copy' shares
    # a hash with 'a' to exercise deduplication.
    files = {
        'a': {
            's': 100,
            'h': 'hash_a',
        },
        'b': {
            's': 200,
            'h': 'hash_b',
        },
        'c': {
            's': 300,
            'h': 'hash_c',
        },
        'a_copy': {
            's': 100,
            'h': 'hash_a',
        },
    }
    files_data = dict((k, 'x' * files[k]['s']) for k in files)
    all_hashes = set(f['h'] for f in files.itervalues())
    missing_hashes = set(['hash_a', 'hash_b'])

    # Files read by mocked_file_read.
    read_calls = []
    # 'contains' calls.
    contains_calls = []
    # 'push' calls.
    push_calls = []

    def mocked_file_read(filepath, _chunk_size=0):
      self.assertEqual(root, os.path.dirname(filepath))
      filename = os.path.basename(filepath)
      self.assertIn(filename, files_data)
      read_calls.append(filename)
      return files_data[filename]
    self.mock(isolateserver, 'file_read', mocked_file_read)

    class MockedStorageApi(isolateserver.StorageApi):
      def contains(self, items):
        contains_calls.append(items)
        # Report as missing the items whose digest is in |missing_hashes|.
        return [i for i in items
            if os.path.basename(i.digest) in missing_hashes]

      def push(self, item, content):
        push_calls.append((item, ''.join(content)))

    storage_api = MockedStorageApi()
    storage = isolateserver.Storage(storage_api, use_zip=False)
    storage.upload_tree(root, files)

    # Was reading only missing files.
    self.assertEqualIgnoringOrder(
        missing_hashes,
        [files[path]['h'] for path in read_calls])
    # 'contains' checked for existence of all files.
    self.assertEqualIgnoringOrder(
        all_hashes,
        [i.digest for i in sum(contains_calls, [])])
    # Pushed only missing files.
    self.assertEqualIgnoringOrder(
        missing_hashes,
        [call[0].digest for call in push_calls])
    # Pushing with correct data, size and push urls.
    for pushed_item, pushed_content in push_calls:
      filenames = [
          name for name, metadata in files.iteritems()
          if metadata['h'] == pushed_item.digest
      ]
      # If there are multiple files that map to same hash, upload_tree chooses
      # a first one.
      filename = filenames[0]
      self.assertEqual(os.path.join(root, filename), pushed_item.path)
      self.assertEqual(files_data[filename], pushed_content)
304
305
class IsolateServerStorageApiTest(TestCase):
  """Tests IsolateServer against canned HTTP request/response tuples."""

  @staticmethod
  def mock_handshake_request(server, token='fake token', error=None):
    """Returns a canned (url, kwargs, response) tuple for the handshake."""
    handshake_request = {
        'client_app_version': isolateserver.__version__,
        'fetcher': True,
        'protocol_version': isolateserver.ISOLATE_PROTOCOL_VERSION,
        'pusher': True,
    }
    handshake_response = {
        'access_token': token,
        'error': error,
        'protocol_version': isolateserver.ISOLATE_PROTOCOL_VERSION,
        'server_app_version': 'mocked server T1000',
    }
    return (
        server + '/content-gs/handshake',
        {
            'content_type': 'application/json',
            'method': 'POST',
            'data': json.dumps(handshake_request, separators=(',', ':')),
        },
        json.dumps(handshake_response),
    )

  @staticmethod
  def mock_fetch_request(server, namespace, item, data):
    """Returns a canned (url, kwargs, response) tuple for a fetch."""
    return (
        server + '/content-gs/retrieve/%s/%s' % (namespace, item),
        {'retry_404': True, 'read_timeout': 60},
        data,
    )

  @staticmethod
  def mock_contains_request(server, namespace, token, request, response):
    """Returns a canned (url, kwargs, response) tuple for a contains check."""
    url = server + '/content-gs/pre-upload/%s?token=%s' % (
        namespace, urllib.quote(token))
    return (
        url,
        {
            'data': json.dumps(request, separators=(',', ':')),
            'content_type': 'application/json',
            'method': 'POST',
        },
        json.dumps(response),
    )

  def test_server_capabilities_success(self):
    server = 'http://example.com'
    namespace = 'default'
    access_token = 'fake token'
    self._requests = [
        self.mock_handshake_request(server, access_token),
    ]
    storage = isolateserver.IsolateServer(server, namespace)
    caps = storage._server_capabilities
    self.assertEqual(access_token, caps['access_token'])

  def test_server_capabilities_network_failure(self):
    # url_open returning None simulates a failed network request.
    self.mock(isolateserver.net, 'url_open', lambda *_args, **_kwargs: None)
    with self.assertRaises(isolateserver.MappingError):
      storage = isolateserver.IsolateServer('http://example.com', 'default')
      _ = storage._server_capabilities

  def test_server_capabilities_format_failure(self):
    server = 'http://example.com'
    namespace = 'default'
    handshake_req = self.mock_handshake_request(server)
    self._requests = [
        # Replace the valid JSON response body with garbage.
        (handshake_req[0], handshake_req[1], 'Im a bad response'),
    ]
    storage = isolateserver.IsolateServer(server, namespace)
    with self.assertRaises(isolateserver.MappingError):
      _ = storage._server_capabilities

  def test_server_capabilities_respects_error(self):
    server = 'http://example.com'
    namespace = 'default'
    error = 'Im sorry, Dave. Im afraid I cant do that.'
    self._requests = [
        self.mock_handshake_request(server, error=error)
    ]
    storage = isolateserver.IsolateServer(server, namespace)
    with self.assertRaises(isolateserver.MappingError) as context:
      _ = storage._server_capabilities
    # Server error message should be reported to user.
    self.assertIn(error, str(context.exception))

  def test_fetch_success(self):
    server = 'http://example.com'
    namespace = 'default'
    data = ''.join(str(x) for x in xrange(1000))
    item = ALGO(data).hexdigest()
    self._requests = [
        self.mock_fetch_request(server, namespace, item, data),
    ]
    storage = isolateserver.IsolateServer(server, namespace)
    fetched = ''.join(storage.fetch(item))
    self.assertEqual(data, fetched)

  def test_fetch_failure(self):
    server = 'http://example.com'
    namespace = 'default'
    item = ALGO('something').hexdigest()
    self._requests = [
        # A None response body makes the fetch fail.
        self.mock_fetch_request(server, namespace, item, None),
    ]
    storage = isolateserver.IsolateServer(server, namespace)
    with self.assertRaises(IOError):
      _ = ''.join(storage.fetch(item))

  def test_push_success(self):
    server = 'http://example.com'
    namespace = 'default'
    token = 'fake token'
    data = ''.join(str(x) for x in xrange(1000))
    item = FakeItem(data)
    # A pair of (upload url, finalize url) as returned by 'contains'.
    push_urls = (server + '/push_here', server + '/call_this')
    contains_request = [{'h': item.digest, 's': item.size, 'i': 0}]
    contains_response = [push_urls]
    self._requests = [
        self.mock_handshake_request(server, token),
        self.mock_contains_request(
            server, namespace, token, contains_request, contains_response),
        # Content PUT followed by the finalize POST; both succeed.
        (
          push_urls[0],
          {
            'data': data,
            'content_type': 'application/octet-stream',
            'method': 'PUT',
          },
          ''
        ),
        (
          push_urls[1],
          {
            'data': '',
            'content_type': 'application/json',
            'method': 'POST',
          },
          ''
        ),
    ]
    storage = isolateserver.IsolateServer(server, namespace)
    missing = storage.contains([item])
    self.assertEqual([item], missing)
    storage.push(item, [data])
    self.assertTrue(item.push_state.uploaded)
    self.assertTrue(item.push_state.finalized)

  def test_push_failure_upload(self):
    server = 'http://example.com'
    namespace = 'default'
    token = 'fake token'
    data = ''.join(str(x) for x in xrange(1000))
    item = FakeItem(data)
    push_urls = (server + '/push_here', server + '/call_this')
    contains_request = [{'h': item.digest, 's': item.size, 'i': 0}]
    contains_response = [push_urls]
    self._requests = [
        self.mock_handshake_request(server, token),
        self.mock_contains_request(
            server, namespace, token, contains_request, contains_response),
        # The content PUT fails (None response); finalize is never reached.
        (
          push_urls[0],
          {
            'data': data,
            'content_type': 'application/octet-stream',
            'method': 'PUT',
          },
          None
        ),
    ]
    storage = isolateserver.IsolateServer(server, namespace)
    missing = storage.contains([item])
    self.assertEqual([item], missing)
    with self.assertRaises(IOError):
      storage.push(item, [data])
    self.assertFalse(item.push_state.uploaded)
    self.assertFalse(item.push_state.finalized)

  def test_push_failure_finalize(self):
    server = 'http://example.com'
    namespace = 'default'
    token = 'fake token'
    data = ''.join(str(x) for x in xrange(1000))
    item = FakeItem(data)
    push_urls = (server + '/push_here', server + '/call_this')
    contains_request = [{'h': item.digest, 's': item.size, 'i': 0}]
    contains_response = [push_urls]
    self._requests = [
        self.mock_handshake_request(server, token),
        self.mock_contains_request(
            server, namespace, token, contains_request, contains_response),
        # The content PUT succeeds but the finalize POST fails.
        (
          push_urls[0],
          {
            'data': data,
            'content_type': 'application/octet-stream',
            'method': 'PUT',
          },
          ''
        ),
        (
          push_urls[1],
          {
            'data': '',
            'content_type': 'application/json',
            'method': 'POST',
          },
          None
        ),
    ]
    storage = isolateserver.IsolateServer(server, namespace)
    missing = storage.contains([item])
    self.assertEqual([item], missing)
    with self.assertRaises(IOError):
      storage.push(item, [data])
    self.assertTrue(item.push_state.uploaded)
    self.assertFalse(item.push_state.finalized)

  def test_contains_success(self):
    server = 'http://example.com'
    namespace = 'default'
    token = 'fake token'
    files = [
        FakeItem('1', is_isolated=True),
        FakeItem('2' * 100),
        FakeItem('3' * 200),
    ]
    request = [
        {'h': files[0].digest, 's': files[0].size, 'i': 1},
        {'h': files[1].digest, 's': files[1].size, 'i': 0},
        {'h': files[2].digest, 's': files[2].size, 'i': 0},
    ]
    # None entry means "already present"; others carry upload/finalize urls.
    response = [
        None,
        ['http://example/upload_here_1', None],
        ['http://example/upload_here_2', 'http://example/call_this'],
    ]
    missing = [
        files[1],
        files[2],
    ]
    self._requests = [
        self.mock_handshake_request(server, token),
        self.mock_contains_request(server, namespace, token, request, response),
    ]
    storage = isolateserver.IsolateServer(server, namespace)
    result = storage.contains(files)
    self.assertEqual(missing, result)
    # push_state of missing items must capture the urls from the response.
    self.assertEqual(
        [x for x in response if x],
        [[i.push_state.upload_url, i.push_state.finalize_url] for i in missing])

  def test_contains_network_failure(self):
    server = 'http://example.com'
    namespace = 'default'
    token = 'fake token'
    req = self.mock_contains_request(server, namespace, token, [], [])
    self._requests = [
        self.mock_handshake_request(server, token),
        # Contains check gets a None response, i.e. a network failure.
        (req[0], req[1], None),
    ]
    storage = isolateserver.IsolateServer(server, namespace)
    with self.assertRaises(isolateserver.MappingError):
      storage.contains([])

  def test_contains_format_failure(self):
    server = 'http://example.com'
    namespace = 'default'
    token = 'fake token'
    self._requests = [
        self.mock_handshake_request(server, token),
        # Response length doesn't match the (empty) request list.
        self.mock_contains_request(server, namespace, token, [], [1, 2, 3])
    ]
    storage = isolateserver.IsolateServer(server, namespace)
    with self.assertRaises(isolateserver.MappingError):
      storage.contains([])
585
586
class IsolateServerDownloadTest(TestCase):
  """Tests the 'download' command of isolateserver.main()."""

  # Created lazily by tests that need a scratch directory; removed in tearDown.
  tempdir = None

  def tearDown(self):
    try:
      if self.tempdir:
        shutil.rmtree(self.tempdir)
    finally:
      super(IsolateServerDownloadTest, self).tearDown()

  def test_download_two_files(self):
    # Test downloading two files.
    actual = {}
    def out(key, generator):
      # Capture what would have been written to disk, keyed by path.
      actual[key] = ''.join(generator)
    self.mock(isolateserver, 'file_write', out)
    server = 'http://example.com'
    self._requests = [
        (
          server + '/content-gs/retrieve/default-gzip/sha-1',
          {'read_timeout': 60, 'retry_404': True},
          zlib.compress('Coucou'),
        ),
        (
          server + '/content-gs/retrieve/default-gzip/sha-2',
          {'read_timeout': 60, 'retry_404': True},
          zlib.compress('Bye Bye'),
        ),
    ]
    cmd = [
        'download',
        '--isolate-server', server,
        '--target', ROOT_DIR,
        '--file', 'sha-1', 'path/to/a',
        '--file', 'sha-2', 'path/to/b',
    ]
    self.assertEqual(0, isolateserver.main(cmd))
    expected = {
        os.path.join(ROOT_DIR, 'path/to/a'): 'Coucou',
        os.path.join(ROOT_DIR, 'path/to/b'): 'Bye Bye',
    }
    self.assertEqual(expected, actual)

  def test_download_isolated(self):
    # Test downloading an isolated tree.
    self.tempdir = tempfile.mkdtemp(prefix='isolateserver')
    actual = {}
    def file_write_mock(key, generator):
      actual[key] = ''.join(generator)
    self.mock(isolateserver, 'file_write', file_write_mock)
    self.mock(os, 'makedirs', lambda _: None)
    # Capture stdout to verify the "how to run" message printed at the end.
    stdout = StringIO.StringIO()
    self.mock(sys, 'stdout', stdout)
    server = 'http://example.com'

    files = {
        'a/foo': 'Content',
        'b': 'More content',
    }
    isolated = {
        'command': ['Absurb', 'command'],
        'relative_cwd': 'a',
        'files': dict(
            (k, {'h': ALGO(v).hexdigest(), 's': len(v)})
            for k, v in files.iteritems()),
    }
    isolated_data = json.dumps(isolated, sort_keys=True, separators=(',',':'))
    isolated_hash = ALGO(isolated_data).hexdigest()
    # One canned request per file, plus one for the .isolated file itself.
    requests = [(v['h'], files[k]) for k, v in isolated['files'].iteritems()]
    requests.append((isolated_hash, isolated_data))
    self._requests = [
        (
          server + '/content-gs/retrieve/default-gzip/' + h,
          {
            'read_timeout': isolateserver.DOWNLOAD_READ_TIMEOUT,
            'retry_404': True,
          },
          zlib.compress(v),
        ) for h, v in requests
    ]
    cmd = [
        'download',
        '--isolate-server', server,
        '--target', self.tempdir,
        '--isolated', isolated_hash,
    ]
    self.assertEqual(0, isolateserver.main(cmd))
    expected = dict(
        (os.path.join(self.tempdir, k), v) for k, v in files.iteritems())
    self.assertEqual(expected, actual)
    expected_stdout = (
        'To run this test please run from the directory %s:\n Absurb command\n'
        % os.path.join(self.tempdir, 'a'))
    self.assertEqual(expected_stdout, stdout.getvalue())
681
682
class TestIsolated(unittest.TestCase):
  """Tests for isolateserver.load_isolated() parsing and validation."""

  def test_load_isolated_empty(self):
    # An empty JSON object is a valid .isolated file.
    m = isolateserver.load_isolated('{}', None, ALGO)
    self.assertEqual({}, m)

  def test_load_isolated_good(self):
    # A fully populated, well formed .isolated file parses unchanged.
    data = {
      u'command': [u'foo', u'bar'],
      u'files': {
        u'a': {
          u'l': u'somewhere',
        },
        u'b': {
          u'm': 123,
          u'h': u'0123456789abcdef0123456789abcdef01234567',
          u's': 3,
        }
      },
      u'includes': [u'0123456789abcdef0123456789abcdef01234567'],
      u'os': 'oPhone',
      u'read_only': False,
      u'relative_cwd': u'somewhere_else'
    }
    m = isolateserver.load_isolated(json.dumps(data), None, ALGO)
    self.assertEqual(data, m)

  def test_load_isolated_bad(self):
    # An entry carrying both 'l' and 'h' keys must be rejected.
    data = {
      u'files': {
        u'a': {
          u'l': u'somewhere',
          u'h': u'0123456789abcdef0123456789abcdef01234567'
        }
      },
    }
    # assertRaises instead of the manual try/fail/except pattern, for
    # consistency with the other tests in this file.
    with self.assertRaises(isolateserver.ConfigError):
      isolateserver.load_isolated(json.dumps(data), None, ALGO)

  def test_load_isolated_os_only(self):
    # Loading succeeds when the file's 'os' matches the expected one.
    data = {
      u'os': 'HP/UX',
    }
    m = isolateserver.load_isolated(json.dumps(data), 'HP/UX', ALGO)
    self.assertEqual(data, m)

  def test_load_isolated_os_bad(self):
    # Loading fails when the file's 'os' doesn't match the expected one.
    data = {
      u'os': 'foo',
    }
    with self.assertRaises(isolateserver.ConfigError):
      isolateserver.load_isolated(json.dumps(data), 'AS/400', ALGO)

  def test_load_isolated_path(self):
    # Automatically convert the path case.
    wrong_path_sep = u'\\' if os.path.sep == '/' else u'/'
    def gen_data(path_sep):
      return {
        u'command': [u'foo', u'bar'],
        u'files': {
          path_sep.join(('a', 'b')): {
            u'l': path_sep.join(('..', 'somewhere')),
          },
        },
        u'os': u'oPhone',
        u'relative_cwd': path_sep.join(('somewhere', 'else')),
      }

    data = gen_data(wrong_path_sep)
    actual = isolateserver.load_isolated(json.dumps(data), None, ALGO)
    # Paths in the loaded result must use the native separator.
    expected = gen_data(os.path.sep)
    self.assertEqual(expected, actual)
760
761
if __name__ == '__main__':
  if '-v' in sys.argv:
    # Show full diffs on assertion failures when running verbosely.
    unittest.TestCase.maxDiff = None
  logging.basicConfig(
      level=(logging.DEBUG if '-v' in sys.argv else logging.ERROR))
  unittest.main()
OLDNEW
« no previous file with comments | « swarm_client/tests/isolateserver_smoke_test.py ('k') | swarm_client/tests/lru_test.py » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698