Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
| 2 # Copyright 2013 The LUCI Authors. All rights reserved. | 2 # Copyright 2013 The LUCI Authors. All rights reserved. |
| 3 # Use of this source code is governed under the Apache License, Version 2.0 | 3 # Use of this source code is governed under the Apache License, Version 2.0 |
| 4 # that can be found in the LICENSE file. | 4 # that can be found in the LICENSE file. |
| 5 | 5 |
| 6 # pylint: disable=R0201 | 6 # pylint: disable=R0201 |
| 7 | 7 |
| 8 import StringIO | 8 import StringIO |
| 9 import base64 | 9 import base64 |
| 10 import contextlib | 10 import contextlib |
| 11 import functools | 11 import functools |
| 12 import hashlib | |
| 12 import json | 13 import json |
| 13 import logging | 14 import logging |
| 14 import os | 15 import os |
| 15 import sys | 16 import sys |
| 16 import tempfile | 17 import tempfile |
| 17 import unittest | 18 import unittest |
| 18 | 19 |
| 19 ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath( | 20 ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath( |
| 20 __file__.decode(sys.getfilesystemencoding())))) | 21 __file__.decode(sys.getfilesystemencoding())))) |
| 21 sys.path.insert(0, ROOT_DIR) | 22 sys.path.insert(0, ROOT_DIR) |
| 22 sys.path.insert(0, os.path.join(ROOT_DIR, 'third_party')) | 23 sys.path.insert(0, os.path.join(ROOT_DIR, 'third_party')) |
| 23 | 24 |
| 24 import cipd | 25 import cipd |
| 25 import isolated_format | 26 import isolated_format |
| 26 import isolateserver | 27 import isolateserver |
| 28 import named_cache | |
| 27 import run_isolated | 29 import run_isolated |
| 28 from depot_tools import auto_stub | 30 from depot_tools import auto_stub |
| 29 from depot_tools import fix_encoding | 31 from depot_tools import fix_encoding |
| 30 from utils import file_path | 32 from utils import file_path |
| 31 from utils import fs | 33 from utils import fs |
| 32 from utils import large | 34 from utils import large |
| 33 from utils import logging_utils | 35 from utils import logging_utils |
| 34 from utils import on_error | 36 from utils import on_error |
| 35 from utils import subprocess42 | 37 from utils import subprocess42 |
| 36 from utils import tools | 38 from utils import tools |
| 37 | 39 |
| 38 import isolateserver_mock | 40 import isolateserver_mock |
| 39 import cipdserver_mock | 41 import cipdserver_mock |
| 40 | 42 |
| 41 | 43 |
| 44 ALGO = hashlib.sha1 | |
| 45 | |
| 46 | |
| 42 def write_content(filepath, content): | 47 def write_content(filepath, content): |
| 43 with open(filepath, 'wb') as f: | 48 with open(filepath, 'wb') as f: |
| 44 f.write(content) | 49 f.write(content) |
| 45 | 50 |
| 46 | 51 |
| 47 def json_dumps(data): | 52 def json_dumps(data): |
| 48 return json.dumps(data, sort_keys=True, separators=(',', ':')) | 53 return json.dumps(data, sort_keys=True, separators=(',', ':')) |
| 49 | 54 |
| 50 | 55 |
| 56 def genTree(path): | |
| 57 """Returns a dict with {filepath: content}.""" | |
| 58 if not os.path.isdir(path): | |
| 59 return None | |
| 60 out = {} | |
| 61 for root, _, filenames in os.walk(path): | |
| 62 for filename in filenames: | |
| 63 p = os.path.join(root, filename) | |
| 64 relpath = p[len(path):].lstrip(os.sep) | |
|
nodir
2017/05/04 07:34:41
why not os.path.relpath?
M-A Ruel
2017/05/04 16:00:56
Done.
| |
| 65 with open(p, 'rb') as f: | |
| 66 out[relpath] = f.read() | |
| 67 return out | |
| 68 | |
| 69 | |
| 51 @contextlib.contextmanager | 70 @contextlib.contextmanager |
| 52 def init_named_caches_stub(_run_dir): | 71 def init_named_caches_stub(_run_dir): |
| 53 yield | 72 yield |
| 54 | 73 |
| 55 | 74 |
| 56 class StorageFake(object): | 75 class StorageFake(object): |
| 57 def __init__(self, files): | 76 def __init__(self, files): |
| 58 self._files = files.copy() | 77 self._files = files.copy() |
| 59 self.namespace = 'default-gzip' | 78 self.namespace = 'default-gzip' |
| 60 self.location = 'http://localhost:1' | 79 self.location = 'http://localhost:1' |
| (...skipping 24 matching lines...) Expand all Loading... | |
| 85 logging.debug(self.tempdir) | 104 logging.debug(self.tempdir) |
| 86 self.mock(run_isolated, 'make_temp_dir', self.fake_make_temp_dir) | 105 self.mock(run_isolated, 'make_temp_dir', self.fake_make_temp_dir) |
| 87 self.mock(run_isolated.auth, 'ensure_logged_in', lambda _: None) | 106 self.mock(run_isolated.auth, 'ensure_logged_in', lambda _: None) |
| 88 self.mock( | 107 self.mock( |
| 89 logging_utils.OptionParserWithLogging, 'logger_root', | 108 logging_utils.OptionParserWithLogging, 'logger_root', |
| 90 logging.Logger('unittest')) | 109 logging.Logger('unittest')) |
| 91 | 110 |
| 92 self.cipd_server = cipdserver_mock.MockCipdServer() | 111 self.cipd_server = cipdserver_mock.MockCipdServer() |
| 93 | 112 |
| 94 def tearDown(self): | 113 def tearDown(self): |
| 114 # Remove mocks. | |
| 115 super(RunIsolatedTestBase, self).tearDown() | |
| 95 file_path.rmtree(self.tempdir) | 116 file_path.rmtree(self.tempdir) |
| 96 self.cipd_server.close() | 117 self.cipd_server.close() |
| 97 super(RunIsolatedTestBase, self).tearDown() | |
| 98 | 118 |
| 99 @property | 119 @property |
| 100 def run_test_temp_dir(self): | 120 def run_test_temp_dir(self): |
| 101 """Where to map all files in run_isolated.run_tha_test.""" | 121 """Where to map all files in run_isolated.run_tha_test.""" |
| 102 return os.path.join(self.tempdir, run_isolated.ISOLATED_RUN_DIR) | 122 return os.path.join(self.tempdir, run_isolated.ISOLATED_RUN_DIR) |
| 103 | 123 |
| 104 def fake_make_temp_dir(self, prefix, _root_dir): | 124 def fake_make_temp_dir(self, prefix, _root_dir): |
| 105 """Predictably returns directory for run_tha_test (one per test case).""" | 125 """Predictably returns directory for run_tha_test (one per test case).""" |
| 106 self.assertIn( | 126 self.assertIn( |
| 107 prefix, | 127 prefix, |
| (...skipping 443 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 551 {'detached': True}), | 571 {'detached': True}), |
| 552 ], | 572 ], |
| 553 self.popen_calls) | 573 self.popen_calls) |
| 554 | 574 |
| 555 def test_run_tha_test_non_isolated(self): | 575 def test_run_tha_test_non_isolated(self): |
| 556 _ = self._run_tha_test(command=['/bin/echo', 'hello', 'world']) | 576 _ = self._run_tha_test(command=['/bin/echo', 'hello', 'world']) |
| 557 self.assertEqual( | 577 self.assertEqual( |
| 558 [([u'/bin/echo', u'hello', u'world'], {'detached': True})], | 578 [([u'/bin/echo', u'hello', u'world'], {'detached': True})], |
| 559 self.popen_calls) | 579 self.popen_calls) |
| 560 | 580 |
| 581 def test_clean_caches(self): | |
| 582 # Create an isolated cache and a named cache each with 2 items. Ensure that | |
| 583 # one item from each is removed. | |
| 584 fake_time = 1 | |
| 585 fake_free_space = [102400] | |
| 586 np = self.temp_join('named_cache') | |
| 587 ip = self.temp_join('isolated_cache') | |
| 588 args = [ | |
| 589 '--named-cache-root', np, '--cache', ip, '--clean', | |
| 590 '--min-free-space', '10240', | |
| 591 ] | |
| 592 self.mock(file_path, 'get_free_space', lambda _: fake_free_space[0]) | |
| 593 parser, options, _ = run_isolated.parse_args(args) | |
| 594 isolate_cache = isolateserver.process_cache_options( | |
| 595 options, trim=False, time_fn=lambda: fake_time) | |
| 596 self.assertIsInstance(isolate_cache, isolateserver.DiskCache) | |
| 597 named_cache_manager = named_cache.process_named_cache_options( | |
| 598 parser, options) | |
| 599 self.assertIsInstance(named_cache_manager, named_cache.CacheManager) | |
| 600 | |
| 601 # Add items to these caches. | |
| 602 small = '0123456789' | |
| 603 big = small * 1014 | |
| 604 small_digest = unicode(ALGO(small).hexdigest()) | |
| 605 big_digest = unicode(ALGO(big).hexdigest()) | |
| 606 with isolate_cache: | |
| 607 fake_time = 1 | |
| 608 isolate_cache.write(big_digest, [big]) | |
| 609 fake_time = 2 | |
| 610 isolate_cache.write(small_digest, [small]) | |
| 611 with named_cache_manager.open(time_fn=lambda: fake_time): | |
| 612 fake_time = 1 | |
| 613 p = named_cache_manager.request('first') | |
| 614 with open(os.path.join(p, 'big'), 'wb') as f: | |
| 615 f.write(big) | |
| 616 fake_time = 3 | |
| 617 p = named_cache_manager.request('second') | |
| 618 with open(os.path.join(p, 'small'), 'wb') as f: | |
| 619 f.write(small) | |
| 620 | |
| 621 # Ensures the cache contain the expected data. | |
| 622 actual = genTree(np) | |
| 623 # Figure out the cache path names. | |
| 624 cache_small = [ | |
| 625 os.path.dirname(n) for n in actual if os.path.basename(n) == 'small'][0] | |
| 626 cache_big = [ | |
| 627 os.path.dirname(n) for n in actual if os.path.basename(n) == 'big'][0] | |
| 628 expected = { | |
| 629 os.path.join(cache_small, u'small'): small, | |
| 630 os.path.join(cache_big, u'big'): big, | |
| 631 u'state.json': | |
| 632 '{"items":[["first",["%s",1]],["second",["%s",3]]],"version":2}' % ( | |
| 633 cache_big, cache_small), | |
| 634 } | |
| 635 self.assertEqual(expected, actual) | |
| 636 expected = { | |
| 637 big_digest: big, | |
| 638 small_digest: small, | |
| 639 u'state.json': | |
| 640 '{"items":[["%s",[10140,1]],["%s",[10,2]]],"version":2}' % ( | |
| 641 big_digest, small_digest), | |
| 642 } | |
| 643 self.assertEqual(expected, genTree(ip)) | |
| 644 | |
| 645 # Request trimming. | |
| 646 fake_free_space[0] = 1020 | |
| 647 # Abuse the fact that named cache is trimmed after isolated cache. | |
| 648 def rmtree(p): | |
| 649 self.assertEqual(os.path.join(np, cache_big), p) | |
| 650 fake_free_space[0] += 10240 | |
| 651 return old_rmtree(p) | |
| 652 old_rmtree = self.mock(file_path, 'rmtree', rmtree) | |
| 653 isolate_cache = isolateserver.process_cache_options(options, trim=False) | |
| 654 named_cache_manager = named_cache.process_named_cache_options( | |
| 655 parser, options) | |
|
nodir
2017/05/04 07:34:41
why isolate_cache and named_cache_manager are re-created?
M-A Ruel
2017/05/04 16:00:56
I wanted to be sure to flush the internal state.
| |
| 656 actual = run_isolated.clean_caches( | |
| 657 options, isolate_cache, named_cache_manager) | |
| 658 self.assertEqual(2, actual) | |
| 659 # One of each entry should have been cleaned up. This only happens to work | |
| 660 # because: | |
| 661 # - file_path.get_free_space() is mocked | |
| 662 # - DiskCache.trim() keeps its own internal counter while deleting files so | |
| 663 # it ignores get_free_space() output while deleting files. | |
| 664 actual = genTree(np) | |
| 665 expected = { | |
| 666 os.path.join(cache_small, u'small'): small, | |
| 667 u'state.json': | |
| 668 '{"items":[["second",["%s",3]]],"version":2}' % cache_small, | |
| 669 } | |
| 670 self.assertEqual(expected, actual) | |
| 671 expected = { | |
| 672 small_digest: small, | |
| 673 u'state.json': | |
| 674 '{"items":[["%s",[10,2]]],"version":2}' % small_digest, | |
| 675 } | |
| 676 self.assertEqual(expected, genTree(ip)) | |
| 677 | |
| 561 | 678 |
| 562 class RunIsolatedTestRun(RunIsolatedTestBase): | 679 class RunIsolatedTestRun(RunIsolatedTestBase): |
| 563 def test_output(self): | 680 def test_output(self): |
| 564 # Starts a full isolate server mock and have run_tha_test() uploads results | 681 # Starts a full isolate server mock and have run_tha_test() uploads results |
| 565 # back after the task completed. | 682 # back after the task completed. |
| 566 server = isolateserver_mock.MockIsolateServer() | 683 server = isolateserver_mock.MockIsolateServer() |
| 567 try: | 684 try: |
| 568 script = ( | 685 script = ( |
| 569 'import sys\n' | 686 'import sys\n' |
| 570 'open(sys.argv[1], "w").write("bar")\n') | 687 'open(sys.argv[1], "w").write("bar")\n') |
| (...skipping 298 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 869 self.assertEqual(expected, actual) | 986 self.assertEqual(expected, actual) |
| 870 | 987 |
| 871 | 988 |
| 872 if __name__ == '__main__': | 989 if __name__ == '__main__': |
| 873 fix_encoding.fix_encoding() | 990 fix_encoding.fix_encoding() |
| 874 if '-v' in sys.argv: | 991 if '-v' in sys.argv: |
| 875 unittest.TestCase.maxDiff = None | 992 unittest.TestCase.maxDiff = None |
| 876 logging.basicConfig( | 993 logging.basicConfig( |
| 877 level=logging.DEBUG if '-v' in sys.argv else logging.ERROR) | 994 level=logging.DEBUG if '-v' in sys.argv else logging.ERROR) |
| 878 unittest.main() | 995 unittest.main() |
| OLD | NEW |