#!/usr/bin/env python
# Copyright 2011 The Closure Linter Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

16 | |
__author__ = 'nnaze@google.com (Nathan Naze)'

import unittest as googletest
from closure_linter.common import tokens

22 | |
def _CreateDummyToken():
  """Build a single placeholder token ('foo', no type) on line 1."""
  dummy = tokens.Token('foo', None, 1, 1)
  return dummy
25 | |
26 | |
def _CreateDummyTokens(count):
  """Return a list of `count` fresh dummy tokens.

  Args:
    count: Number of tokens to create.

  Returns:
    List of unlinked dummy tokens.
  """
  # range (not the Python-2-only xrange) keeps this working on Python 3 too;
  # a comprehension replaces the manual append loop.
  return [_CreateDummyToken() for _ in range(count)]
32 | |
33 | |
34 def _SetTokensAsNeighbors(neighbor_tokens): | |
35 for i in xrange(len(neighbor_tokens)): | |
36 prev_index = i - 1 | |
37 next_index = i + 1 | |
38 | |
39 if prev_index >= 0: | |
40 neighbor_tokens[i].previous = neighbor_tokens[prev_index] | |
41 | |
42 if next_index < len(neighbor_tokens): | |
43 neighbor_tokens[i].next = neighbor_tokens[next_index] | |
44 | |
45 | |
class TokensTest(googletest.TestCase):
  """Unit tests for tokens.Token navigation, typing, repr, and iteration."""

  def testIsFirstInLine(self):
    """IsFirstInLine is true iff no previous token shares the line."""

    # First token in file (has no previous).
    self.assertTrue(_CreateDummyToken().IsFirstInLine())

    a, b = _CreateDummyTokens(2)
    _SetTokensAsNeighbors([a, b])

    # Tokens on same line.
    a.line_number = 30
    b.line_number = 30

    self.assertFalse(b.IsFirstInLine())

    # Tokens on different lines.
    b.line_number = 31
    self.assertTrue(b.IsFirstInLine())

  def testIsLastInLine(self):
    """IsLastInLine is true iff no next token shares the line."""
    # Last token in file (has no next).
    self.assertTrue(_CreateDummyToken().IsLastInLine())

    a, b = _CreateDummyTokens(2)
    _SetTokensAsNeighbors([a, b])

    # Tokens on same line.
    a.line_number = 30
    b.line_number = 30
    self.assertFalse(a.IsLastInLine())

    b.line_number = 31
    self.assertTrue(a.IsLastInLine())

  def testIsType(self):
    """IsType matches only the token's exact type string."""
    a = tokens.Token('foo', 'fakeType1', 1, 1)
    self.assertTrue(a.IsType('fakeType1'))
    self.assertFalse(a.IsType('fakeType2'))

  def testIsAnyType(self):
    """IsAnyType matches when any candidate equals the token's type."""
    a = tokens.Token('foo', 'fakeType1', 1, 1)
    self.assertTrue(a.IsAnyType(['fakeType1', 'fakeType2']))
    self.assertFalse(a.IsAnyType(['fakeType3', 'fakeType4']))

  def testRepr(self):
    """str() of a token follows the canonical <Token: ...> format."""
    a = tokens.Token('foo', 'fakeType1', 1, 1)
    # assertEquals is a deprecated unittest alias; assertEqual is canonical.
    self.assertEqual('<Token: fakeType1, "foo", None, 1, None>', str(a))

  def testIter(self):
    """Forward iteration from a token walks the next-pointer chain."""
    dummy_tokens = _CreateDummyTokens(5)
    _SetTokensAsNeighbors(dummy_tokens)
    a, b, c, d, e = dummy_tokens

    i = iter(a)
    self.assertListEqual([a, b, c, d, e], list(i))

  def testReverseIter(self):
    """reversed() from a token walks the previous-pointer chain."""
    dummy_tokens = _CreateDummyTokens(5)
    _SetTokensAsNeighbors(dummy_tokens)
    a, b, c, d, e = dummy_tokens

    ri = reversed(e)
    self.assertListEqual([e, d, c, b, a], list(ri))
110 | |
111 | |
if __name__ == '__main__':
  # Run all TokensTest cases when this file is executed as a script.
  googletest.main()