OLD | NEW |
1 // Copyright (c) 2013 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2013 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "testing/gtest/include/gtest/gtest.h" | 5 #include "testing/gtest/include/gtest/gtest.h" |
6 #include "tools/gn/input_file.h" | 6 #include "tools/gn/input_file.h" |
7 #include "tools/gn/token.h" | 7 #include "tools/gn/token.h" |
8 #include "tools/gn/tokenizer.h" | 8 #include "tools/gn/tokenizer.h" |
9 | 9 |
10 namespace { | 10 namespace { |
(... 122 matching lines skipped ...)
133 EXPECT_TRUE(CheckTokenizer("fun(\"foo\") {\nfoo = 12}", fn)); | 133 EXPECT_TRUE(CheckTokenizer("fun(\"foo\") {\nfoo = 12}", fn)); |
134 } | 134 } |
135 | 135 |
136 TEST(Tokenizer, Locations) { | 136 TEST(Tokenizer, Locations) { |
137 InputFile input(SourceFile("/test")); | 137 InputFile input(SourceFile("/test")); |
138 input.SetContents("1 2 \"three\"\n 4"); | 138 input.SetContents("1 2 \"three\"\n 4"); |
139 Err err; | 139 Err err; |
140 std::vector<Token> results = Tokenizer::Tokenize(&input, &err); | 140 std::vector<Token> results = Tokenizer::Tokenize(&input, &err); |
141 | 141 |
142 ASSERT_EQ(4u, results.size()); | 142 ASSERT_EQ(4u, results.size()); |
143 ASSERT_TRUE(results[0].location() == Location(&input, 1, 1)); | 143 ASSERT_TRUE(results[0].location() == Location(&input, 1, 1, 1)); |
144 ASSERT_TRUE(results[1].location() == Location(&input, 1, 3)); | 144 ASSERT_TRUE(results[1].location() == Location(&input, 1, 3, 3)); |
145 ASSERT_TRUE(results[2].location() == Location(&input, 1, 5)); | 145 ASSERT_TRUE(results[2].location() == Location(&input, 1, 5, 5)); |
146 ASSERT_TRUE(results[3].location() == Location(&input, 2, 3)); | 146 ASSERT_TRUE(results[3].location() == Location(&input, 2, 3, 8)); |
147 } | 147 } |
148 | 148 |
149 TEST(Tokenizer, ByteOffsetOfNthLine) { | 149 TEST(Tokenizer, ByteOffsetOfNthLine) { |
150 EXPECT_EQ(0u, Tokenizer::ByteOffsetOfNthLine("foo", 1)); | 150 EXPECT_EQ(0u, Tokenizer::ByteOffsetOfNthLine("foo", 1)); |
151 | 151 |
152 // Windows and Posix have different line endings, so check the byte at the | 152 // Windows and Posix have different line endings, so check the byte at the |
153 // location rather than the offset. | 153 // location rather than the offset. |
154 char input1[] = "aaa\nxaa\n\nya"; | 154 char input1[] = "aaa\nxaa\n\nya"; |
155 EXPECT_EQ('x', input1[Tokenizer::ByteOffsetOfNthLine(input1, 2)]); | 155 EXPECT_EQ('x', input1[Tokenizer::ByteOffsetOfNthLine(input1, 2)]); |
156 EXPECT_EQ('y', input1[Tokenizer::ByteOffsetOfNthLine(input1, 4)]); | 156 EXPECT_EQ('y', input1[Tokenizer::ByteOffsetOfNthLine(input1, 4)]); |
157 | 157 |
158 char input2[3]; | 158 char input2[3]; |
159 input2[0] = 'a'; | 159 input2[0] = 'a'; |
160 input2[1] = '\n'; // Manually set to avoid Windows double-byte endings. | 160 input2[1] = '\n'; // Manually set to avoid Windows double-byte endings. |
161 input2[2] = 0; | 161 input2[2] = 0; |
162 EXPECT_EQ(0u, Tokenizer::ByteOffsetOfNthLine(input2, 1)); | 162 EXPECT_EQ(0u, Tokenizer::ByteOffsetOfNthLine(input2, 1)); |
163 EXPECT_EQ(2u, Tokenizer::ByteOffsetOfNthLine(input2, 2)); | 163 EXPECT_EQ(2u, Tokenizer::ByteOffsetOfNthLine(input2, 2)); |
164 } | 164 } |
| 165 |
| 166 TEST(Tokenizer, Comments) { |
| 167 TokenExpectation fn[] = { |
| 168 { Token::LINE_COMMENT, "# Stuff" }, |
| 169 { Token::IDENTIFIER, "fun" }, |
| 170 { Token::LEFT_PAREN, "(" }, |
| 171 { Token::STRING, "\"foo\"" }, |
| 172 { Token::RIGHT_PAREN, ")" }, |
| 173 { Token::LEFT_BRACE, "{" }, |
| 174 { Token::SUFFIX_COMMENT, "# Things" }, |
| 175 { Token::LINE_COMMENT, "#Wee" }, |
| 176 { Token::IDENTIFIER, "foo" }, |
| 177 { Token::EQUAL, "=" }, |
| 178 { Token::INTEGER, "12" }, |
| 179 { Token::SUFFIX_COMMENT, "#Zip" }, |
| 180 { Token::RIGHT_BRACE, "}" }, |
| 181 }; |
| 182 EXPECT_TRUE(CheckTokenizer( |
| 183 "# Stuff\nfun(\"foo\") { # Things\n#Wee\nfoo = 12 #Zip\n}", fn)); |
| 184 } |
OLD | NEW |