Index: tools/gn/tokenizer_unittest.cc
diff --git a/tools/gn/tokenizer_unittest.cc b/tools/gn/tokenizer_unittest.cc
index 45fd744c83a6441745b04c9c5a6ebe7e8973b23c..f18f5e797f68bf51d6d2f25b5de92d81b895662b 100644
--- a/tools/gn/tokenizer_unittest.cc
+++ b/tools/gn/tokenizer_unittest.cc
@@ -140,10 +140,10 @@ TEST(Tokenizer, Locations) {
   std::vector<Token> results = Tokenizer::Tokenize(&input, &err);
   ASSERT_EQ(4u, results.size());
-  ASSERT_TRUE(results[0].location() == Location(&input, 1, 1));
-  ASSERT_TRUE(results[1].location() == Location(&input, 1, 3));
-  ASSERT_TRUE(results[2].location() == Location(&input, 1, 5));
-  ASSERT_TRUE(results[3].location() == Location(&input, 2, 3));
+  ASSERT_TRUE(results[0].location() == Location(&input, 1, 1, 1));
+  ASSERT_TRUE(results[1].location() == Location(&input, 1, 3, 3));
+  ASSERT_TRUE(results[2].location() == Location(&input, 1, 5, 5));
+  ASSERT_TRUE(results[3].location() == Location(&input, 2, 3, 8));
 }

 TEST(Tokenizer, ByteOffsetOfNthLine) {
@@ -162,3 +162,23 @@ TEST(Tokenizer, ByteOffsetOfNthLine) {
   EXPECT_EQ(0u, Tokenizer::ByteOffsetOfNthLine(input2, 1));
   EXPECT_EQ(2u, Tokenizer::ByteOffsetOfNthLine(input2, 2));
 }
+
+TEST(Tokenizer, Comments) {
+  TokenExpectation fn[] = {
+    { Token::LINE_COMMENT, "# Stuff" },
+    { Token::IDENTIFIER, "fun" },
+    { Token::LEFT_PAREN, "(" },
+    { Token::STRING, "\"foo\"" },
+    { Token::RIGHT_PAREN, ")" },
+    { Token::LEFT_BRACE, "{" },
+    { Token::SUFFIX_COMMENT, "# Things" },
+    { Token::LINE_COMMENT, "#Wee" },
+    { Token::IDENTIFIER, "foo" },
+    { Token::EQUAL, "=" },
+    { Token::INTEGER, "12" },
+    { Token::SUFFIX_COMMENT, "#Zip" },
+    { Token::RIGHT_BRACE, "}" },
+  };
+  EXPECT_TRUE(CheckTokenizer(
+      "# Stuff\nfun(\"foo\") { # Things\n#Wee\nfoo = 12 #Zip\n}", fn));
+}
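
Note: the new Comments test relies on the TokenExpectation struct and the
CheckTokenizer() helper defined earlier in tokenizer_unittest.cc, which are not
part of this hunk. For readers without the full file, a minimal sketch of what
that harness plausibly looks like follows; the field names, template signature,
and comparison logic are assumptions, not code taken from this patch.

  // Sketch only (assumed): pairs an expected token type with its literal text.
  struct TokenExpectation {
    Token::Type type;
    const char* value;
  };

  // Sketch only (assumed): tokenizes |input| and checks that the resulting
  // token stream matches |expect| element-by-element in type and value.
  template <size_t len>
  bool CheckTokenizer(const char* input,
                      const TokenExpectation (&expect)[len]) {
    InputFile input_file(SourceFile("/test"));
    input_file.SetContents(input);

    Err err;
    std::vector<Token> results = Tokenizer::Tokenize(&input_file, &err);

    if (results.size() != len)
      return false;
    for (size_t i = 0; i < len; i++) {
      if (expect[i].type != results[i].type())
        return false;
      if (results[i].value() != expect[i].value)
        return false;
    }
    return true;
  }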