Side by Side Diff: tools/gn/tokenizer_unittest.cc

Issue 21114002: Add initial prototype for the GN meta-buildsystem. (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src
Patch Set: add owners and readme (created 7 years, 4 months ago)
// Copyright (c) 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "testing/gtest/include/gtest/gtest.h"
#include "tools/gn/input_file.h"
#include "tools/gn/token.h"
#include "tools/gn/tokenizer.h"
namespace {

struct TokenExpectation {
  Token::Type type;
  const char* value;
};
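// Tokenizes |input| and returns true if the resulting token stream matches
// |expect| in both type and value. The expected token count is deduced from
// the length of the |expect| array.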
template<size_t len>
bool CheckTokenizer(const char* input, const TokenExpectation (&expect)[len]) {
  InputFile input_file(SourceFile("/test"));
  input_file.SetContents(input);

  Err err;
  std::vector<Token> results = Tokenizer::Tokenize(&input_file, &err);

  if (results.size() != len)
    return false;
  for (size_t i = 0; i < len; i++) {
    if (expect[i].type != results[i].type())
      return false;
    if (expect[i].value != results[i].value())
      return false;
  }
  return true;
}

}  // namespace
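// An empty file and a whitespace-only file should both tokenize to nothing.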
TEST(Tokenizer, Empty) {
  InputFile empty_string_input(SourceFile("/test"));
  empty_string_input.SetContents("");

  Err err;
  std::vector<Token> results = Tokenizer::Tokenize(&empty_string_input, &err);
  EXPECT_TRUE(results.empty());

  InputFile whitespace_input(SourceFile("/test"));
  whitespace_input.SetContents(" \n\r");

  results = Tokenizer::Tokenize(&whitespace_input, &err);
  EXPECT_TRUE(results.empty());
}
TEST(Tokenizer, Identifier) {
  TokenExpectation one_ident[] = {
    { Token::IDENTIFIER, "foo" }
  };
  EXPECT_TRUE(CheckTokenizer(" foo ", one_ident));
}
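// Per the expectations below, "-123" comes back as a single INTEGER token
// rather than an OPERATOR followed by an INTEGER.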
TEST(Tokenizer, Integer) {
  TokenExpectation integers[] = {
    { Token::INTEGER, "123" },
    { Token::INTEGER, "-123" }
  };
  EXPECT_TRUE(CheckTokenizer(" 123 -123 ", integers));
}
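// A string token's value() keeps the surrounding quotes and escape
// characters verbatim; unescaping is exercised separately in
// StringUnescaping below.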
TEST(Tokenizer, String) {
  TokenExpectation strings[] = {
    { Token::STRING, "\"foo\"" },
    { Token::STRING, "\"bar\\\"baz\"" },
    { Token::STRING, "\"asdf\\\\\"" }
  };
  EXPECT_TRUE(CheckTokenizer(" \"foo\" \"bar\\\"baz\" \"asdf\\\\\" ",
                             strings));
}
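// Two-character operators such as "+=" and "<=" should each be a single
// OPERATOR token, not two adjacent ones.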
TEST(Tokenizer, Operator) {
  TokenExpectation operators[] = {
    { Token::OPERATOR, "-" },
    { Token::OPERATOR, "+" },
    { Token::OPERATOR, "=" },
    { Token::OPERATOR, "+=" },
    { Token::OPERATOR, "-=" },
    { Token::OPERATOR, "!=" },
    { Token::OPERATOR, "==" },
    { Token::OPERATOR, "<" },
    { Token::OPERATOR, ">" },
    { Token::OPERATOR, "<=" },
    { Token::OPERATOR, ">=" },
  };
  EXPECT_TRUE(CheckTokenizer("- + = += -= != == < > <= >=",
                             operators));
}
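// Scopers are recognized even when they run together with no whitespace.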
TEST(Tokenizer, Scoper) {
  TokenExpectation scopers[] = {
    { Token::SCOPER, "{" },
    { Token::SCOPER, "[" },
    { Token::SCOPER, "]" },
    { Token::SCOPER, "}" },
    { Token::SCOPER, "(" },
    { Token::SCOPER, ")" },
  };
  EXPECT_TRUE(CheckTokenizer("{[ ]} ()", scopers));
}
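// An end-to-end sample mixing identifiers, scopers, a string, an operator,
// and an integer across a newline.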
TEST(Tokenizer, FunctionCall) {
  TokenExpectation fn[] = {
    { Token::IDENTIFIER, "fun" },
    { Token::SCOPER, "(" },
    { Token::STRING, "\"foo\"" },
    { Token::SCOPER, ")" },
    { Token::SCOPER, "{" },
    { Token::IDENTIFIER, "foo" },
    { Token::OPERATOR, "=" },
    { Token::INTEGER, "12" },
    { Token::SCOPER, "}" },
  };
  EXPECT_TRUE(CheckTokenizer("fun(\"foo\") {\nfoo = 12}", fn));
}
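// StringValue() should strip the enclosing quotes and resolve escape
// sequences, unlike value(), which returns the raw input text.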
TEST(Tokenizer, StringUnescaping) {
  InputFile input(SourceFile("/test"));
  input.SetContents("\"asd\\\"f\" \"\"");
  Err err;
  std::vector<Token> results = Tokenizer::Tokenize(&input, &err);

  ASSERT_EQ(2u, results.size());
  EXPECT_EQ("asd\"f", results[0].StringValue());
  EXPECT_EQ("", results[1].StringValue());
}
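// Both line and column numbers in a Location are 1-based, as the
// expectations below demonstrate.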
TEST(Tokenizer, Locations) {
  InputFile input(SourceFile("/test"));
  input.SetContents("1 2 \"three\"\n  4");  // '4' is at line 2, column 3.
  Err err;
  std::vector<Token> results = Tokenizer::Tokenize(&input, &err);

  ASSERT_EQ(4u, results.size());
  ASSERT_TRUE(results[0].location() == Location(&input, 1, 1));
  ASSERT_TRUE(results[1].location() == Location(&input, 1, 3));
  ASSERT_TRUE(results[2].location() == Location(&input, 1, 5));
  ASSERT_TRUE(results[3].location() == Location(&input, 2, 3));
}
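// ByteOffsetOfNthLine() takes a 1-based line number and returns the byte
// offset at which that line begins.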
TEST(Tokenizer, ByteOffsetOfNthLine) {
  EXPECT_EQ(0u, Tokenizer::ByteOffsetOfNthLine("foo", 1));

  // Windows and Posix have different line endings, so check the byte at the
  // location rather than the offset.
  char input1[] = "aaa\nxaa\n\nya";
  EXPECT_EQ('x', input1[Tokenizer::ByteOffsetOfNthLine(input1, 2)]);
  EXPECT_EQ('y', input1[Tokenizer::ByteOffsetOfNthLine(input1, 4)]);

  char input2[3];
  input2[0] = 'a';
  input2[1] = '\n';  // Manually set to avoid Windows double-byte endings.
  input2[2] = 0;
  EXPECT_EQ(0u, Tokenizer::ByteOffsetOfNthLine(input2, 1));
  EXPECT_EQ(2u, Tokenizer::ByteOffsetOfNthLine(input2, 2));
}