Chromium Code Reviews

Unified Diff: third_party/WebKit/Source/core/css/parser/CSSTokenizerTest.cpp

Issue 2611823003: ABANDONED CL: Changes to compile and pass tests after Big Blink Rename (excluding functions). (Closed)
Patch Set: Inducing merge conflicts to force human review and changes after rename. Created 3 years, 11 months ago
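Note on the change pattern: throughout this file the patch tracks the Big Blink Rename by moving the token-factory test helpers from WebKit-style camelCase to snake_case and giving the value-type and sign constants Chromium's k prefix. Taken verbatim from lines 275-276 of the diff below (nothing here is new API), the before/after shape is:

  // Old (WebKit naming):
  TEST_TOKENS("scale(2)", function("scale"),
              number(IntegerValueType, 2, NoSign), rightParenthesis());
  // New (this patch):
  TEST_TOKENS("scale(2)", function("scale"),
              number(kIntegerValueType, 2, kNoSign), right_parenthesis());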
   1  // Copyright 2014 The Chromium Authors. All rights reserved.
   2  // Use of this source code is governed by a BSD-style license that can be
   3  // found in the LICENSE file.
   4
   5  #include "core/css/parser/CSSTokenizer.h"
   6
   7  #include "core/css/parser/CSSParserTokenRange.h"
   8  #include "core/css/parser/MediaQueryBlockWatcher.h"
   9  #include "testing/gtest/include/gtest/gtest.h"
  10  #include "wtf/allocator/Partitions.h"
(...skipping 147 matching lines...)
 158  DEFINE_TOKEN(badString, (BadStringToken));
 159  DEFINE_TOKEN(badUrl, (BadUrlToken));
 160
 161  String fromUChar32(UChar32 c) {
 162    StringBuilder input;
 163    input.append(c);
 164    return input.toString();
 165  }
 166
 167  TEST(CSSTokenizerTest, SingleCharacterTokens) {
-168    TEST_TOKENS("(", leftParenthesis());
+168    TEST_TOKENS("(", left_parenthesis());
-169    TEST_TOKENS(")", rightParenthesis());
+169    TEST_TOKENS(")", right_parenthesis());
-170    TEST_TOKENS("[", leftBracket());
+170    TEST_TOKENS("[", left_bracket());
-171    TEST_TOKENS("]", rightBracket());
+171    TEST_TOKENS("]", right_bracket());
 172    TEST_TOKENS(",", comma());
 173    TEST_TOKENS(":", colon());
 174    TEST_TOKENS(";", semicolon());
-175    TEST_TOKENS(")[", rightParenthesis(), leftBracket());
+175    TEST_TOKENS(")[", right_parenthesis(), left_bracket());
-176    TEST_TOKENS("[)", leftBracket(), rightParenthesis());
+176    TEST_TOKENS("[)", left_bracket(), right_parenthesis());
-177    TEST_TOKENS("{}", leftBrace(), rightBrace());
+177    TEST_TOKENS("{}", left_brace(), right_brace());
 178    TEST_TOKENS(",,", comma(), comma());
 179  }
 180
 181  TEST(CSSTokenizerTest, MultipleCharacterTokens) {
-182    TEST_TOKENS("~=", includeMatch());
+182    TEST_TOKENS("~=", include_match());
-183    TEST_TOKENS("|=", dashMatch());
+183    TEST_TOKENS("|=", dash_match());
-184    TEST_TOKENS("^=", prefixMatch());
+184    TEST_TOKENS("^=", prefix_match());
-185    TEST_TOKENS("$=", suffixMatch());
+185    TEST_TOKENS("$=", suffix_match());
-186    TEST_TOKENS("*=", substringMatch());
+186    TEST_TOKENS("*=", substring_match());
 187    TEST_TOKENS("||", column());
 188    TEST_TOKENS("|||", column(), delim('|'));
 189    TEST_TOKENS("<!--", cdo());
 190    TEST_TOKENS("<!---", cdo(), delim('-'));
 191    TEST_TOKENS("-->", cdc());
 192  }
 193
 194  TEST(CSSTokenizerTest, DelimiterToken) {
 195    TEST_TOKENS("^", delim('^'));
 196    TEST_TOKENS("*", delim('*'));
(...skipping 69 matching lines...)
 266                ident(fromUChar32(0xA0)));  // non-breaking space
 267    TEST_TOKENS(fromUChar32(0x1234), ident(fromUChar32(0x1234)));
 268    TEST_TOKENS(fromUChar32(0x12345), ident(fromUChar32(0x12345)));
 269    TEST_TOKENS(String("\0", 1), ident(fromUChar32(0xFFFD)));
 270    TEST_TOKENS(String("ab\0c", 4), ident("ab" + fromUChar32(0xFFFD) + "c"));
 271    TEST_TOKENS(String("ab\0c", 4), ident("ab" + fromUChar32(0xFFFD) + "c"));
 272  }
 273
 274  TEST(CSSTokenizerTest, FunctionToken) {
 275    TEST_TOKENS("scale(2)", function("scale"),
-276                number(IntegerValueType, 2, NoSign), rightParenthesis());
+276                number(kIntegerValueType, 2, kNoSign), right_parenthesis());
 277    TEST_TOKENS("foo-bar\\ baz(", function("foo-bar baz"));
 278    TEST_TOKENS("fun\\(ction(", function("fun(ction"));
 279    TEST_TOKENS("-foo(", function("-foo"));
 280    TEST_TOKENS("url(\"foo.gif\"", function("url"), string("foo.gif"));
 281    TEST_TOKENS("foo( \'bar.gif\'", function("foo"), whitespace(),
 282                string("bar.gif"));
 283    // To simplify implementation we drop the whitespace in
 284    // function(url),whitespace,string()
 285    TEST_TOKENS("url( \'bar.gif\'", function("url"), string("bar.gif"));
 286  }
(...skipping 20 matching lines...)
 307                url("https://example.com/cats.png"));
 308    TEST_TOKENS("uRl(what-a.crazy^URL~this\\ is!)",
 309                url("what-a.crazy^URL~this is!"));
 310    TEST_TOKENS("uRL(123#test)", url("123#test"));
 311    TEST_TOKENS("Url(escapes\\ \\\"\\'\\)\\()", url("escapes \"')("));
 312    TEST_TOKENS("UrL( whitespace )", url("whitespace"));
 313    TEST_TOKENS("URl( whitespace-eof ", url("whitespace-eof"));
 314    TEST_TOKENS("URL(eof", url("eof"));
 315    TEST_TOKENS("url(not/*a*/comment)", url("not/*a*/comment"));
 316    TEST_TOKENS("urL()", url(""));
-317    TEST_TOKENS("uRl(white space),", badUrl(), comma());
+317    TEST_TOKENS("uRl(white space),", bad_url(), comma());
-318    TEST_TOKENS("Url(b(ad),", badUrl(), comma());
+318    TEST_TOKENS("Url(b(ad),", bad_url(), comma());
-319    TEST_TOKENS("uRl(ba'd):", badUrl(), colon());
+319    TEST_TOKENS("uRl(ba'd):", bad_url(), colon());
-320    TEST_TOKENS("urL(b\"ad):", badUrl(), colon());
+320    TEST_TOKENS("urL(b\"ad):", bad_url(), colon());
-321    TEST_TOKENS("uRl(b\"ad):", badUrl(), colon());
+321    TEST_TOKENS("uRl(b\"ad):", bad_url(), colon());
-322    TEST_TOKENS("Url(b\\\rad):", badUrl(), colon());
+322    TEST_TOKENS("Url(b\\\rad):", bad_url(), colon());
-323    TEST_TOKENS("url(b\\\nad):", badUrl(), colon());
+323    TEST_TOKENS("url(b\\\nad):", bad_url(), colon());
-324    TEST_TOKENS("url(/*'bad')*/", badUrl(), delim('*'), delim('/'));
+324    TEST_TOKENS("url(/*'bad')*/", bad_url(), delim('*'), delim('/'));
-325    TEST_TOKENS("url(ba'd\\\\))", badUrl(), rightParenthesis());
+325    TEST_TOKENS("url(ba'd\\\\))", bad_url(), right_parenthesis());
 326  }
 327
 328  TEST(CSSTokenizerTest, StringToken) {
 329    TEST_TOKENS("'text'", string("text"));
 330    TEST_TOKENS("\"text\"", string("text"));
 331    TEST_TOKENS("'testing, 123!'", string("testing, 123!"));
 332    TEST_TOKENS("'es\\'ca\\\"pe'", string("es'ca\"pe"));
 333    TEST_TOKENS("'\"quotes\"'", string("\"quotes\""));
 334    TEST_TOKENS("\"'quotes'\"", string("'quotes'"));
 335    TEST_TOKENS("\"mismatch'", string("mismatch'"));
 336    TEST_TOKENS("'text\5\t\13'", string("text\5\t\13"));
 337    TEST_TOKENS("\"end on eof", string("end on eof"));
 338    TEST_TOKENS("'esca\\\nped'", string("escaped"));
 339    TEST_TOKENS("\"esc\\\faped\"", string("escaped"));
 340    TEST_TOKENS("'new\\\rline'", string("newline"));
 341    TEST_TOKENS("\"new\\\r\nline\"", string("newline"));
-342    TEST_TOKENS("'bad\nstring", badString(), whitespace(), ident("string"));
+342    TEST_TOKENS("'bad\nstring", bad_string(), whitespace(), ident("string"));
-343    TEST_TOKENS("'bad\rstring", badString(), whitespace(), ident("string"));
+343    TEST_TOKENS("'bad\rstring", bad_string(), whitespace(), ident("string"));
-344    TEST_TOKENS("'bad\r\nstring", badString(), whitespace(), ident("string"));
+344    TEST_TOKENS("'bad\r\nstring", bad_string(), whitespace(), ident("string"));
-345    TEST_TOKENS("'bad\fstring", badString(), whitespace(), ident("string"));
+345    TEST_TOKENS("'bad\fstring", bad_string(), whitespace(), ident("string"));
 346    TEST_TOKENS(String("'\0'", 3), string(fromUChar32(0xFFFD)));
 347    TEST_TOKENS(String("'hel\0lo'", 8),
 348                string("hel" + fromUChar32(0xFFFD) + "lo"));
 349    TEST_TOKENS(String("'h\\65l\0lo'", 10),
 350                string("hel" + fromUChar32(0xFFFD) + "lo"));
 351  }
 352
 353  TEST(CSSTokenizerTest, HashToken) {
 354    TEST_TOKENS("#id-selector", hash("id-selector", HashTokenId));
 355    TEST_TOKENS("#FF7700", hash("FF7700", HashTokenId));
(...skipping 89 matching lines...)
 445    TEST_TOKENS("u+z", ident("u"), delim('+'), ident("z"));
 446    TEST_TOKENS("u+", ident("u"), delim('+'));
 447    TEST_TOKENS("u+-543", ident("u"), delim('+'),
 448                number(IntegerValueType, -543, MinusSign));
 449  }
 450
 451  TEST(CSSTokenizerTest, CommentToken) {
 452    TEST_TOKENS("/*comment*/a", ident("a"));
 453    TEST_TOKENS("/**\\2f**//", delim('/'));
 454    TEST_TOKENS("/**y*a*y**/ ", whitespace());
-455    TEST_TOKENS(",/* \n :) \n */)", comma(), rightParenthesis());
+455    TEST_TOKENS(",/* \n :) \n */)", comma(), right_parenthesis());
 456    TEST_TOKENS(":/*/*/", colon());
 457    TEST_TOKENS("/**/*", delim('*'));
 458    TEST_TOKENS(";/******", semicolon());
 459  }
 460
 461  typedef struct {
 462    const char* input;
 463    const unsigned maxLevel;
 464    const unsigned finalLevel;
 465  } BlockTestCase;
(...skipping 39 matching lines...)
 505        blockWatcher.handleToken(range.consume());
 506        level = blockWatcher.blockLevel();
 507        maxLevel = std::max(level, maxLevel);
 508      }
 509      ASSERT_EQ(testCases[i].maxLevel, maxLevel);
 510      ASSERT_EQ(testCases[i].finalLevel, level);
 511    }
 512  }
 513
 514  } // namespace blink
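For context on the block-watcher test whose tail is visible above (lines 505-510): it is table-driven, pairing each BlockTestCase input string with the maximum and final block nesting levels that MediaQueryBlockWatcher should report. The setup lines fall inside the skipped portion of this diff, so the sketch below only illustrates how such a loop is typically assembled; the CSSTokenizer constructor, tokenRange(), atEnd(), and the sample test case are assumptions for illustration, not taken from this CL.

  // Minimal sketch of the table-driven loop (assumed pre-rename Blink API;
  // the test case values are hypothetical). Slots into a TEST body in this file.
  BlockTestCase testCases[] = {
      {"(max-width: 800px()), (max-width: 800px)", 2, 0},
  };
  for (size_t i = 0; i < sizeof(testCases) / sizeof(testCases[0]); ++i) {
    CSSTokenizer tokenizer(testCases[i].input);          // assumed constructor
    CSSParserTokenRange range = tokenizer.tokenRange();  // assumed accessor
    MediaQueryBlockWatcher blockWatcher;
    unsigned level = 0;
    unsigned maxLevel = 0;
    while (!range.atEnd()) {                    // assumed end-of-range check
      blockWatcher.handleToken(range.consume());  // feed each token to the watcher
      level = blockWatcher.blockLevel();          // current nesting depth
      maxLevel = std::max(level, maxLevel);       // deepest nesting seen so far
    }
    ASSERT_EQ(testCases[i].maxLevel, maxLevel);
    ASSERT_EQ(testCases[i].finalLevel, level);
  }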
