| OLD | NEW |
| 1 // Copyright 2014 The Chromium Authors. All rights reserved. | 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "config.h" | 5 #include "config.h" |
| 6 #include "core/css/parser/CSSTokenizer.h" | 6 #include "core/css/parser/CSSTokenizer.h" |
| 7 | 7 |
| 8 #include "core/css/parser/CSSParserTokenRange.h" | 8 #include "core/css/parser/CSSParserTokenRange.h" |
| 9 #include "core/css/parser/MediaQueryBlockWatcher.h" | 9 #include "core/css/parser/MediaQueryBlockWatcher.h" |
| 10 #include "wtf/Partitions.h" |
| 10 #include <gtest/gtest.h> | 11 #include <gtest/gtest.h> |
| 11 | 12 |
| 12 namespace blink { | 13 namespace blink { |
| 13 | 14 |
| 14 // This lets us see the line numbers of failing tests | 15 // This lets us see the line numbers of failing tests |
| 15 #define TEST_TOKENS(string, ...) { \ | 16 #define TEST_TOKENS(string, ...) { \ |
| 16 String s = string; \ | 17 String s = string; \ |
| 17 SCOPED_TRACE(s.ascii().data()); \ | 18 SCOPED_TRACE(s.ascii().data()); \ |
| 18 testTokens(string, __VA_ARGS__); \ | 19 testTokens(string, __VA_ARGS__); \ |
| 19 } | 20 } |
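The TEST_TOKENS wrapper exists so that any EXPECT failure inside testTokens() is reported together with the string under test: SCOPED_TRACE pushes the message onto gtest's trace stack for the enclosing block, so the failure output points back at the individual TEST_TOKENS line. A minimal standalone sketch of the same pattern (checkValue() is a hypothetical stand-in for testTokens()):

#include <gtest/gtest.h>

// Hypothetical helper standing in for testTokens(); any failure inside it is
// printed together with the SCOPED_TRACE message of the calling scope.
static void checkValue(int actual, int expected)
{
    EXPECT_EQ(expected, actual);
}

#define CHECK_VALUE(actual, expected) { \
    SCOPED_TRACE("while checking " #actual); \
    checkValue(actual, expected); \
}

TEST(ScopedTraceSketch, ReportsCallSite)
{
    CHECK_VALUE(1 + 1, 2);
    CHECK_VALUE(2 + 2, 4);
}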
| (...skipping 90 matching lines...) |
| 110 return token; | 111 return token; |
| 111 } | 112 } |
| 112 | 113 |
| 113 static CSSParserToken percentage(NumericValueType type, double value) | 114 static CSSParserToken percentage(NumericValueType type, double value) |
| 114 { | 115 { |
| 115 CSSParserToken token = number(type, value, NoSign); // sign ignored | 116 CSSParserToken token = number(type, value, NoSign); // sign ignored |
| 116 token.convertToPercentage(); | 117 token.convertToPercentage(); |
| 117 return token; | 118 return token; |
| 118 } | 119 } |
| 119 | 120 |
| 120 DEFINE_STATIC_LOCAL_NOASSERT(CSSParserToken, whitespace, (WhitespaceToken)); | 121 // We need to initialize PartitionAlloc before creating CSSParserTokens |
| 121 DEFINE_STATIC_LOCAL_NOASSERT(CSSParserToken, colon, (ColonToken)); | 122 // because CSSParserToken depends on PartitionAlloc. It is safe to call |
| 122 DEFINE_STATIC_LOCAL_NOASSERT(CSSParserToken, semicolon, (SemicolonToken)); | 123 // WTF::Partitions::initialize() multiple times. |
| 123 DEFINE_STATIC_LOCAL_NOASSERT(CSSParserToken, comma, (CommaToken)); | 124 #define DEFINE_TOKEN(name, argument) \ |
| 124 DEFINE_STATIC_LOCAL_NOASSERT(CSSParserToken, includeMatch, (IncludeMatchToken)); | 125 static CSSParserToken& name() \ |
| 125 DEFINE_STATIC_LOCAL_NOASSERT(CSSParserToken, dashMatch, (DashMatchToken)); | 126 { \ |
| 126 DEFINE_STATIC_LOCAL_NOASSERT(CSSParserToken, prefixMatch, (PrefixMatchToken)); | 127 WTF::Partitions::initialize(); \ |
| 127 DEFINE_STATIC_LOCAL_NOASSERT(CSSParserToken, suffixMatch, (SuffixMatchToken)); | 128 DEFINE_STATIC_LOCAL_NOASSERT(CSSParserToken, name, (argument)); \ |
| 128 DEFINE_STATIC_LOCAL_NOASSERT(CSSParserToken, substringMatch, (SubstringMatchToken)); | 129 return name; \ |
| 129 DEFINE_STATIC_LOCAL_NOASSERT(CSSParserToken, column, (ColumnToken)); | 130 } |
| 130 DEFINE_STATIC_LOCAL_NOASSERT(CSSParserToken, cdo, (CDOToken)); | 131 |
| 131 DEFINE_STATIC_LOCAL_NOASSERT(CSSParserToken, cdc, (CDCToken)); | 132 DEFINE_TOKEN(whitespace, (WhitespaceToken)) |
| 132 DEFINE_STATIC_LOCAL_NOASSERT(CSSParserToken, leftParenthesis, (LeftParenthesisToken)); | 133 DEFINE_TOKEN(colon, (ColonToken)); |
| 133 DEFINE_STATIC_LOCAL_NOASSERT(CSSParserToken, rightParenthesis, (RightParenthesisToken)); | 134 DEFINE_TOKEN(semicolon, (SemicolonToken)); |
| 134 DEFINE_STATIC_LOCAL_NOASSERT(CSSParserToken, leftBracket, (LeftBracketToken)); | 135 DEFINE_TOKEN(comma, (CommaToken)); |
| 135 DEFINE_STATIC_LOCAL_NOASSERT(CSSParserToken, rightBracket, (RightBracketToken)); | 136 DEFINE_TOKEN(includeMatch, (IncludeMatchToken)); |
| 136 DEFINE_STATIC_LOCAL_NOASSERT(CSSParserToken, leftBrace, (LeftBraceToken)); | 137 DEFINE_TOKEN(dashMatch, (DashMatchToken)); |
| 137 DEFINE_STATIC_LOCAL_NOASSERT(CSSParserToken, rightBrace, (RightBraceToken)); | 138 DEFINE_TOKEN(prefixMatch, (PrefixMatchToken)); |
| 138 DEFINE_STATIC_LOCAL_NOASSERT(CSSParserToken, badString, (BadStringToken)); | 139 DEFINE_TOKEN(suffixMatch, (SuffixMatchToken)); |
| 139 DEFINE_STATIC_LOCAL_NOASSERT(CSSParserToken, badUrl, (BadUrlToken)); | 140 DEFINE_TOKEN(substringMatch, (SubstringMatchToken)); |
| 141 DEFINE_TOKEN(column, (ColumnToken)); |
| 142 DEFINE_TOKEN(cdo, (CDOToken)); |
| 143 DEFINE_TOKEN(cdc, (CDCToken)); |
| 144 DEFINE_TOKEN(leftParenthesis, (LeftParenthesisToken)); |
| 145 DEFINE_TOKEN(rightParenthesis, (RightParenthesisToken)); |
| 146 DEFINE_TOKEN(leftBracket, (LeftBracketToken)); |
| 147 DEFINE_TOKEN(rightBracket, (RightBracketToken)); |
| 148 DEFINE_TOKEN(leftBrace, (LeftBraceToken)); |
| 149 DEFINE_TOKEN(rightBrace, (RightBraceToken)); |
| 150 DEFINE_TOKEN(badString, (BadStringToken)); |
| 151 DEFINE_TOKEN(badUrl, (BadUrlToken)); |
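The switch from bare DEFINE_STATIC_LOCAL_NOASSERT globals to DEFINE_TOKEN accessor functions is the usual lazy-initialization idiom: the static token is constructed on the first call, after WTF::Partitions::initialize() has already run, so PartitionAlloc is guaranteed to be ready before any CSSParserToken is built, and later calls return the same object. A self-contained sketch of the same idiom in plain C++ (ensureAllocatorReady() is a made-up stand-in for WTF::Partitions::initialize(), Token for CSSParserToken):

#include <iostream>

// Made-up stand-in for WTF::Partitions::initialize(); like the real call,
// it is safe to invoke any number of times.
static void ensureAllocatorReady()
{
    static bool initialized = false;
    if (!initialized) {
        std::cout << "allocator initialized\n";
        initialized = true;
    }
}

struct Token {
    explicit Token(int type) : type(type) { }
    int type;
};

// Mirrors the DEFINE_TOKEN pattern: the initializer runs inside the accessor,
// so the allocator is set up before the function-local static is constructed.
#define DEFINE_TOKEN(name, typeValue) \
    static Token& name() \
    { \
        ensureAllocatorReady(); \
        static Token token(typeValue); \
        return token; \
    }

DEFINE_TOKEN(whitespaceToken, 1)
DEFINE_TOKEN(commaToken, 2)

int main()
{
    // First use triggers allocator setup; the tokens themselves are built once.
    std::cout << whitespaceToken().type << " " << commaToken().type << "\n";
    return 0;
}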
| 140 | 152 |
| 141 String fromUChar32(UChar32 c) | 153 String fromUChar32(UChar32 c) |
| 142 { | 154 { |
| 143 StringBuilder input; | 155 StringBuilder input; |
| 144 input.append(c); | 156 input.append(c); |
| 145 return input.toString(); | 157 return input.toString(); |
| 146 } | 158 } |
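fromUChar32 builds a one-code-point String via StringBuilder, which is what lets cases like ident(fromUChar32(0x12345)) cover characters outside the Basic Multilingual Plane; in a UTF-16 backed string such a code point is stored as a surrogate pair. A rough illustration of that encoding step (plain C++, not Blink code; WTF's StringBuilder does the equivalent internally when given a UChar32):

#include <cstdint>
#include <cstdio>

// Encode one code point above U+FFFF as a UTF-16 surrogate pair.
int main()
{
    const uint32_t c = 0x10FFFF; // maximum code point, used in the Escapes cases below
    const uint32_t v = c - 0x10000;
    const uint16_t lead = static_cast<uint16_t>(0xD800 + (v >> 10));
    const uint16_t trail = static_cast<uint16_t>(0xDC00 + (v & 0x3FF));
    // Prints "U+10FFFF -> 0xDBFF 0xDFFF".
    std::printf("U+%06X -> 0x%04X 0x%04X\n", static_cast<unsigned>(c),
        static_cast<unsigned>(lead), static_cast<unsigned>(trail));
    return 0;
}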
| 147 | 159 |
| 148 TEST(CSSTokenizerTest, SingleCharacterTokens) | 160 TEST(CSSTokenizerTest, SingleCharacterTokens) |
| 149 { | 161 { |
| 150 TEST_TOKENS("(", leftParenthesis); | 162 TEST_TOKENS("(", leftParenthesis()); |
| 151 TEST_TOKENS(")", rightParenthesis); | 163 TEST_TOKENS(")", rightParenthesis()); |
| 152 TEST_TOKENS("[", leftBracket); | 164 TEST_TOKENS("[", leftBracket()); |
| 153 TEST_TOKENS("]", rightBracket); | 165 TEST_TOKENS("]", rightBracket()); |
| 154 TEST_TOKENS(",", comma); | 166 TEST_TOKENS(",", comma()); |
| 155 TEST_TOKENS(":", colon); | 167 TEST_TOKENS(":", colon()); |
| 156 TEST_TOKENS(";", semicolon); | 168 TEST_TOKENS(";", semicolon()); |
| 157 TEST_TOKENS(")[", rightParenthesis, leftBracket); | 169 TEST_TOKENS(")[", rightParenthesis(), leftBracket()); |
| 158 TEST_TOKENS("[)", leftBracket, rightParenthesis); | 170 TEST_TOKENS("[)", leftBracket(), rightParenthesis()); |
| 159 TEST_TOKENS("{}", leftBrace, rightBrace); | 171 TEST_TOKENS("{}", leftBrace(), rightBrace()); |
| 160 TEST_TOKENS(",,", comma, comma); | 172 TEST_TOKENS(",,", comma(), comma()); |
| 161 } | 173 } |
| 162 | 174 |
| 163 TEST(CSSTokenizerTest, MultipleCharacterTokens) | 175 TEST(CSSTokenizerTest, MultipleCharacterTokens) |
| 164 { | 176 { |
| 165 TEST_TOKENS("~=", includeMatch); | 177 TEST_TOKENS("~=", includeMatch()); |
| 166 TEST_TOKENS("|=", dashMatch); | 178 TEST_TOKENS("|=", dashMatch()); |
| 167 TEST_TOKENS("^=", prefixMatch); | 179 TEST_TOKENS("^=", prefixMatch()); |
| 168 TEST_TOKENS("$=", suffixMatch); | 180 TEST_TOKENS("$=", suffixMatch()); |
| 169 TEST_TOKENS("*=", substringMatch); | 181 TEST_TOKENS("*=", substringMatch()); |
| 170 TEST_TOKENS("||", column); | 182 TEST_TOKENS("||", column()); |
| 171 TEST_TOKENS("|||", column, delim('|')); | 183 TEST_TOKENS("|||", column(), delim('|')); |
| 172 TEST_TOKENS("<!--", cdo); | 184 TEST_TOKENS("<!--", cdo()); |
| 173 TEST_TOKENS("<!---", cdo, delim('-')); | 185 TEST_TOKENS("<!---", cdo(), delim('-')); |
| 174 TEST_TOKENS("-->", cdc); | 186 TEST_TOKENS("-->", cdc()); |
| 175 } | 187 } |
| 176 | 188 |
| 177 TEST(CSSTokenizerTest, DelimiterToken) | 189 TEST(CSSTokenizerTest, DelimiterToken) |
| 178 { | 190 { |
| 179 TEST_TOKENS("^", delim('^')); | 191 TEST_TOKENS("^", delim('^')); |
| 180 TEST_TOKENS("*", delim('*')); | 192 TEST_TOKENS("*", delim('*')); |
| 181 TEST_TOKENS("%", delim('%')); | 193 TEST_TOKENS("%", delim('%')); |
| 182 TEST_TOKENS("~", delim('~')); | 194 TEST_TOKENS("~", delim('~')); |
| 183 TEST_TOKENS("&", delim('&')); | 195 TEST_TOKENS("&", delim('&')); |
| 184 TEST_TOKENS("|", delim('|')); | 196 TEST_TOKENS("|", delim('|')); |
| 185 TEST_TOKENS("\x7f", delim('\x7f')); | 197 TEST_TOKENS("\x7f", delim('\x7f')); |
| 186 TEST_TOKENS("\1", delim('\x1')); | 198 TEST_TOKENS("\1", delim('\x1')); |
| 187 TEST_TOKENS("~-", delim('~'), delim('-')); | 199 TEST_TOKENS("~-", delim('~'), delim('-')); |
| 188 TEST_TOKENS("^|", delim('^'), delim('|')); | 200 TEST_TOKENS("^|", delim('^'), delim('|')); |
| 189 TEST_TOKENS("$~", delim('$'), delim('~')); | 201 TEST_TOKENS("$~", delim('$'), delim('~')); |
| 190 TEST_TOKENS("*^", delim('*'), delim('^')); | 202 TEST_TOKENS("*^", delim('*'), delim('^')); |
| 191 } | 203 } |
| 192 | 204 |
| 193 TEST(CSSTokenizerTest, WhitespaceTokens) | 205 TEST(CSSTokenizerTest, WhitespaceTokens) |
| 194 { | 206 { |
| 195 TEST_TOKENS(" ", whitespace); | 207 TEST_TOKENS(" ", whitespace()); |
| 196 TEST_TOKENS("\n\rS", whitespace, ident("S")); | 208 TEST_TOKENS("\n\rS", whitespace(), ident("S")); |
| 197 TEST_TOKENS(" *", whitespace, delim('*')); | 209 TEST_TOKENS(" *", whitespace(), delim('*')); |
| 198 TEST_TOKENS("\r\n\f\t2", whitespace, number(IntegerValueType, 2, NoSign)); | 210 TEST_TOKENS("\r\n\f\t2", whitespace(), number(IntegerValueType, 2, NoSign)); |
| 199 } | 211 } |
| 200 | 212 |
| 201 TEST(CSSTokenizerTest, Escapes) | 213 TEST(CSSTokenizerTest, Escapes) |
| 202 { | 214 { |
| 203 TEST_TOKENS("hel\\6Co", ident("hello")); | 215 TEST_TOKENS("hel\\6Co", ident("hello")); |
| 204 TEST_TOKENS("\\26 B", ident("&B")); | 216 TEST_TOKENS("\\26 B", ident("&B")); |
| 205 TEST_TOKENS("'hel\\6c o'", string("hello")); | 217 TEST_TOKENS("'hel\\6c o'", string("hello")); |
| 206 TEST_TOKENS("'spac\\65\r\ns'", string("spaces")); | 218 TEST_TOKENS("'spac\\65\r\ns'", string("spaces")); |
| 207 TEST_TOKENS("spac\\65\r\ns", ident("spaces")); | 219 TEST_TOKENS("spac\\65\r\ns", ident("spaces")); |
| 208 TEST_TOKENS("spac\\65\n\rs", ident("space"), whitespace, ident("s")); | 220 TEST_TOKENS("spac\\65\n\rs", ident("space"), whitespace(), ident("s")); |
| 209 TEST_TOKENS("sp\\61\tc\\65\fs", ident("spaces")); | 221 TEST_TOKENS("sp\\61\tc\\65\fs", ident("spaces")); |
| 210 TEST_TOKENS("hel\\6c o", ident("hell"), whitespace, ident("o")); | 222 TEST_TOKENS("hel\\6c o", ident("hell"), whitespace(), ident("o")); |
| 211 TEST_TOKENS("test\\\n", ident("test"), delim('\\'), whitespace); | 223 TEST_TOKENS("test\\\n", ident("test"), delim('\\'), whitespace()); |
| 212 TEST_TOKENS("test\\D799", ident("test" + fromUChar32(0xD799))); | 224 TEST_TOKENS("test\\D799", ident("test" + fromUChar32(0xD799))); |
| 213 TEST_TOKENS("\\E000", ident(fromUChar32(0xE000))); | 225 TEST_TOKENS("\\E000", ident(fromUChar32(0xE000))); |
| 214 TEST_TOKENS("te\\s\\t", ident("test")); | 226 TEST_TOKENS("te\\s\\t", ident("test")); |
| 215 TEST_TOKENS("spaces\\ in\\\tident", ident("spaces in\tident")); | 227 TEST_TOKENS("spaces\\ in\\\tident", ident("spaces in\tident")); |
| 216 TEST_TOKENS("\\.\\,\\:\\!", ident(".,:!")); | 228 TEST_TOKENS("\\.\\,\\:\\!", ident(".,:!")); |
| 217 TEST_TOKENS("\\\r", delim('\\'), whitespace); | 229 TEST_TOKENS("\\\r", delim('\\'), whitespace()); |
| 218 TEST_TOKENS("\\\f", delim('\\'), whitespace); | 230 TEST_TOKENS("\\\f", delim('\\'), whitespace()); |
| 219 TEST_TOKENS("\\\r\n", delim('\\'), whitespace); | 231 TEST_TOKENS("\\\r\n", delim('\\'), whitespace()); |
| 220 String replacement = fromUChar32(0xFFFD); | 232 String replacement = fromUChar32(0xFFFD); |
| 221 TEST_TOKENS(String("null\\\0", 6), ident("null" + replacement)); | 233 TEST_TOKENS(String("null\\\0", 6), ident("null" + replacement)); |
| 222 TEST_TOKENS(String("null\\\0\0", 7), ident("null" + replacement + replacemen
t)); | 234 TEST_TOKENS(String("null\\\0\0", 7), ident("null" + replacement + replacemen
t)); |
| 223 TEST_TOKENS("null\\0", ident("null" + replacement)); | 235 TEST_TOKENS("null\\0", ident("null" + replacement)); |
| 224 TEST_TOKENS("null\\0000", ident("null" + replacement)); | 236 TEST_TOKENS("null\\0000", ident("null" + replacement)); |
| 225 TEST_TOKENS("large\\110000", ident("large" + replacement)); | 237 TEST_TOKENS("large\\110000", ident("large" + replacement)); |
| 226 TEST_TOKENS("large\\23456a", ident("large" + replacement)); | 238 TEST_TOKENS("large\\23456a", ident("large" + replacement)); |
| 227 TEST_TOKENS("surrogate\\D800", ident("surrogate" + replacement)); | 239 TEST_TOKENS("surrogate\\D800", ident("surrogate" + replacement)); |
| 228 TEST_TOKENS("surrogate\\0DABC", ident("surrogate" + replacement)); | 240 TEST_TOKENS("surrogate\\0DABC", ident("surrogate" + replacement)); |
| 229 TEST_TOKENS("\\00DFFFsurrogate", ident(replacement + "surrogate")); | 241 TEST_TOKENS("\\00DFFFsurrogate", ident(replacement + "surrogate")); |
| 230 TEST_TOKENS("\\10fFfF", ident(fromUChar32(0x10ffff))); | 242 TEST_TOKENS("\\10fFfF", ident(fromUChar32(0x10ffff))); |
| 231 TEST_TOKENS("\\10fFfF0", ident(fromUChar32(0x10ffff) + "0")); | 243 TEST_TOKENS("\\10fFfF0", ident(fromUChar32(0x10ffff) + "0")); |
| 232 TEST_TOKENS("\\10000000", ident(fromUChar32(0x100000) + "00")); | 244 TEST_TOKENS("\\10000000", ident(fromUChar32(0x100000) + "00")); |
| 233 TEST_TOKENS("eof\\", ident("eof" + replacement)); | 245 TEST_TOKENS("eof\\", ident("eof" + replacement)); |
| 234 } | 246 } |
| 235 | 247 |
| 236 TEST(CSSTokenizerTest, IdentToken) | 248 TEST(CSSTokenizerTest, IdentToken) |
| 237 { | 249 { |
| 238 TEST_TOKENS("simple-ident", ident("simple-ident")); | 250 TEST_TOKENS("simple-ident", ident("simple-ident")); |
| 239 TEST_TOKENS("testing123", ident("testing123")); | 251 TEST_TOKENS("testing123", ident("testing123")); |
| 240 TEST_TOKENS("hello!", ident("hello"), delim('!')); | 252 TEST_TOKENS("hello!", ident("hello"), delim('!')); |
| 241 TEST_TOKENS("world\5", ident("world"), delim('\5')); | 253 TEST_TOKENS("world\5", ident("world"), delim('\5')); |
| 242 TEST_TOKENS("_under score", ident("_under"), whitespace, ident("score")); | 254 TEST_TOKENS("_under score", ident("_under"), whitespace(), ident("score")); |
| 243 TEST_TOKENS("-_underscore", ident("-_underscore")); | 255 TEST_TOKENS("-_underscore", ident("-_underscore")); |
| 244 TEST_TOKENS("-text", ident("-text")); | 256 TEST_TOKENS("-text", ident("-text")); |
| 245 TEST_TOKENS("-\\6d", ident("-m")); | 257 TEST_TOKENS("-\\6d", ident("-m")); |
| 246 TEST_TOKENS("--abc", ident("--abc")); | 258 TEST_TOKENS("--abc", ident("--abc")); |
| 247 TEST_TOKENS("--", ident("--")); | 259 TEST_TOKENS("--", ident("--")); |
| 248 TEST_TOKENS("--11", ident("--11")); | 260 TEST_TOKENS("--11", ident("--11")); |
| 249 TEST_TOKENS("---", ident("---")); | 261 TEST_TOKENS("---", ident("---")); |
| 250 TEST_TOKENS(fromUChar32(0x2003), ident(fromUChar32(0x2003))); // em-space | 262 TEST_TOKENS(fromUChar32(0x2003), ident(fromUChar32(0x2003))); // em-space |
| 251 TEST_TOKENS(fromUChar32(0xA0), ident(fromUChar32(0xA0))); // non-breaking space | 263 TEST_TOKENS(fromUChar32(0xA0), ident(fromUChar32(0xA0))); // non-breaking space |
| 252 TEST_TOKENS(fromUChar32(0x1234), ident(fromUChar32(0x1234))); | 264 TEST_TOKENS(fromUChar32(0x1234), ident(fromUChar32(0x1234))); |
| 253 TEST_TOKENS(fromUChar32(0x12345), ident(fromUChar32(0x12345))); | 265 TEST_TOKENS(fromUChar32(0x12345), ident(fromUChar32(0x12345))); |
| 254 TEST_TOKENS(String("\0", 1), ident(fromUChar32(0xFFFD))); | 266 TEST_TOKENS(String("\0", 1), ident(fromUChar32(0xFFFD))); |
| 255 TEST_TOKENS(String("ab\0c", 4), ident("ab" + fromUChar32(0xFFFD) + "c")); | 267 TEST_TOKENS(String("ab\0c", 4), ident("ab" + fromUChar32(0xFFFD) + "c")); |
| 256 TEST_TOKENS(String("ab\0c", 4), ident("ab" + fromUChar32(0xFFFD) + "c")); | 268 TEST_TOKENS(String("ab\0c", 4), ident("ab" + fromUChar32(0xFFFD) + "c")); |
| 257 } | 269 } |
| 258 | 270 |
| 259 TEST(CSSTokenizerTest, FunctionToken) | 271 TEST(CSSTokenizerTest, FunctionToken) |
| 260 { | 272 { |
| 261 TEST_TOKENS("scale(2)", function("scale"), number(IntegerValueType, 2, NoSig
n), rightParenthesis); | 273 TEST_TOKENS("scale(2)", function("scale"), number(IntegerValueType, 2, NoSig
n), rightParenthesis()); |
| 262 TEST_TOKENS("foo-bar\\ baz(", function("foo-bar baz")); | 274 TEST_TOKENS("foo-bar\\ baz(", function("foo-bar baz")); |
| 263 TEST_TOKENS("fun\\(ction(", function("fun(ction")); | 275 TEST_TOKENS("fun\\(ction(", function("fun(ction")); |
| 264 TEST_TOKENS("-foo(", function("-foo")); | 276 TEST_TOKENS("-foo(", function("-foo")); |
| 265 TEST_TOKENS("url(\"foo.gif\"", function("url"), string("foo.gif")); | 277 TEST_TOKENS("url(\"foo.gif\"", function("url"), string("foo.gif")); |
| 266 TEST_TOKENS("foo( \'bar.gif\'", function("foo"), whitespace, string("bar.gi
f")); | 278 TEST_TOKENS("foo( \'bar.gif\'", function("foo"), whitespace(), string("bar.
gif")); |
| 267 // To simplify implementation we drop the whitespace in function(url),whites
pace,string() | 279 // To simplify implementation we drop the whitespace in function(url),whites
pace,string() |
| 268 TEST_TOKENS("url( \'bar.gif\'", function("url"), string("bar.gif")); | 280 TEST_TOKENS("url( \'bar.gif\'", function("url"), string("bar.gif")); |
| 269 } | 281 } |
| 270 | 282 |
| 271 TEST(CSSTokenizerTest, AtKeywordToken) | 283 TEST(CSSTokenizerTest, AtKeywordToken) |
| 272 { | 284 { |
| 273 TEST_TOKENS("@at-keyword", atKeyword("at-keyword")); | 285 TEST_TOKENS("@at-keyword", atKeyword("at-keyword")); |
| 274 TEST_TOKENS("@testing123", atKeyword("testing123")); | 286 TEST_TOKENS("@testing123", atKeyword("testing123")); |
| 275 TEST_TOKENS("@hello!", atKeyword("hello"), delim('!')); | 287 TEST_TOKENS("@hello!", atKeyword("hello"), delim('!')); |
| 276 TEST_TOKENS("@-text", atKeyword("-text")); | 288 TEST_TOKENS("@-text", atKeyword("-text")); |
| (...skipping 13 matching lines...) |
| 290 TEST_TOKENS("url(foo.gif)", url("foo.gif")); | 302 TEST_TOKENS("url(foo.gif)", url("foo.gif")); |
| 291 TEST_TOKENS("urL(https://example.com/cats.png)", url("https://example.com/ca
ts.png")); | 303 TEST_TOKENS("urL(https://example.com/cats.png)", url("https://example.com/ca
ts.png")); |
| 292 TEST_TOKENS("uRl(what-a.crazy^URL~this\\ is!)", url("what-a.crazy^URL~this i
s!")); | 304 TEST_TOKENS("uRl(what-a.crazy^URL~this\\ is!)", url("what-a.crazy^URL~this i
s!")); |
| 293 TEST_TOKENS("uRL(123#test)", url("123#test")); | 305 TEST_TOKENS("uRL(123#test)", url("123#test")); |
| 294 TEST_TOKENS("Url(escapes\\ \\\"\\'\\)\\()", url("escapes \"')(")); | 306 TEST_TOKENS("Url(escapes\\ \\\"\\'\\)\\()", url("escapes \"')(")); |
| 295 TEST_TOKENS("UrL( whitespace )", url("whitespace")); | 307 TEST_TOKENS("UrL( whitespace )", url("whitespace")); |
| 296 TEST_TOKENS("URl( whitespace-eof ", url("whitespace-eof")); | 308 TEST_TOKENS("URl( whitespace-eof ", url("whitespace-eof")); |
| 297 TEST_TOKENS("URL(eof", url("eof")); | 309 TEST_TOKENS("URL(eof", url("eof")); |
| 298 TEST_TOKENS("url(not/*a*/comment)", url("not/*a*/comment")); | 310 TEST_TOKENS("url(not/*a*/comment)", url("not/*a*/comment")); |
| 299 TEST_TOKENS("urL()", url("")); | 311 TEST_TOKENS("urL()", url("")); |
| 300 TEST_TOKENS("uRl(white space),", badUrl, comma); | 312 TEST_TOKENS("uRl(white space),", badUrl(), comma()); |
| 301 TEST_TOKENS("Url(b(ad),", badUrl, comma); | 313 TEST_TOKENS("Url(b(ad),", badUrl(), comma()); |
| 302 TEST_TOKENS("uRl(ba'd):", badUrl, colon); | 314 TEST_TOKENS("uRl(ba'd):", badUrl(), colon()); |
| 303 TEST_TOKENS("urL(b\"ad):", badUrl, colon); | 315 TEST_TOKENS("urL(b\"ad):", badUrl(), colon()); |
| 304 TEST_TOKENS("uRl(b\"ad):", badUrl, colon); | 316 TEST_TOKENS("uRl(b\"ad):", badUrl(), colon()); |
| 305 TEST_TOKENS("Url(b\\\rad):", badUrl, colon); | 317 TEST_TOKENS("Url(b\\\rad):", badUrl(), colon()); |
| 306 TEST_TOKENS("url(b\\\nad):", badUrl, colon); | 318 TEST_TOKENS("url(b\\\nad):", badUrl(), colon()); |
| 307 TEST_TOKENS("url(/*'bad')*/", badUrl, delim('*'), delim('/')); | 319 TEST_TOKENS("url(/*'bad')*/", badUrl(), delim('*'), delim('/')); |
| 308 TEST_TOKENS("url(ba'd\\\\))", badUrl, rightParenthesis); | 320 TEST_TOKENS("url(ba'd\\\\))", badUrl(), rightParenthesis()); |
| 309 } | 321 } |
| 310 | 322 |
| 311 TEST(CSSTokenizerTest, StringToken) | 323 TEST(CSSTokenizerTest, StringToken) |
| 312 { | 324 { |
| 313 TEST_TOKENS("'text'", string("text")); | 325 TEST_TOKENS("'text'", string("text")); |
| 314 TEST_TOKENS("\"text\"", string("text")); | 326 TEST_TOKENS("\"text\"", string("text")); |
| 315 TEST_TOKENS("'testing, 123!'", string("testing, 123!")); | 327 TEST_TOKENS("'testing, 123!'", string("testing, 123!")); |
| 316 TEST_TOKENS("'es\\'ca\\\"pe'", string("es'ca\"pe")); | 328 TEST_TOKENS("'es\\'ca\\\"pe'", string("es'ca\"pe")); |
| 317 TEST_TOKENS("'\"quotes\"'", string("\"quotes\"")); | 329 TEST_TOKENS("'\"quotes\"'", string("\"quotes\"")); |
| 318 TEST_TOKENS("\"'quotes'\"", string("'quotes'")); | 330 TEST_TOKENS("\"'quotes'\"", string("'quotes'")); |
| 319 TEST_TOKENS("\"mismatch'", string("mismatch'")); | 331 TEST_TOKENS("\"mismatch'", string("mismatch'")); |
| 320 TEST_TOKENS("'text\5\t\13'", string("text\5\t\13")); | 332 TEST_TOKENS("'text\5\t\13'", string("text\5\t\13")); |
| 321 TEST_TOKENS("\"end on eof", string("end on eof")); | 333 TEST_TOKENS("\"end on eof", string("end on eof")); |
| 322 TEST_TOKENS("'esca\\\nped'", string("escaped")); | 334 TEST_TOKENS("'esca\\\nped'", string("escaped")); |
| 323 TEST_TOKENS("\"esc\\\faped\"", string("escaped")); | 335 TEST_TOKENS("\"esc\\\faped\"", string("escaped")); |
| 324 TEST_TOKENS("'new\\\rline'", string("newline")); | 336 TEST_TOKENS("'new\\\rline'", string("newline")); |
| 325 TEST_TOKENS("\"new\\\r\nline\"", string("newline")); | 337 TEST_TOKENS("\"new\\\r\nline\"", string("newline")); |
| 326 TEST_TOKENS("'bad\nstring", badString, whitespace, ident("string")); | 338 TEST_TOKENS("'bad\nstring", badString(), whitespace(), ident("string")); |
| 327 TEST_TOKENS("'bad\rstring", badString, whitespace, ident("string")); | 339 TEST_TOKENS("'bad\rstring", badString(), whitespace(), ident("string")); |
| 328 TEST_TOKENS("'bad\r\nstring", badString, whitespace, ident("string")); | 340 TEST_TOKENS("'bad\r\nstring", badString(), whitespace(), ident("string")); |
| 329 TEST_TOKENS("'bad\fstring", badString, whitespace, ident("string")); | 341 TEST_TOKENS("'bad\fstring", badString(), whitespace(), ident("string")); |
| 330 TEST_TOKENS(String("'\0'", 3), string(fromUChar32(0xFFFD))); | 342 TEST_TOKENS(String("'\0'", 3), string(fromUChar32(0xFFFD))); |
| 331 TEST_TOKENS(String("'hel\0lo'", 8), string("hel" + fromUChar32(0xFFFD) + "lo
")); | 343 TEST_TOKENS(String("'hel\0lo'", 8), string("hel" + fromUChar32(0xFFFD) + "lo
")); |
| 332 TEST_TOKENS(String("'h\\65l\0lo'", 10), string("hel" + fromUChar32(0xFFFD) +
"lo")); | 344 TEST_TOKENS(String("'h\\65l\0lo'", 10), string("hel" + fromUChar32(0xFFFD) +
"lo")); |
| 333 } | 345 } |
| 334 | 346 |
| 335 TEST(CSSTokenizerTest, HashToken) | 347 TEST(CSSTokenizerTest, HashToken) |
| 336 { | 348 { |
| 337 TEST_TOKENS("#id-selector", hash("id-selector", HashTokenId)); | 349 TEST_TOKENS("#id-selector", hash("id-selector", HashTokenId)); |
| 338 TEST_TOKENS("#FF7700", hash("FF7700", HashTokenId)); | 350 TEST_TOKENS("#FF7700", hash("FF7700", HashTokenId)); |
| 339 TEST_TOKENS("#3377FF", hash("3377FF", HashTokenUnrestricted)); | 351 TEST_TOKENS("#3377FF", hash("3377FF", HashTokenUnrestricted)); |
| 340 TEST_TOKENS("#\\ ", hash(" ", HashTokenId)); | 352 TEST_TOKENS("#\\ ", hash(" ", HashTokenId)); |
| 341 TEST_TOKENS("# ", delim('#'), whitespace); | 353 TEST_TOKENS("# ", delim('#'), whitespace()); |
| 342 TEST_TOKENS("#\\\n", delim('#'), delim('\\'), whitespace); | 354 TEST_TOKENS("#\\\n", delim('#'), delim('\\'), whitespace()); |
| 343 TEST_TOKENS("#\\\r\n", delim('#'), delim('\\'), whitespace); | 355 TEST_TOKENS("#\\\r\n", delim('#'), delim('\\'), whitespace()); |
| 344 TEST_TOKENS("#!", delim('#'), delim('!')); | 356 TEST_TOKENS("#!", delim('#'), delim('!')); |
| 345 } | 357 } |
| 346 | 358 |
| 347 TEST(CSSTokenizerTest, NumberToken) | 359 TEST(CSSTokenizerTest, NumberToken) |
| 348 { | 360 { |
| 349 TEST_TOKENS("10", number(IntegerValueType, 10, NoSign)); | 361 TEST_TOKENS("10", number(IntegerValueType, 10, NoSign)); |
| 350 TEST_TOKENS("12.0", number(NumberValueType, 12, NoSign)); | 362 TEST_TOKENS("12.0", number(NumberValueType, 12, NoSign)); |
| 351 TEST_TOKENS("+45.6", number(NumberValueType, 45.6, PlusSign)); | 363 TEST_TOKENS("+45.6", number(NumberValueType, 45.6, PlusSign)); |
| 352 TEST_TOKENS("-7", number(IntegerValueType, -7, MinusSign)); | 364 TEST_TOKENS("-7", number(IntegerValueType, -7, MinusSign)); |
| 353 TEST_TOKENS("010", number(IntegerValueType, 10, NoSign)); | 365 TEST_TOKENS("010", number(IntegerValueType, 10, NoSign)); |
| 354 TEST_TOKENS("10e0", number(NumberValueType, 10, NoSign)); | 366 TEST_TOKENS("10e0", number(NumberValueType, 10, NoSign)); |
| 355 TEST_TOKENS("12e3", number(NumberValueType, 12000, NoSign)); | 367 TEST_TOKENS("12e3", number(NumberValueType, 12000, NoSign)); |
| 356 TEST_TOKENS("3e+1", number(NumberValueType, 30, NoSign)); | 368 TEST_TOKENS("3e+1", number(NumberValueType, 30, NoSign)); |
| 357 TEST_TOKENS("12E-1", number(NumberValueType, 1.2, NoSign)); | 369 TEST_TOKENS("12E-1", number(NumberValueType, 1.2, NoSign)); |
| 358 TEST_TOKENS(".7", number(NumberValueType, 0.7, NoSign)); | 370 TEST_TOKENS(".7", number(NumberValueType, 0.7, NoSign)); |
| 359 TEST_TOKENS("-.3", number(NumberValueType, -0.3, MinusSign)); | 371 TEST_TOKENS("-.3", number(NumberValueType, -0.3, MinusSign)); |
| 360 TEST_TOKENS("+637.54e-2", number(NumberValueType, 6.3754, PlusSign)); | 372 TEST_TOKENS("+637.54e-2", number(NumberValueType, 6.3754, PlusSign)); |
| 361 TEST_TOKENS("-12.34E+2", number(NumberValueType, -1234, MinusSign)); | 373 TEST_TOKENS("-12.34E+2", number(NumberValueType, -1234, MinusSign)); |
| 362 | 374 |
| 363 TEST_TOKENS("+ 5", delim('+'), whitespace, number(IntegerValueType, 5, NoSig
n)); | 375 TEST_TOKENS("+ 5", delim('+'), whitespace(), number(IntegerValueType, 5, NoS
ign)); |
| 364 TEST_TOKENS("-+12", delim('-'), number(IntegerValueType, 12, PlusSign)); | 376 TEST_TOKENS("-+12", delim('-'), number(IntegerValueType, 12, PlusSign)); |
| 365 TEST_TOKENS("+-21", delim('+'), number(IntegerValueType, -21, MinusSign)); | 377 TEST_TOKENS("+-21", delim('+'), number(IntegerValueType, -21, MinusSign)); |
| 366 TEST_TOKENS("++22", delim('+'), number(IntegerValueType, 22, PlusSign)); | 378 TEST_TOKENS("++22", delim('+'), number(IntegerValueType, 22, PlusSign)); |
| 367 TEST_TOKENS("13.", number(IntegerValueType, 13, NoSign), delim('.')); | 379 TEST_TOKENS("13.", number(IntegerValueType, 13, NoSign), delim('.')); |
| 368 TEST_TOKENS("1.e2", number(IntegerValueType, 1, NoSign), delim('.'), ident("
e2")); | 380 TEST_TOKENS("1.e2", number(IntegerValueType, 1, NoSign), delim('.'), ident("
e2")); |
| 369 TEST_TOKENS("2e3.5", number(NumberValueType, 2000, NoSign), number(NumberVal
ueType, 0.5, NoSign)); | 381 TEST_TOKENS("2e3.5", number(NumberValueType, 2000, NoSign), number(NumberVal
ueType, 0.5, NoSign)); |
| 370 TEST_TOKENS("2e3.", number(NumberValueType, 2000, NoSign), delim('.')); | 382 TEST_TOKENS("2e3.", number(NumberValueType, 2000, NoSign), delim('.')); |
| 371 TEST_TOKENS("1000000000000000000000000", number(IntegerValueType, 1e24, NoSi
gn)); | 383 TEST_TOKENS("1000000000000000000000000", number(IntegerValueType, 1e24, NoSi
gn)); |
| 372 } | 384 } |
| 373 | 385 |
| 374 TEST(CSSTokenizerTest, DimensionToken) | 386 TEST(CSSTokenizerTest, DimensionToken) |
| 375 { | 387 { |
| 376 TEST_TOKENS("10px", dimension(IntegerValueType, 10, "px")); | 388 TEST_TOKENS("10px", dimension(IntegerValueType, 10, "px")); |
| 377 TEST_TOKENS("12.0em", dimension(NumberValueType, 12, "em")); | 389 TEST_TOKENS("12.0em", dimension(NumberValueType, 12, "em")); |
| 378 TEST_TOKENS("-12.0em", dimension(NumberValueType, -12, "em")); | 390 TEST_TOKENS("-12.0em", dimension(NumberValueType, -12, "em")); |
| 379 TEST_TOKENS("+45.6__qem", dimension(NumberValueType, 45.6, "__qem")); | 391 TEST_TOKENS("+45.6__qem", dimension(NumberValueType, 45.6, "__qem")); |
| 380 TEST_TOKENS("5e", dimension(IntegerValueType, 5, "e")); | 392 TEST_TOKENS("5e", dimension(IntegerValueType, 5, "e")); |
| 381 TEST_TOKENS("5px-2px", dimension(IntegerValueType, 5, "px-2px")); | 393 TEST_TOKENS("5px-2px", dimension(IntegerValueType, 5, "px-2px")); |
| 382 TEST_TOKENS("5e-", dimension(IntegerValueType, 5, "e-")); | 394 TEST_TOKENS("5e-", dimension(IntegerValueType, 5, "e-")); |
| 383 TEST_TOKENS("5\\ ", dimension(IntegerValueType, 5, " ")); | 395 TEST_TOKENS("5\\ ", dimension(IntegerValueType, 5, " ")); |
| 384 TEST_TOKENS("40\\70\\78", dimension(IntegerValueType, 40, "px")); | 396 TEST_TOKENS("40\\70\\78", dimension(IntegerValueType, 40, "px")); |
| 385 TEST_TOKENS("4e3e2", dimension(NumberValueType, 4000, "e2")); | 397 TEST_TOKENS("4e3e2", dimension(NumberValueType, 4000, "e2")); |
| 386 TEST_TOKENS("0x10px", dimension(IntegerValueType, 0, "x10px")); | 398 TEST_TOKENS("0x10px", dimension(IntegerValueType, 0, "x10px")); |
| 387 TEST_TOKENS("4unit ", dimension(IntegerValueType, 4, "unit"), whitespace); | 399 TEST_TOKENS("4unit ", dimension(IntegerValueType, 4, "unit"), whitespace()); |
| 388 TEST_TOKENS("5e+", dimension(IntegerValueType, 5, "e"), delim('+')); | 400 TEST_TOKENS("5e+", dimension(IntegerValueType, 5, "e"), delim('+')); |
| 389 TEST_TOKENS("2e.5", dimension(IntegerValueType, 2, "e"), number(NumberValueT
ype, 0.5, NoSign)); | 401 TEST_TOKENS("2e.5", dimension(IntegerValueType, 2, "e"), number(NumberValueT
ype, 0.5, NoSign)); |
| 390 TEST_TOKENS("2e+.5", dimension(IntegerValueType, 2, "e"), number(NumberValue
Type, 0.5, PlusSign)); | 402 TEST_TOKENS("2e+.5", dimension(IntegerValueType, 2, "e"), number(NumberValue
Type, 0.5, PlusSign)); |
| 391 } | 403 } |
| 392 | 404 |
| 393 TEST(CSSTokenizerTest, PercentageToken) | 405 TEST(CSSTokenizerTest, PercentageToken) |
| 394 { | 406 { |
| 395 TEST_TOKENS("10%", percentage(IntegerValueType, 10)); | 407 TEST_TOKENS("10%", percentage(IntegerValueType, 10)); |
| 396 TEST_TOKENS("+12.0%", percentage(NumberValueType, 12)); | 408 TEST_TOKENS("+12.0%", percentage(NumberValueType, 12)); |
| 397 TEST_TOKENS("-48.99%", percentage(NumberValueType, -48.99)); | 409 TEST_TOKENS("-48.99%", percentage(NumberValueType, -48.99)); |
| (...skipping 22 matching lines...) |
| 420 TEST_TOKENS("u+1??4", unicodeRange(0x100, 0x1ff), number(IntegerValueType, 4
, NoSign)); | 432 TEST_TOKENS("u+1??4", unicodeRange(0x100, 0x1ff), number(IntegerValueType, 4
, NoSign)); |
| 421 TEST_TOKENS("u+z", ident("u"), delim('+'), ident("z")); | 433 TEST_TOKENS("u+z", ident("u"), delim('+'), ident("z")); |
| 422 TEST_TOKENS("u+", ident("u"), delim('+')); | 434 TEST_TOKENS("u+", ident("u"), delim('+')); |
| 423 TEST_TOKENS("u+-543", ident("u"), delim('+'), number(IntegerValueType, -543,
MinusSign)); | 435 TEST_TOKENS("u+-543", ident("u"), delim('+'), number(IntegerValueType, -543,
MinusSign)); |
| 424 } | 436 } |
| 425 | 437 |
| 426 TEST(CSSTokenizerTest, CommentToken) | 438 TEST(CSSTokenizerTest, CommentToken) |
| 427 { | 439 { |
| 428 TEST_TOKENS("/*comment*/a", ident("a")); | 440 TEST_TOKENS("/*comment*/a", ident("a")); |
| 429 TEST_TOKENS("/**\\2f**//", delim('/')); | 441 TEST_TOKENS("/**\\2f**//", delim('/')); |
| 430 TEST_TOKENS("/**y*a*y**/ ", whitespace); | 442 TEST_TOKENS("/**y*a*y**/ ", whitespace()); |
| 431 TEST_TOKENS(",/* \n :) \n */)", comma, rightParenthesis); | 443 TEST_TOKENS(",/* \n :) \n */)", comma(), rightParenthesis()); |
| 432 TEST_TOKENS(":/*/*/", colon); | 444 TEST_TOKENS(":/*/*/", colon()); |
| 433 TEST_TOKENS("/**/*", delim('*')); | 445 TEST_TOKENS("/**/*", delim('*')); |
| 434 TEST_TOKENS(";/******", semicolon); | 446 TEST_TOKENS(";/******", semicolon()); |
| 435 } | 447 } |
| 436 | 448 |
| 437 typedef struct { | 449 typedef struct { |
| 438 const char* input; | 450 const char* input; |
| 439 const unsigned maxLevel; | 451 const unsigned maxLevel; |
| 440 const unsigned finalLevel; | 452 const unsigned finalLevel; |
| 441 } BlockTestCase; | 453 } BlockTestCase; |
| 442 | 454 |
| 443 TEST(CSSTokenizerBlockTest, Basic) | 455 TEST(CSSTokenizerBlockTest, Basic) |
| 444 { | 456 { |
| (...skipping 37 matching lines...) |
| 482 blockWatcher.handleToken(range.consume()); | 494 blockWatcher.handleToken(range.consume()); |
| 483 level = blockWatcher.blockLevel(); | 495 level = blockWatcher.blockLevel(); |
| 484 maxLevel = std::max(level, maxLevel); | 496 maxLevel = std::max(level, maxLevel); |
| 485 } | 497 } |
| 486 ASSERT_EQ(testCases[i].maxLevel, maxLevel); | 498 ASSERT_EQ(testCases[i].maxLevel, maxLevel); |
| 487 ASSERT_EQ(testCases[i].finalLevel, level); | 499 ASSERT_EQ(testCases[i].finalLevel, level); |
| 488 } | 500 } |
| 489 } | 501 } |
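For the block tests, maxLevel is the deepest nesting reached while MediaQueryBlockWatcher consumes the token stream and finalLevel is the nesting still open at the end of input. The exact open/close rules live in MediaQueryBlockWatcher itself; as a rough standalone illustration of the two quantities, here is the same bookkeeping over raw bracket characters (the real test feeds CSSParserTokens to the watcher instead):

#include <algorithm>
#include <iostream>
#include <string>

// Track nesting depth of (, [, { versus ), ], } over a plain string and
// record the maximum depth seen plus the depth left open at the end.
int main()
{
    const std::string input = "(max-width: 800px()), (max-width: 800px)";
    unsigned level = 0;
    unsigned maxLevel = 0;
    for (char c : input) {
        if (c == '(' || c == '[' || c == '{')
            ++level;
        else if ((c == ')' || c == ']' || c == '}') && level)
            --level;
        maxLevel = std::max(level, maxLevel);
    }
    // Prints "maxLevel=2 finalLevel=0" for this input.
    std::cout << "maxLevel=" << maxLevel << " finalLevel=" << level << "\n";
    return 0;
}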
| 490 | 502 |
| 491 } // namespace | 503 } // namespace |