| OLD | NEW |
| 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 library csslib.parser; | 5 library csslib.parser; |
| 6 | 6 |
| 7 import 'dart:math' as math; | 7 import 'dart:math' as math; |
| 8 | 8 |
| 9 import 'package:source_span/source_span.dart'; | 9 import 'package:source_span/source_span.dart'; |
| 10 | 10 |
| 11 import "visitor.dart"; | 11 import 'visitor.dart'; |
| 12 import 'src/messages.dart'; | 12 import 'src/messages.dart'; |
| 13 import 'src/options.dart'; | 13 import 'src/options.dart'; |
| 14 | 14 |
| 15 export 'src/messages.dart' show Message; |
| 15 export 'src/options.dart'; | 16 export 'src/options.dart'; |
| 16 | 17 |
| 17 part 'src/analyzer.dart'; | 18 part 'src/analyzer.dart'; |
| 18 part 'src/polyfill.dart'; | 19 part 'src/polyfill.dart'; |
| 19 part 'src/property.dart'; | 20 part 'src/property.dart'; |
| 20 part 'src/token.dart'; | 21 part 'src/token.dart'; |
| 21 part 'src/tokenizer_base.dart'; | 22 part 'src/tokenizer_base.dart'; |
| 22 part 'src/tokenizer.dart'; | 23 part 'src/tokenizer.dart'; |
| 23 part 'src/tokenkind.dart'; | 24 part 'src/tokenkind.dart'; |
| 24 | 25 |
| 26 enum ClauseType { |
| 27 none, |
| 28 conjunction, |
| 29 disjunction, |
| 30 } |
| 31 |
| 25 /** Used for parser lookup ahead (used for nested selectors Less support). */ | 32 /** Used for parser lookup ahead (used for nested selectors Less support). */ |
| 26 class ParserState extends TokenizerState { | 33 class ParserState extends TokenizerState { |
| 27 final Token peekToken; | 34 final Token peekToken; |
| 28 final Token previousToken; | 35 final Token previousToken; |
| 29 | 36 |
| 30 ParserState(this.peekToken, this.previousToken, Tokenizer tokenizer) | 37 ParserState(this.peekToken, this.previousToken, Tokenizer tokenizer) |
| 31 : super(tokenizer); | 38 : super(tokenizer); |
| 32 } | 39 } |
| 33 | 40 |
| 34 // TODO(jmesserly): this should not be global | 41 // TODO(jmesserly): this should not be global |
| 35 void _createMessages({List<Message> errors, PreprocessorOptions options}) { | 42 void _createMessages({List<Message> errors, PreprocessorOptions options}) { |
| 36 if (errors == null) errors = []; | 43 if (errors == null) errors = []; |
| 37 | 44 |
| 38 if (options == null) { | 45 if (options == null) { |
| 39 options = new PreprocessorOptions(useColors: false, inputFile: 'memory'); | 46 options = new PreprocessorOptions(useColors: false, inputFile: 'memory'); |
| 40 } | 47 } |
| 41 | 48 |
| 42 messages = new Messages(options: options, printHandler: errors.add); | 49 messages = new Messages(options: options, printHandler: errors.add); |
| 43 } | 50 } |
| 44 | 51 |
| 45 /** CSS checked mode enabled. */ | 52 /** CSS checked mode enabled. */ |
| 46 bool get isChecked => messages.options.checked; | 53 bool get isChecked => messages.options.checked; |
| 47 | 54 |
| 48 // TODO(terry): Remove nested name parameter. | 55 // TODO(terry): Remove nested name parameter. |
| 49 /** Parse and analyze the CSS file. */ | 56 /** Parse and analyze the CSS file. */ |
| 50 StyleSheet compile(input, {List<Message> errors, PreprocessorOptions options, | 57 StyleSheet compile(input, |
| 51 bool nested: true, bool polyfill: false, List<StyleSheet> includes: null}) { | 58 {List<Message> errors, |
| 59 PreprocessorOptions options, |
| 60 bool nested: true, |
| 61 bool polyfill: false, |
| 62 List<StyleSheet> includes: null}) { |
| 52 if (includes == null) { | 63 if (includes == null) { |
| 53 includes = []; | 64 includes = []; |
| 54 } | 65 } |
| 55 | 66 |
| 56 var source = _inputAsString(input); | 67 var source = _inputAsString(input); |
| 57 | 68 |
| 58 _createMessages(errors: errors, options: options); | 69 _createMessages(errors: errors, options: options); |
| 59 | 70 |
| 60 var file = new SourceFile(source); | 71 var file = new SourceFile(source); |
| 61 | 72 |
| (...skipping 46 matching lines...) |
| 108 .parseSelector(); | 119 .parseSelector(); |
| 109 } | 120 } |
| 110 | 121 |
| 111 SelectorGroup parseSelectorGroup(input, {List<Message> errors}) { | 122 SelectorGroup parseSelectorGroup(input, {List<Message> errors}) { |
| 112 var source = _inputAsString(input); | 123 var source = _inputAsString(input); |
| 113 | 124 |
| 114 _createMessages(errors: errors); | 125 _createMessages(errors: errors); |
| 115 | 126 |
| 116 var file = new SourceFile(source); | 127 var file = new SourceFile(source); |
| 117 return (new _Parser(file, source) | 128 return (new _Parser(file, source) |
| 118 // TODO(jmesserly): this fix should be applied to the parser. It's tricky | 129 // TODO(jmesserly): this fix should be applied to the parser. It's tricky |
| 119 // because by the time the flag is set one token has already been fetched. | 130 // because by the time the flag is set one token has already been fetched. |
| 120 ..tokenizer.inSelector = true).processSelectorGroup(); | 131 ..tokenizer.inSelector = true) |
| 132 .processSelectorGroup(); |
| 121 } | 133 } |
| 122 | 134 |
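
For context, a minimal usage sketch of the entry points touched above: `compile` with its reformatted named parameters, and `parseSelectorGroup`, both collecting diagnostics through the `errors` list that `_createMessages` wires to `printHandler`. `css.Message` relies on the newly added `export 'src/messages.dart' show Message;`. This snippet is not part of the CL; the stylesheet and selector strings are purely illustrative.

```dart
import 'package:csslib/parser.dart' as css;

main() {
  var errors = <css.Message>[];

  // Parse and analyze a whole stylesheet.
  var styleSheet = css.compile('.box { color: red; }', errors: errors);

  // Parse only a selector group, e.g. for tooling that inspects selectors.
  var selectors = css.parseSelectorGroup('div > .item, #main', errors: errors);

  print('errors: ${errors.length}');
  print('parsed: $styleSheet / $selectors');
}
```
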
| 123 String _inputAsString(input) { | 135 String _inputAsString(input) { |
| 124 String source; | 136 String source; |
| 125 | 137 |
| 126 if (input is String) { | 138 if (input is String) { |
| 127 source = input; | 139 source = input; |
| 128 } else if (input is List) { | 140 } else if (input is List) { |
| 129 // TODO(terry): The parse function needs an "encoding" argument and will | 141 // TODO(terry): The parse function needs an "encoding" argument and will |
| 130 // default to whatever encoding CSS defaults to. | 142 // default to whatever encoding CSS defaults to. |
| (...skipping 20 matching lines...) |
| 151 return source; | 163 return source; |
| 152 } | 164 } |
| 153 | 165 |
| 154 // TODO(terry): Consider removing this class when all usages can be eliminated | 166 // TODO(terry): Consider removing this class when all usages can be eliminated |
| 155 // or replaced with compile API. | 167 // or replaced with compile API. |
| 156 /** Public parsing interface for csslib. */ | 168 /** Public parsing interface for csslib. */ |
| 157 class Parser { | 169 class Parser { |
| 158 final _Parser _parser; | 170 final _Parser _parser; |
| 159 | 171 |
| 160 // TODO(jmesserly): having file and text is redundant. | 172 // TODO(jmesserly): having file and text is redundant. |
| 173 // TODO(rnystrom): baseUrl isn't used. Remove from API. |
| 161 Parser(SourceFile file, String text, {int start: 0, String baseUrl}) | 174 Parser(SourceFile file, String text, {int start: 0, String baseUrl}) |
| 162 : _parser = new _Parser(file, text, start: start, baseUrl: baseUrl); | 175 : _parser = new _Parser(file, text, start: start); |
| 163 | 176 |
| 164 StyleSheet parse() => _parser.parse(); | 177 StyleSheet parse() => _parser.parse(); |
| 165 } | 178 } |
| 166 | 179 |
| 180 // CSS2.1 pseudo-elements which were defined with a single ':'. |
| 181 final _legacyPseudoElements = new Set<String>.from(const [ |
| 182 'after', |
| 183 'before', |
| 184 'first-letter', |
| 185 'first-line', |
| 186 ]); |
| 187 |
| 167 /** A simple recursive descent parser for CSS. */ | 188 /** A simple recursive descent parser for CSS. */ |
| 168 class _Parser { | 189 class _Parser { |
| 169 final Tokenizer tokenizer; | 190 final Tokenizer tokenizer; |
| 170 | 191 |
| 171 /** Base url of CSS file. */ | |
| 172 final String _baseUrl; | |
| 173 | |
| 174 /** | 192 /** |
| 175 * File containing the source being parsed, used to report errors with | 193 * File containing the source being parsed, used to report errors with |
| 176 * source-span locations. | 194 * source-span locations. |
| 177 */ | 195 */ |
| 178 final SourceFile file; | 196 final SourceFile file; |
| 179 | 197 |
| 180 Token _previousToken; | 198 Token _previousToken; |
| 181 Token _peekToken; | 199 Token _peekToken; |
| 182 | 200 |
| 183 _Parser(SourceFile file, String text, {int start: 0, String baseUrl}) | 201 _Parser(SourceFile file, String text, {int start: 0}) |
| 184 : this.file = file, | 202 : this.file = file, |
| 185 _baseUrl = baseUrl, | |
| 186 tokenizer = new Tokenizer(file, text, true, start) { | 203 tokenizer = new Tokenizer(file, text, true, start) { |
| 187 _peekToken = tokenizer.next(); | 204 _peekToken = tokenizer.next(); |
| 188 } | 205 } |
| 189 | 206 |
| 190 /** Main entry point for parsing an entire CSS file. */ | 207 /** Main entry point for parsing an entire CSS file. */ |
| 191 StyleSheet parse() { | 208 StyleSheet parse() { |
| 192 List<TreeNode> productions = []; | 209 List<TreeNode> productions = []; |
| 193 | 210 |
| 194 var start = _peekToken.span; | 211 var start = _peekToken.span; |
| 195 while (!_maybeEat(TokenKind.END_OF_FILE) && !_peekKind(TokenKind.RBRACE)) { | 212 while (!_maybeEat(TokenKind.END_OF_FILE) && !_peekKind(TokenKind.RBRACE)) { |
| (...skipping 153 matching lines...) |
| 349 * : [ONLY | NOT]? S* media_type S* [ AND S* expression ]* | 366 * : [ONLY | NOT]? S* media_type S* [ AND S* expression ]* |
| 350 * | expression [ AND S* expression ]* | 367 * | expression [ AND S* expression ]* |
| 351 * media_type | 368 * media_type |
| 352 * : IDENT | 369 * : IDENT |
| 353 * expression | 370 * expression |
| 354 * : '(' S* media_feature S* [ ':' S* expr ]? ')' S* | 371 * : '(' S* media_feature S* [ ':' S* expr ]? ')' S* |
| 355 * media_feature | 372 * media_feature |
| 356 * : IDENT | 373 * : IDENT |
| 357 */ | 374 */ |
| 358 List<MediaQuery> processMediaQueryList() { | 375 List<MediaQuery> processMediaQueryList() { |
| 359 var mediaQueries = []; | 376 var mediaQueries = <MediaQuery>[]; |
| 360 | 377 |
| 361 bool firstTime = true; | |
| 362 var mediaQuery; | |
| 363 do { | 378 do { |
| 364 mediaQuery = processMediaQuery(firstTime == true); | 379 var mediaQuery = processMediaQuery(); |
| 365 if (mediaQuery != null) { | 380 if (mediaQuery != null) { |
| 366 mediaQueries.add(mediaQuery); | 381 mediaQueries.add(mediaQuery); |
| 367 firstTime = false; | 382 } else { |
| 368 continue; | 383 break; |
| 369 } | 384 } |
| 370 | 385 } while (_maybeEat(TokenKind.COMMA)); |
| 371 // Any more more media types separated by comma. | |
| 372 if (!_maybeEat(TokenKind.COMMA)) break; | |
| 373 | |
| 374 // Yep more media types start again. | |
| 375 firstTime = true; | |
| 376 } while ((!firstTime && mediaQuery != null) || firstTime); | |
| 377 | 386 |
| 378 return mediaQueries; | 387 return mediaQueries; |
| 379 } | 388 } |
| 380 | 389 |
| 381 MediaQuery processMediaQuery([bool startQuery = true]) { | 390 MediaQuery processMediaQuery() { |
| 382 // Grammar: [ONLY | NOT]? S* media_type S* | 391 // Grammar: [ONLY | NOT]? S* media_type S* |
| 383 // [ AND S* MediaExpr ]* | MediaExpr [ AND S* MediaExpr ]* | 392 // [ AND S* MediaExpr ]* | MediaExpr [ AND S* MediaExpr ]* |
| 384 | 393 |
| 385 var start = _peekToken.span; | 394 var start = _peekToken.span; |
| 386 | 395 |
| 387 // Is it a unary media operator? | 396 // Is it a unary media operator? |
| 388 var op = _peekToken.text; | 397 var op = _peekToken.text; |
| 389 var opLen = op.length; | 398 var opLen = op.length; |
| 390 var unaryOp = TokenKind.matchMediaOperator(op, 0, opLen); | 399 var unaryOp = TokenKind.matchMediaOperator(op, 0, opLen); |
| 391 if (unaryOp != -1) { | 400 if (unaryOp != -1) { |
| 392 if (isChecked) { | 401 if (isChecked) { |
| 393 if (startQuery && unaryOp != TokenKind.MEDIA_OP_NOT || | 402 if (unaryOp != TokenKind.MEDIA_OP_NOT || |
| 394 unaryOp != TokenKind.MEDIA_OP_ONLY) { | 403 unaryOp != TokenKind.MEDIA_OP_ONLY) { |
| 395 _warning("Only the unary operators NOT and ONLY allowed", | 404 _warning("Only the unary operators NOT and ONLY allowed", |
| 396 _makeSpan(start)); | 405 _makeSpan(start)); |
| 397 } | 406 } |
| 398 if (!startQuery && unaryOp != TokenKind.MEDIA_OP_AND) { | |
| 399 _warning("Only the binary AND operator allowed", _makeSpan(start)); | |
| 400 } | |
| 401 } | 407 } |
| 402 _next(); | 408 _next(); |
| 403 start = _peekToken.span; | 409 start = _peekToken.span; |
| 404 } | 410 } |
| 405 | 411 |
| 406 var type; | 412 var type; |
| 407 if (startQuery && unaryOp != TokenKind.MEDIA_OP_AND) { | 413 // Get the media type. |
| 408 // Get the media type. | 414 if (_peekIdentifier()) type = identifier(); |
| 409 if (_peekIdentifier()) type = identifier(); | |
| 410 } | |
| 411 | 415 |
| 412 var exprs = []; | 416 var exprs = <MediaExpression>[]; |
| 413 | 417 |
| 414 if (unaryOp == -1 || unaryOp == TokenKind.MEDIA_OP_AND) { | 418 while (true) { |
| 415 var andOp = false; | 419 // Parse AND if query has a media_type or previous expression. |
| 416 while (true) { | 420 var andOp = exprs.isNotEmpty || type != null; |
| 417 var expr = processMediaExpression(andOp); | 421 if (andOp) { |
| 418 if (expr == null) break; | |
| 419 | |
| 420 exprs.add(expr); | |
| 421 op = _peekToken.text; | 422 op = _peekToken.text; |
| 422 opLen = op.length; | 423 opLen = op.length; |
| 423 andOp = TokenKind.matchMediaOperator(op, 0, opLen) == | 424 if (TokenKind.matchMediaOperator(op, 0, opLen) != |
| 424 TokenKind.MEDIA_OP_AND; | 425 TokenKind.MEDIA_OP_AND) { |
| 425 if (!andOp) break; | 426 break; |
| 427 } |
| 426 _next(); | 428 _next(); |
| 427 } | 429 } |
| 430 |
| 431 var expr = processMediaExpression(andOp); |
| 432 if (expr == null) break; |
| 433 |
| 434 exprs.add(expr); |
| 428 } | 435 } |
| 429 | 436 |
| 430 if (unaryOp != -1 || type != null || exprs.length > 0) { | 437 if (unaryOp != -1 || type != null || exprs.length > 0) { |
| 431 return new MediaQuery(unaryOp, type, exprs, _makeSpan(start)); | 438 return new MediaQuery(unaryOp, type, exprs, _makeSpan(start)); |
| 432 } | 439 } |
| 433 return null; | 440 return null; |
| 434 } | 441 } |
| 435 | 442 |
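
The simplified `do { ... } while (_maybeEat(TokenKind.COMMA))` loop above accepts the usual comma-separated query list, each query optionally starting with `only`/`not` and chaining features with `and`. A small sketch (not from the CL, illustrative CSS) of the input it is meant to handle:

```dart
import 'package:csslib/parser.dart' as css;

main() {
  var errors = <css.Message>[];
  // Two queries separated by a comma; the first chains a feature with 'and'.
  css.compile(
      '@media only screen and (max-width: 800px), print {'
      '  .menu { display: none; }'
      '}',
      errors: errors);
  print(errors.isEmpty ? 'media query list parsed' : errors.join('\n'));
}
```
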
| 436 MediaExpression processMediaExpression([bool andOperator = false]) { | 443 MediaExpression processMediaExpression([bool andOperator = false]) { |
| 437 var start = _peekToken.span; | 444 var start = _peekToken.span; |
| 438 | 445 |
| 439 // Grammar: '(' S* media_feature S* [ ':' S* expr ]? ')' S* | 446 // Grammar: '(' S* media_feature S* [ ':' S* expr ]? ')' S* |
| 440 if (_maybeEat(TokenKind.LPAREN)) { | 447 if (_maybeEat(TokenKind.LPAREN)) { |
| 441 if (_peekIdentifier()) { | 448 if (_peekIdentifier()) { |
| 442 var feature = identifier(); // Media feature. | 449 var feature = identifier(); // Media feature. |
| 443 while (_maybeEat(TokenKind.COLON)) { | 450 var exprs = _maybeEat(TokenKind.COLON) |
| 444 var startExpr = _peekToken.span; | 451 ? processExpr() |
| 445 var exprs = processExpr(); | 452 : new Expressions(_makeSpan(_peekToken.span)); |
| 446 if (_maybeEat(TokenKind.RPAREN)) { | 453 if (_maybeEat(TokenKind.RPAREN)) { |
| 447 return new MediaExpression( | 454 return new MediaExpression( |
| 448 andOperator, feature, exprs, _makeSpan(startExpr)); | 455 andOperator, feature, exprs, _makeSpan(start)); |
| 449 } else if (isChecked) { | 456 } else if (isChecked) { |
| 450 _warning("Missing parenthesis around media expression", | 457 _warning( |
| 451 _makeSpan(start)); | 458 "Missing parenthesis around media expression", _makeSpan(start)); |
| 452 return null; | 459 return null; |
| 453 } | |
| 454 } | 460 } |
| 455 } else if (isChecked) { | 461 } else if (isChecked) { |
| 456 _warning("Missing media feature in media expression", _makeSpan(start)); | 462 _warning("Missing media feature in media expression", _makeSpan(start)); |
| 457 } | 463 } |
| 458 } | 464 } |
| 459 return null; | 465 return null; |
| 460 } | 466 } |
| 461 | 467 |
| 462 /** | 468 /** |
| 463 * Directive grammar: | 469 * Directive grammar: |
| 464 * | 470 * |
| 465 * import: '@import' [string | URI] media_list? | 471 * import: '@import' [string | URI] media_list? |
| 466 * media: '@media' media_query_list '{' ruleset '}' | 472 * media: '@media' media_query_list '{' ruleset '}' |
| 467 * page: '@page' [':' IDENT]? '{' declarations '}' | 473 * page: '@page' [':' IDENT]? '{' declarations '}' |
| 468 * stylet: '@stylet' IDENT '{' ruleset '}' | 474 * stylet: '@stylet' IDENT '{' ruleset '}' |
| 469 * media_query_list: IDENT [',' IDENT] | 475 * media_query_list: IDENT [',' IDENT] |
| 470 * keyframes: '@-webkit-keyframes ...' (see grammar below). | 476 * keyframes: '@-webkit-keyframes ...' (see grammar below). |
| 471 * font_face: '@font-face' '{' declarations '}' | 477 * font_face: '@font-face' '{' declarations '}' |
| 472 * namespace: '@namespace name url("xmlns") | 478 * namespace: '@namespace name url("xmlns") |
| 473 * host: '@host '{' ruleset '}' | 479 * host: '@host '{' ruleset '}' |
| 474 * mixin: '@mixin name [(args,...)] '{' declarations/ruleset '}' | 480 * mixin: '@mixin name [(args,...)] '{' declarations/ruleset '}' |
| 475 * include: '@include name [(@arg,@arg1)] | 481 * include: '@include name [(@arg,@arg1)] |
| 476 * '@include name [(@arg...)] | 482 * '@include name [(@arg...)] |
| 477 * content '@content' | 483 * content: '@content' |
| 484 * -moz-document: '@-moz-document' [ <url> | url-prefix(<string>) | |
| 485 * domain(<string>) | regexp(<string) ]# '{' |
| 486 * declarations |
| 487 * '}' |
| 488 * supports: '@supports' supports_condition group_rule_body |
| 478 */ | 489 */ |
| 479 processDirective() { | 490 processDirective() { |
| 480 var start = _peekToken.span; | 491 var start = _peekToken.span; |
| 481 | 492 |
| 482 var tokId = processVariableOrDirective(); | 493 var tokId = processVariableOrDirective(); |
| 483 if (tokId is VarDefinitionDirective) return tokId; | 494 if (tokId is VarDefinitionDirective) return tokId; |
| 484 switch (tokId) { | 495 switch (tokId) { |
| 485 case TokenKind.DIRECTIVE_IMPORT: | 496 case TokenKind.DIRECTIVE_IMPORT: |
| 486 _next(); | 497 _next(); |
| 487 | 498 |
| (...skipping 256 matching lines...) |
| 744 } | 755 } |
| 745 | 756 |
| 746 return new NamespaceDirective( | 757 return new NamespaceDirective( |
| 747 prefix != null ? prefix.name : '', namespaceUri, _makeSpan(start)); | 758 prefix != null ? prefix.name : '', namespaceUri, _makeSpan(start)); |
| 748 | 759 |
| 749 case TokenKind.DIRECTIVE_MIXIN: | 760 case TokenKind.DIRECTIVE_MIXIN: |
| 750 return processMixin(); | 761 return processMixin(); |
| 751 | 762 |
| 752 case TokenKind.DIRECTIVE_INCLUDE: | 763 case TokenKind.DIRECTIVE_INCLUDE: |
| 753 return processInclude(_makeSpan(start)); | 764 return processInclude(_makeSpan(start)); |
| 754 | |
| 755 case TokenKind.DIRECTIVE_CONTENT: | 765 case TokenKind.DIRECTIVE_CONTENT: |
| 756 // TODO(terry): TBD | 766 // TODO(terry): TBD |
| 757 _warning("@content not implemented.", _makeSpan(start)); | 767 _warning("@content not implemented.", _makeSpan(start)); |
| 758 return null; | 768 return null; |
| 769 case TokenKind.DIRECTIVE_MOZ_DOCUMENT: |
| 770 return processDocumentDirective(); |
| 771 case TokenKind.DIRECTIVE_SUPPORTS: |
| 772 return processSupportsDirective(); |
| 773 case TokenKind.DIRECTIVE_VIEWPORT: |
| 774 case TokenKind.DIRECTIVE_MS_VIEWPORT: |
| 775 return processViewportDirective(); |
| 759 } | 776 } |
| 760 return null; | 777 return null; |
| 761 } | 778 } |
| 762 | 779 |
| 763 /** | 780 /** |
| 764 * Parse the mixin beginning token offset [start]. Returns a [MixinDefinition] | 781 * Parse the mixin beginning token offset [start]. Returns a [MixinDefinition] |
| 765 * node. | 782 * node. |
| 766 * | 783 * |
| 767 * Mixin grammar: | 784 * Mixin grammar: |
| 768 * | 785 * |
| 769 * @mixin IDENT [(args,...)] '{' | 786 * @mixin IDENT [(args,...)] '{' |
| 770 * [ruleset | property | directive]* | 787 * [ruleset | property | directive]* |
| 771 * '}' | 788 * '}' |
| 772 */ | 789 */ |
| 773 MixinDefinition processMixin() { | 790 MixinDefinition processMixin() { |
| 774 _next(); | 791 _next(); |
| 775 | 792 |
| 776 var name = identifier(); | 793 var name = identifier(); |
| 777 | 794 |
| 778 List<VarDefinitionDirective> params = []; | 795 var params = <TreeNode>[]; |
| 779 // Any parameters? | 796 // Any parameters? |
| 780 if (_maybeEat(TokenKind.LPAREN)) { | 797 if (_maybeEat(TokenKind.LPAREN)) { |
| 781 var mustHaveParam = false; | 798 var mustHaveParam = false; |
| 782 var keepGoing = true; | 799 var keepGoing = true; |
| 783 while (keepGoing) { | 800 while (keepGoing) { |
| 784 var varDef = processVariableOrDirective(mixinParameter: true); | 801 var varDef = processVariableOrDirective(mixinParameter: true); |
| 785 if (varDef is VarDefinitionDirective || varDef is VarDefinition) { | 802 if (varDef is VarDefinitionDirective || varDef is VarDefinition) { |
| 786 params.add(varDef); | 803 params.add(varDef); |
| 787 } else if (mustHaveParam) { | 804 } else if (mustHaveParam) { |
| 788 _warning("Expecting parameter", _makeSpan(_peekToken.span)); | 805 _warning("Expecting parameter", _makeSpan(_peekToken.span)); |
| (...skipping 17 matching lines...) |
| 806 var directive = processDirective(); | 823 var directive = processDirective(); |
| 807 if (directive != null) { | 824 if (directive != null) { |
| 808 productions.add(directive); | 825 productions.add(directive); |
| 809 continue; | 826 continue; |
| 810 } | 827 } |
| 811 | 828 |
| 812 var declGroup = processDeclarations(checkBrace: false); | 829 var declGroup = processDeclarations(checkBrace: false); |
| 813 if (declGroup.declarations.any((decl) { | 830 if (declGroup.declarations.any((decl) { |
| 814 return decl is Declaration && decl is! IncludeMixinAtDeclaration; | 831 return decl is Declaration && decl is! IncludeMixinAtDeclaration; |
| 815 })) { | 832 })) { |
| 816 var newDecls = []; | 833 var newDecls = <Declaration>[]; |
| 817 productions.forEach((include) { | 834 productions.forEach((include) { |
| 818 // If declGroup has items that are declarations then we assume | 835 // If declGroup has items that are declarations then we assume |
| 819 // this mixin is a declaration mixin not a top-level mixin. | 836 // this mixin is a declaration mixin not a top-level mixin. |
| 820 if (include is IncludeDirective) { | 837 if (include is IncludeDirective) { |
| 821 newDecls.add(new IncludeMixinAtDeclaration(include, include.span)); | 838 newDecls.add(new IncludeMixinAtDeclaration(include, include.span)); |
| 822 } else { | 839 } else { |
| 823 _warning("Error mixing of top-level vs declarations mixins", | 840 _warning("Error mixing of top-level vs declarations mixins", |
| 824 _makeSpan(include.span)); | 841 _makeSpan(include.span)); |
| 825 } | 842 } |
| 826 }); | 843 }); |
| (...skipping 109 matching lines...) |
| 936 * | 953 * |
| 937 * @include IDENT [(args,...)]; | 954 * @include IDENT [(args,...)]; |
| 938 */ | 955 */ |
| 939 _next(); | 956 _next(); |
| 940 | 957 |
| 941 var name; | 958 var name; |
| 942 if (_peekIdentifier()) { | 959 if (_peekIdentifier()) { |
| 943 name = identifier(); | 960 name = identifier(); |
| 944 } | 961 } |
| 945 | 962 |
| 946 var params = []; | 963 var params = <List<Expression>>[]; |
| 947 | 964 |
| 948 // Any parameters? Parameters can be multiple terms per argument e.g., | 965 // Any parameters? Parameters can be multiple terms per argument e.g., |
| 949 // 3px solid yellow, green is two parameters: | 966 // 3px solid yellow, green is two parameters: |
| 950 // 1. 3px solid yellow | 967 // 1. 3px solid yellow |
| 951 // 2. green | 968 // 2. green |
| 952 // the first has 3 terms and the second has 1 term. | 969 // the first has 3 terms and the second has 1 term. |
| 953 if (_maybeEat(TokenKind.LPAREN)) { | 970 if (_maybeEat(TokenKind.LPAREN)) { |
| 954 var terms = []; | 971 var terms = <Expression>[]; |
| 955 var expr; | 972 var expr; |
| 956 var keepGoing = true; | 973 var keepGoing = true; |
| 957 while (keepGoing && (expr = processTerm()) != null) { | 974 while (keepGoing && (expr = processTerm()) != null) { |
| 958 // VarUsage is returns as a list | 975 // VarUsage is returns as a list |
| 959 terms.add(expr is List ? expr[0] : expr); | 976 terms.add(expr is List ? expr[0] : expr); |
| 960 keepGoing = !_peekKind(TokenKind.RPAREN); | 977 keepGoing = !_peekKind(TokenKind.RPAREN); |
| 961 if (keepGoing) { | 978 if (keepGoing) { |
| 962 if (_maybeEat(TokenKind.COMMA)) { | 979 if (_maybeEat(TokenKind.COMMA)) { |
| 963 params.add(terms); | 980 params.add(terms); |
| 964 terms = []; | 981 terms = []; |
| 965 } | 982 } |
| 966 } | 983 } |
| 967 } | 984 } |
| 968 params.add(terms); | 985 params.add(terms); |
| 969 _maybeEat(TokenKind.RPAREN); | 986 _maybeEat(TokenKind.RPAREN); |
| 970 } | 987 } |
| 971 | 988 |
| 972 if (eatSemiColon) { | 989 if (eatSemiColon) { |
| 973 _eat(TokenKind.SEMICOLON); | 990 _eat(TokenKind.SEMICOLON); |
| 974 } | 991 } |
| 975 | 992 |
| 976 return new IncludeDirective(name.name, params, span); | 993 return new IncludeDirective(name.name, params, span); |
| 977 } | 994 } |
| 978 | 995 |
| 996 DocumentDirective processDocumentDirective() { |
| 997 var start = _peekToken.span; |
| 998 _next(); // '@-moz-document' |
| 999 var functions = <LiteralTerm>[]; |
| 1000 do { |
| 1001 var function; |
| 1002 |
| 1003 // Consume function token: IDENT '(' |
| 1004 var ident = identifier(); |
| 1005 _eat(TokenKind.LPAREN); |
| 1006 |
| 1007 // Consume function arguments. |
| 1008 if (ident.name == 'url-prefix' || ident.name == 'domain') { |
| 1009 // @-moz-document allows the 'url-prefix' and 'domain' functions to |
| 1010 // omit quotations around their argument, contrary to the standard |
| 1011 // in which they must be strings. To support this we consume a |
| 1012 // string with optional quotation marks, then reapply quotation |
| 1013 // marks so they're present in the emitted CSS. |
| 1014 var argumentStart = _peekToken.span; |
| 1015 var value = processQuotedString(true); |
| 1016 // Don't quote the argument if it's empty. '@-moz-document url-prefix()' |
| 1017 // is a common pattern used for browser detection. |
| 1018 var argument = value.isNotEmpty ? '"$value"' : ''; |
| 1019 var argumentSpan = _makeSpan(argumentStart); |
| 1020 |
| 1021 _eat(TokenKind.RPAREN); |
| 1022 |
| 1023 var arguments = new Expressions(_makeSpan(argumentSpan)) |
| 1024 ..add(new LiteralTerm(argument, argument, argumentSpan)); |
| 1025 function = new FunctionTerm( |
| 1026 ident.name, ident.name, arguments, _makeSpan(ident.span)); |
| 1027 } else { |
| 1028 function = processFunction(ident); |
| 1029 } |
| 1030 |
| 1031 functions.add(function); |
| 1032 } while (_maybeEat(TokenKind.COMMA)); |
| 1033 |
| 1034 _eat(TokenKind.LBRACE); |
| 1035 var groupRuleBody = processGroupRuleBody(); |
| 1036 _eat(TokenKind.RBRACE); |
| 1037 return new DocumentDirective(functions, groupRuleBody, _makeSpan(start)); |
| 1038 } |
| 1039 |
| 1040 SupportsDirective processSupportsDirective() { |
| 1041 var start = _peekToken.span; |
| 1042 _next(); // '@supports' |
| 1043 var condition = processSupportsCondition(); |
| 1044 _eat(TokenKind.LBRACE); |
| 1045 var groupRuleBody = processGroupRuleBody(); |
| 1046 _eat(TokenKind.RBRACE); |
| 1047 return new SupportsDirective(condition, groupRuleBody, _makeSpan(start)); |
| 1048 } |
| 1049 |
| 1050 SupportsCondition processSupportsCondition() { |
| 1051 if (_peekKind(TokenKind.IDENTIFIER)) { |
| 1052 return processSupportsNegation(); |
| 1053 } |
| 1054 |
| 1055 var start = _peekToken.span; |
| 1056 var conditions = <SupportsConditionInParens>[]; |
| 1057 var clauseType = ClauseType.none; |
| 1058 |
| 1059 while (true) { |
| 1060 conditions.add(processSupportsConditionInParens()); |
| 1061 |
| 1062 var type; |
| 1063 var text = _peekToken.text.toLowerCase(); |
| 1064 |
| 1065 if (text == 'and') { |
| 1066 type = ClauseType.conjunction; |
| 1067 } else if (text == 'or') { |
| 1068 type = ClauseType.disjunction; |
| 1069 } else { |
| 1070 break; // Done parsing clause. |
| 1071 } |
| 1072 |
| 1073 if (clauseType == ClauseType.none) { |
| 1074 clauseType = type; // First operand and operator of clause. |
| 1075 } else if (clauseType != type) { |
| 1076 _error("Operators can't be mixed without a layer of parentheses", |
| 1077 _peekToken.span); |
| 1078 break; |
| 1079 } |
| 1080 |
| 1081 _next(); // Consume operator. |
| 1082 } |
| 1083 |
| 1084 if (clauseType == ClauseType.conjunction) { |
| 1085 return new SupportsConjunction(conditions, _makeSpan(start)); |
| 1086 } else if (clauseType == ClauseType.disjunction) { |
| 1087 return new SupportsDisjunction(conditions, _makeSpan(start)); |
| 1088 } else { |
| 1089 return conditions.first; |
| 1090 } |
| 1091 } |
| 1092 |
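
To illustrate the `ClauseType` check above: a condition may chain `and` (or `or`) freely at one level, but mixing the two without a layer of parentheses is expected to be reported via `_error`. A hedged sketch, not part of the CL, using illustrative CSS:

```dart
import 'package:csslib/parser.dart' as css;

main() {
  var errors = <css.Message>[];

  // Homogeneous clause plus a properly parenthesized mix: both should parse.
  css.compile(
      '@supports (display: flex) and ((display: grid) or (float: left)) '
      '{ div { color: red; } }',
      errors: errors);

  // Mixing 'and' with 'or' at the same level: expected to produce an error.
  css.compile(
      '@supports (display: flex) and (display: grid) or (float: left) '
      '{ div { color: red; } }',
      errors: errors);

  errors.forEach(print);
}
```
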
| 1093 SupportsNegation processSupportsNegation() { |
| 1094 var start = _peekToken.span; |
| 1095 var text = _peekToken.text.toLowerCase(); |
| 1096 if (text != 'not') return null; |
| 1097 _next(); // 'not' |
| 1098 var condition = processSupportsConditionInParens(); |
| 1099 return new SupportsNegation(condition, _makeSpan(start)); |
| 1100 } |
| 1101 |
| 1102 SupportsConditionInParens processSupportsConditionInParens() { |
| 1103 var start = _peekToken.span; |
| 1104 _eat(TokenKind.LPAREN); |
| 1105 // Try to parse a condition. |
| 1106 var condition = processSupportsCondition(); |
| 1107 if (condition != null) { |
| 1108 _eat(TokenKind.RPAREN); |
| 1109 return new SupportsConditionInParens.nested(condition, _makeSpan(start)); |
| 1110 } |
| 1111 // Otherwise, parse a declaration. |
| 1112 var declaration = processDeclaration([]); |
| 1113 _eat(TokenKind.RPAREN); |
| 1114 return new SupportsConditionInParens(declaration, _makeSpan(start)); |
| 1115 } |
| 1116 |
| 1117 ViewportDirective processViewportDirective() { |
| 1118 var start = _peekToken.span; |
| 1119 var name = _next().text; |
| 1120 var declarations = processDeclarations(); |
| 1121 return new ViewportDirective(name, declarations, _makeSpan(start)); |
| 1122 } |
| 1123 |
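
The new handlers above cover `@-moz-document`, `@supports` (shown earlier), and `@viewport`/`@-ms-viewport`. A quick sketch of input they are intended to accept, assuming the corresponding `TokenKind.DIRECTIVE_*` entries are registered in the tokenizer as the new switch cases imply; the CSS below is illustrative and not from the CL:

```dart
import 'package:csslib/parser.dart' as css;

main() {
  var errors = <css.Message>[];
  css.compile('''
@-moz-document url-prefix(), domain("example.com") {
  body { color: purple; }
}
@viewport {
  width: device-width;
}
''', errors: errors);
  print(errors.isEmpty
      ? 'document/viewport directives parsed'
      : errors.join('\n'));
}
```
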
| 979 RuleSet processRuleSet([SelectorGroup selectorGroup]) { | 1124 RuleSet processRuleSet([SelectorGroup selectorGroup]) { |
| 980 if (selectorGroup == null) { | 1125 if (selectorGroup == null) { |
| 981 selectorGroup = processSelectorGroup(); | 1126 selectorGroup = processSelectorGroup(); |
| 982 } | 1127 } |
| 983 if (selectorGroup != null) { | 1128 if (selectorGroup != null) { |
| 984 return new RuleSet( | 1129 return new RuleSet( |
| 985 selectorGroup, processDeclarations(), selectorGroup.span); | 1130 selectorGroup, processDeclarations(), selectorGroup.span); |
| 986 } | 1131 } |
| 987 return null; | 1132 return null; |
| 988 } | 1133 } |
| 989 | 1134 |
| 1135 List<TreeNode> processGroupRuleBody() { |
| 1136 var nodes = <TreeNode>[]; |
| 1137 while (!(_peekKind(TokenKind.RBRACE) || _peekKind(TokenKind.END_OF_FILE))) { |
| 1138 var directive = processDirective(); |
| 1139 if (directive != null) { |
| 1140 nodes.add(directive); |
| 1141 continue; |
| 1142 } |
| 1143 var ruleSet = processRuleSet(); |
| 1144 if (ruleSet != null) { |
| 1145 nodes.add(ruleSet); |
| 1146 continue; |
| 1147 } |
| 1148 break; |
| 1149 } |
| 1150 return nodes; |
| 1151 } |
| 1152 |
| 990 /** | 1153 /** |
| 991 * Look ahead to see if what should be a declaration is really a selector. | 1154 * Look ahead to see if what should be a declaration is really a selector. |
| 992 * If it's a selector than it's a nested selector. This support's Less' | 1155 * If it's a selector than it's a nested selector. This support's Less' |
| 993 * nested selector syntax (requires a look ahead). E.g., | 1156 * nested selector syntax (requires a look ahead). E.g., |
| 994 * | 1157 * |
| 995 * div { | 1158 * div { |
| 996 * width : 20px; | 1159 * width : 20px; |
| 997 * span { | 1160 * span { |
| 998 * color: red; | 1161 * color: red; |
| 999 * } | 1162 * } |
| (...skipping 34 matching lines...) |
| 1034 messages = oldMessages; | 1197 messages = oldMessages; |
| 1035 return selGroup; | 1198 return selGroup; |
| 1036 } | 1199 } |
| 1037 } | 1200 } |
| 1038 | 1201 |
| 1039 DeclarationGroup processDeclarations({bool checkBrace: true}) { | 1202 DeclarationGroup processDeclarations({bool checkBrace: true}) { |
| 1040 var start = _peekToken.span; | 1203 var start = _peekToken.span; |
| 1041 | 1204 |
| 1042 if (checkBrace) _eat(TokenKind.LBRACE); | 1205 if (checkBrace) _eat(TokenKind.LBRACE); |
| 1043 | 1206 |
| 1044 List decls = []; | 1207 var decls = <TreeNode>[]; |
| 1045 List dartStyles = []; // List of latest styles exposed to Dart. | 1208 var dartStyles = []; // List of latest styles exposed to Dart. |
| 1046 | 1209 |
| 1047 do { | 1210 do { |
| 1048 var selectorGroup = _nestedSelector(); | 1211 var selectorGroup = _nestedSelector(); |
| 1049 while (selectorGroup != null) { | 1212 while (selectorGroup != null) { |
| 1050 // Nested selector so process as a ruleset. | 1213 // Nested selector so process as a ruleset. |
| 1051 var ruleset = processRuleSet(selectorGroup); | 1214 var ruleset = processRuleSet(selectorGroup); |
| 1052 decls.add(ruleset); | 1215 decls.add(ruleset); |
| 1053 selectorGroup = _nestedSelector(); | 1216 selectorGroup = _nestedSelector(); |
| 1054 } | 1217 } |
| 1055 | 1218 |
| (...skipping 30 matching lines...) |
| 1086 // Dart style not live, ignore these styles in this Declarations. | 1249 // Dart style not live, ignore these styles in this Declarations. |
| 1087 decl.dartStyle = null; | 1250 decl.dartStyle = null; |
| 1088 } | 1251 } |
| 1089 } | 1252 } |
| 1090 } | 1253 } |
| 1091 | 1254 |
| 1092 return new DeclarationGroup(decls, _makeSpan(start)); | 1255 return new DeclarationGroup(decls, _makeSpan(start)); |
| 1093 } | 1256 } |
| 1094 | 1257 |
| 1095 List<DeclarationGroup> processMarginsDeclarations() { | 1258 List<DeclarationGroup> processMarginsDeclarations() { |
| 1096 List groups = []; | 1259 var groups = <DeclarationGroup>[]; |
| 1097 | 1260 |
| 1098 var start = _peekToken.span; | 1261 var start = _peekToken.span; |
| 1099 | 1262 |
| 1100 _eat(TokenKind.LBRACE); | 1263 _eat(TokenKind.LBRACE); |
| 1101 | 1264 |
| 1102 List<Declaration> decls = []; | 1265 List<Declaration> decls = []; |
| 1103 List dartStyles = []; // List of latest styles exposed to Dart. | 1266 List dartStyles = []; // List of latest styles exposed to Dart. |
| 1104 | 1267 |
| 1105 do { | 1268 do { |
| 1106 switch (_peek()) { | 1269 switch (_peek()) { |
| (...skipping 89 matching lines...) |
| 1196 } | 1359 } |
| 1197 | 1360 |
| 1198 /** | 1361 /** |
| 1199 * Return list of selectors | 1362 * Return list of selectors |
| 1200 */ | 1363 */ |
| 1201 Selector processSelector() { | 1364 Selector processSelector() { |
| 1202 var simpleSequences = <SimpleSelectorSequence>[]; | 1365 var simpleSequences = <SimpleSelectorSequence>[]; |
| 1203 var start = _peekToken.span; | 1366 var start = _peekToken.span; |
| 1204 while (true) { | 1367 while (true) { |
| 1205 // First item is never descendant make sure it's COMBINATOR_NONE. | 1368 // First item is never descendant make sure it's COMBINATOR_NONE. |
| 1206 var selectorItem = simpleSelectorSequence(simpleSequences.length == 0); | 1369 var selectorItem = simpleSelectorSequence(simpleSequences.isEmpty); |
| 1207 if (selectorItem != null) { | 1370 if (selectorItem != null) { |
| 1208 simpleSequences.add(selectorItem); | 1371 simpleSequences.add(selectorItem); |
| 1209 } else { | 1372 } else { |
| 1210 break; | 1373 break; |
| 1211 } | 1374 } |
| 1212 } | 1375 } |
| 1213 | 1376 |
| 1214 if (simpleSequences.length > 0) { | 1377 if (simpleSequences.isEmpty) return null; |
| 1215 return new Selector(simpleSequences, _makeSpan(start)); | 1378 |
| 1216 } | 1379 return new Selector(simpleSequences, _makeSpan(start)); |
| 1380 } |
| 1381 |
| 1382 /// Same as [processSelector] but reports an error for each combinator. |
| 1383 /// |
| 1384 /// This is a quick fix for parsing <compound-selectors> until the parser |
| 1385 /// supports Selector Level 4 grammar: |
| 1386 /// https://drafts.csswg.org/selectors-4/#typedef-compound-selector |
| 1387 Selector processCompoundSelector() { |
| 1388 return processSelector() |
| 1389 ..simpleSelectorSequences.forEach((sequence) { |
| 1390 if (!sequence.isCombinatorNone) { |
| 1391 _error('compound selector can not contain combinator', sequence.span); |
| 1392 } |
| 1393 }); |
| 1217 } | 1394 } |
| 1218 | 1395 |
| 1219 simpleSelectorSequence(bool forceCombinatorNone) { | 1396 simpleSelectorSequence(bool forceCombinatorNone) { |
| 1220 var start = _peekToken.span; | 1397 var start = _peekToken.span; |
| 1221 var combinatorType = TokenKind.COMBINATOR_NONE; | 1398 var combinatorType = TokenKind.COMBINATOR_NONE; |
| 1222 var thisOperator = false; | 1399 var thisOperator = false; |
| 1223 | 1400 |
| 1224 switch (_peek()) { | 1401 switch (_peek()) { |
| 1225 case TokenKind.PLUS: | 1402 case TokenKind.PLUS: |
| 1226 _eat(TokenKind.PLUS); | 1403 _eat(TokenKind.PLUS); |
| 1227 combinatorType = TokenKind.COMBINATOR_PLUS; | 1404 combinatorType = TokenKind.COMBINATOR_PLUS; |
| 1228 break; | 1405 break; |
| 1229 case TokenKind.GREATER: | 1406 case TokenKind.GREATER: |
| 1407 // Parse > or >>> |
| 1230 _eat(TokenKind.GREATER); | 1408 _eat(TokenKind.GREATER); |
| 1231 combinatorType = TokenKind.COMBINATOR_GREATER; | 1409 if (_maybeEat(TokenKind.GREATER)) { |
| 1410 _eat(TokenKind.GREATER); |
| 1411 combinatorType = TokenKind.COMBINATOR_SHADOW_PIERCING_DESCENDANT; |
| 1412 } else { |
| 1413 combinatorType = TokenKind.COMBINATOR_GREATER; |
| 1414 } |
| 1232 break; | 1415 break; |
| 1233 case TokenKind.TILDE: | 1416 case TokenKind.TILDE: |
| 1234 _eat(TokenKind.TILDE); | 1417 _eat(TokenKind.TILDE); |
| 1235 combinatorType = TokenKind.COMBINATOR_TILDE; | 1418 combinatorType = TokenKind.COMBINATOR_TILDE; |
| 1236 break; | 1419 break; |
| 1420 case TokenKind.SLASH: |
| 1421 // Parse /deep/ |
| 1422 _eat(TokenKind.SLASH); |
| 1423 var ate = _maybeEat(TokenKind.IDENTIFIER); |
| 1424 var tok = ate ? _previousToken : _peekToken; |
| 1425 if (!(ate && tok.text == 'deep')) { |
| 1426 _error('expected deep, but found ${tok.text}', tok.span); |
| 1427 } |
| 1428 _eat(TokenKind.SLASH); |
| 1429 combinatorType = TokenKind.COMBINATOR_DEEP; |
| 1430 break; |
| 1237 case TokenKind.AMPERSAND: | 1431 case TokenKind.AMPERSAND: |
| 1238 _eat(TokenKind.AMPERSAND); | 1432 _eat(TokenKind.AMPERSAND); |
| 1239 thisOperator = true; | 1433 thisOperator = true; |
| 1240 break; | 1434 break; |
| 1241 } | 1435 } |
| 1242 | 1436 |
| 1243 // Check if WHITESPACE existed between tokens if so we're descendent. | 1437 // Check if WHITESPACE existed between tokens if so we're descendent. |
| 1244 if (combinatorType == TokenKind.COMBINATOR_NONE && !forceCombinatorNone) { | 1438 if (combinatorType == TokenKind.COMBINATOR_NONE && !forceCombinatorNone) { |
| 1245 if (this._previousToken != null && | 1439 if (this._previousToken != null && |
| 1246 this._previousToken.end != this._peekToken.start) { | 1440 this._previousToken.end != this._peekToken.start) { |
| (...skipping 168 matching lines...) |
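
The new `GREATER GREATER GREATER` and `SLASH ... SLASH` branches above add the shadow-piercing descendant combinators (`>>>` and `/deep/`). A small sketch, not part of the CL, of selectors they should now accept (selector text is illustrative, and assumes the tokenizer surfaces the extra `>` and `/` tokens in selector mode as the new cases imply):

```dart
import 'package:csslib/parser.dart' as css;

main() {
  var errors = <css.Message>[];
  css.parseSelectorGroup('x-foo >>> .inner', errors: errors);
  css.parseSelectorGroup('x-bar /deep/ .inner', errors: errors);
  print(errors.isEmpty
      ? 'shadow-piercing combinators parsed'
      : errors.join('\n'));
}
```
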
| 1415 | 1609 |
| 1416 // TODO(terry): If no identifier specified consider optimizing out the | 1610 // TODO(terry): If no identifier specified consider optimizing out the |
| 1417 // : or :: and making this a normal selector. For now, | 1611 // : or :: and making this a normal selector. For now, |
| 1418 // create an empty pseudoName. | 1612 // create an empty pseudoName. |
| 1419 var pseudoName; | 1613 var pseudoName; |
| 1420 if (_peekIdentifier()) { | 1614 if (_peekIdentifier()) { |
| 1421 pseudoName = identifier(); | 1615 pseudoName = identifier(); |
| 1422 } else { | 1616 } else { |
| 1423 return null; | 1617 return null; |
| 1424 } | 1618 } |
| 1619 var name = pseudoName.name.toLowerCase(); |
| 1425 | 1620 |
| 1426 // Functional pseudo? | 1621 // Functional pseudo? |
| 1427 | |
| 1428 if (_peekToken.kind == TokenKind.LPAREN) { | 1622 if (_peekToken.kind == TokenKind.LPAREN) { |
| 1429 if (!pseudoElement && pseudoName.name.toLowerCase() == 'not') { | 1623 if (!pseudoElement && name == 'not') { |
| 1430 _eat(TokenKind.LPAREN); | 1624 _eat(TokenKind.LPAREN); |
| 1431 | 1625 |
| 1432 // Negation : ':NOT(' S* negation_arg S* ')' | 1626 // Negation : ':NOT(' S* negation_arg S* ')' |
| 1433 var negArg = simpleSelector(); | 1627 var negArg = simpleSelector(); |
| 1434 | 1628 |
| 1435 _eat(TokenKind.RPAREN); | 1629 _eat(TokenKind.RPAREN); |
| 1436 return new NegationSelector(negArg, _makeSpan(start)); | 1630 return new NegationSelector(negArg, _makeSpan(start)); |
| 1631 } else if (!pseudoElement && (name == 'host' || name == 'host-context')) { |
| 1632 _eat(TokenKind.LPAREN); |
| 1633 var selector = processCompoundSelector(); |
| 1634 _eat(TokenKind.RPAREN); |
| 1635 var span = _makeSpan(start); |
| 1636 return new PseudoClassFunctionSelector(pseudoName, selector, span); |
| 1437 } else { | 1637 } else { |
| 1438 // Special parsing for expressions in pseudo functions. Minus is used | 1638 // Special parsing for expressions in pseudo functions. Minus is used |
| 1439 // as operator not identifier. | 1639 // as operator not identifier. |
| 1440 // TODO(jmesserly): we need to flip this before we eat the "(" as the | 1640 // TODO(jmesserly): we need to flip this before we eat the "(" as the |
| 1441 // next token will be fetched when we do that. I think we should try to | 1641 // next token will be fetched when we do that. I think we should try to |
| 1442 // refactor so we don't need this boolean; it seems fragile. | 1642 // refactor so we don't need this boolean; it seems fragile. |
| 1443 tokenizer.inSelectorExpression = true; | 1643 tokenizer.inSelectorExpression = true; |
| 1444 _eat(TokenKind.LPAREN); | 1644 _eat(TokenKind.LPAREN); |
| 1445 | 1645 |
| 1446 // Handle function expression. | 1646 // Handle function expression. |
| 1447 var span = _makeSpan(start); | 1647 var span = _makeSpan(start); |
| 1448 var expr = processSelectorExpression(); | 1648 var expr = processSelectorExpression(); |
| 1449 | 1649 |
| 1450 tokenizer.inSelectorExpression = false; | 1650 tokenizer.inSelectorExpression = false; |
| 1451 | 1651 |
| 1452 // Used during selector look-a-head if not a SelectorExpression is | 1652 // Used during selector look-a-head if not a SelectorExpression is |
| 1453 // bad. | 1653 // bad. |
| 1454 if (expr is! SelectorExpression) { | 1654 if (expr is! SelectorExpression) { |
| 1455 _errorExpected("CSS expression"); | 1655 _errorExpected("CSS expression"); |
| 1456 return null; | 1656 return null; |
| 1457 } | 1657 } |
| 1458 | 1658 |
| 1459 _eat(TokenKind.RPAREN); | 1659 _eat(TokenKind.RPAREN); |
| 1460 return (pseudoElement) | 1660 return (pseudoElement) |
| 1461 ? new PseudoElementFunctionSelector(pseudoName, expr, span) | 1661 ? new PseudoElementFunctionSelector(pseudoName, expr, span) |
| 1462 : new PseudoClassFunctionSelector(pseudoName, expr, span); | 1662 : new PseudoClassFunctionSelector(pseudoName, expr, span); |
| 1463 } | 1663 } |
| 1464 } | 1664 } |
| 1465 | 1665 |
| 1466 // TODO(terry): Need to handle specific pseudo class/element name and | 1666 // Treat CSS2.1 pseudo-elements defined with pseudo class syntax as pseudo- |
| 1467 // backward compatible names that are : as well as :: as well as | 1667 // elements for backwards compatibility. |
| 1468 // parameters. Current, spec uses :: for pseudo-element and : for | 1668 return pseudoElement || _legacyPseudoElements.contains(name) |
| 1469 // pseudo-class. However, CSS2.1 allows for : to specify old | 1669 ? new PseudoElementSelector(pseudoName, _makeSpan(start), |
| 1470 // pseudo-elements (:first-line, :first-letter, :before and :after) any | 1670 isLegacy: !pseudoElement) |
| 1471 // new pseudo-elements defined would require a ::. | |
| 1472 return pseudoElement | |
| 1473 ? new PseudoElementSelector(pseudoName, _makeSpan(start)) | |
| 1474 : new PseudoClassSelector(pseudoName, _makeSpan(start)); | 1671 : new PseudoClassSelector(pseudoName, _makeSpan(start)); |
| 1475 } | 1672 } |
| 1476 | 1673 |
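
Two behaviors change above: single-colon CSS2.1 names in `_legacyPseudoElements` now come back as legacy `PseudoElementSelector`s, and `:host()`/`:host-context()` take a compound selector argument via `processCompoundSelector`. A sketch of both, not part of the CL, with illustrative selectors:

```dart
import 'package:csslib/parser.dart' as css;

main() {
  var errors = <css.Message>[];

  // ':before' and '::before' both map to a pseudo-element; the single-colon
  // form is flagged as legacy.
  css.parseSelectorGroup('p:before, p::before', errors: errors);

  // ':host()' / ':host-context()' accept a compound selector argument; a
  // combinator inside the parentheses is reported as an error.
  css.parseSelectorGroup(':host(.selected), :host-context(body.dark) .item',
      errors: errors);

  print(errors.isEmpty ? 'pseudo selectors parsed' : errors.join('\n'));
}
```
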
| 1477 /** | 1674 /** |
| 1478 * In CSS3, the expressions are identifiers, strings, or of the form "an+b". | 1675 * In CSS3, the expressions are identifiers, strings, or of the form "an+b". |
| 1479 * | 1676 * |
| 1480 * : [ [ PLUS | '-' | DIMENSION | NUMBER | STRING | IDENT ] S* ]+ | 1677 * : [ [ PLUS | '-' | DIMENSION | NUMBER | STRING | IDENT ] S* ]+ |
| 1481 * | 1678 * |
| 1482 * num [0-9]+|[0-9]*\.[0-9]+ | 1679 * num [0-9]+|[0-9]*\.[0-9]+ |
| 1483 * PLUS '+' | 1680 * PLUS '+' |
| 1484 * DIMENSION {num}{ident} | 1681 * DIMENSION {num}{ident} |
| 1485 * NUMBER {num} | 1682 * NUMBER {num} |
| 1486 */ | 1683 */ |
| 1487 processSelectorExpression() { | 1684 processSelectorExpression() { |
| 1488 var start = _peekToken.span; | 1685 var start = _peekToken.span; |
| 1489 | 1686 |
| 1490 var expressions = []; | 1687 var expressions = <Expression>[]; |
| 1491 | 1688 |
| 1492 Token termToken; | 1689 Token termToken; |
| 1493 var value; | 1690 var value; |
| 1494 | 1691 |
| 1495 var keepParsing = true; | 1692 var keepParsing = true; |
| 1496 while (keepParsing) { | 1693 while (keepParsing) { |
| 1497 switch (_peek()) { | 1694 switch (_peek()) { |
| 1498 case TokenKind.PLUS: | 1695 case TokenKind.PLUS: |
| 1499 start = _peekToken.span; | 1696 start = _peekToken.span; |
| 1500 termToken = _next(); | 1697 termToken = _next(); |
| (...skipping 615 matching lines...) |
| 2116 | 2313 |
| 2117 var unary = ""; | 2314 var unary = ""; |
| 2118 switch (_peek()) { | 2315 switch (_peek()) { |
| 2119 case TokenKind.HASH: | 2316 case TokenKind.HASH: |
| 2120 this._eat(TokenKind.HASH); | 2317 this._eat(TokenKind.HASH); |
| 2121 if (!_anyWhiteSpaceBeforePeekToken(TokenKind.HASH)) { | 2318 if (!_anyWhiteSpaceBeforePeekToken(TokenKind.HASH)) { |
| 2122 String hexText; | 2319 String hexText; |
| 2123 if (_peekKind(TokenKind.INTEGER)) { | 2320 if (_peekKind(TokenKind.INTEGER)) { |
| 2124 String hexText1 = _peekToken.text; | 2321 String hexText1 = _peekToken.text; |
| 2125 _next(); | 2322 _next(); |
| 2126 if (_peekIdentifier()) { | 2323 // Append identifier only if there's no delimiting whitespace. |
| 2324 if (_peekIdentifier() && _previousToken.end == _peekToken.start) { |
| 2127 hexText = '$hexText1${identifier().name}'; | 2325 hexText = '$hexText1${identifier().name}'; |
| 2128 } else { | 2326 } else { |
| 2129 hexText = hexText1; | 2327 hexText = hexText1; |
| 2130 } | 2328 } |
| 2131 } else if (_peekIdentifier()) { | 2329 } else if (_peekIdentifier()) { |
| 2132 hexText = identifier().name; | 2330 hexText = identifier().name; |
| 2133 } | 2331 } |
| 2134 if (hexText != null) { | 2332 if (hexText != null) { |
| 2135 return _parseHex(hexText, _makeSpan(start)); | 2333 return _parseHex(hexText, _makeSpan(start)); |
| 2136 } | 2334 } |
| (...skipping 125 matching lines...) |
| 2262 case TokenKind.AT: | 2460 case TokenKind.AT: |
| 2263 if (messages.options.lessSupport) { | 2461 if (messages.options.lessSupport) { |
| 2264 _next(); | 2462 _next(); |
| 2265 | 2463 |
| 2266 var expr = processExpr(); | 2464 var expr = processExpr(); |
| 2267 if (isChecked && expr.expressions.length > 1) { | 2465 if (isChecked && expr.expressions.length > 1) { |
| 2268 _error("only @name for Less syntax", _peekToken.span); | 2466 _error("only @name for Less syntax", _peekToken.span); |
| 2269 } | 2467 } |
| 2270 | 2468 |
| 2271 var param = expr.expressions[0]; | 2469 var param = expr.expressions[0]; |
| 2272 var varUsage = new VarUsage(param.text, [], _makeSpan(start)); | 2470 var varUsage = |
| 2471 new VarUsage((param as LiteralTerm).text, [], _makeSpan(start)); |
| 2273 expr.expressions[0] = varUsage; | 2472 expr.expressions[0] = varUsage; |
| 2274 return expr.expressions; | 2473 return expr.expressions; |
| 2275 } | 2474 } |
| 2276 break; | 2475 break; |
| 2277 } | 2476 } |
| 2278 | 2477 |
| 2279 return processDimension(t, value, _makeSpan(start)); | 2478 return t != null ? processDimension(t, value, _makeSpan(start)) : null; |
| 2280 } | 2479 } |
| 2281 | 2480 |
| 2282 /** Process all dimension units. */ | 2481 /** Process all dimension units. */ |
| 2283 LiteralTerm processDimension(Token t, var value, SourceSpan span) { | 2482 LiteralTerm processDimension(Token t, var value, SourceSpan span) { |
| 2284 LiteralTerm term; | 2483 LiteralTerm term; |
| 2285 var unitType = this._peek(); | 2484 var unitType = this._peek(); |
| 2286 | 2485 |
| 2287 switch (unitType) { | 2486 switch (unitType) { |
| 2288 case TokenKind.UNIT_EM: | 2487 case TokenKind.UNIT_EM: |
| 2289 term = new EmTerm(value, t.text, span); | 2488 term = new EmTerm(value, t.text, span); |
| (...skipping 125 matching lines...) |
| 2415 // fully support calc, var(), etc. | 2614 // fully support calc, var(), etc. |
| 2416 /** | 2615 /** |
| 2417 * IE's filter property breaks CSS value parsing. IE's format can be: | 2616 * IE's filter property breaks CSS value parsing. IE's format can be: |
| 2418 * | 2617 * |
| 2419 * filter: progid:DXImageTransform.MS.gradient(Type=0, Color='#9d8b83'); | 2618 * filter: progid:DXImageTransform.MS.gradient(Type=0, Color='#9d8b83'); |
| 2420 * | 2619 * |
| 2421 * We'll just parse everything after the 'progid:' look for the left paren | 2620 * We'll just parse everything after the 'progid:' look for the left paren |
| 2422 * then parse to the right paren ignoring everything in between. | 2621 * then parse to the right paren ignoring everything in between. |
| 2423 */ | 2622 */ |
| 2424 processIEFilter(FileSpan startAfterProgidColon) { | 2623 processIEFilter(FileSpan startAfterProgidColon) { |
| 2624 // Support non-functional filters (i.e. filter: FlipH) |
| 2625 var kind = _peek(); |
| 2626 if (kind == TokenKind.SEMICOLON || kind == TokenKind.RBRACE) { |
| 2627 var tok = tokenizer.makeIEFilter( |
| 2628 startAfterProgidColon.start.offset, _peekToken.start); |
| 2629 return new LiteralTerm(tok.text, tok.text, tok.span); |
| 2630 } |
| 2631 |
| 2425 var parens = 0; | 2632 var parens = 0; |
| 2426 | |
| 2427 while (_peek() != TokenKind.END_OF_FILE) { | 2633 while (_peek() != TokenKind.END_OF_FILE) { |
| 2428 switch (_peek()) { | 2634 switch (_peek()) { |
| 2429 case TokenKind.LPAREN: | 2635 case TokenKind.LPAREN: |
| 2430 _eat(TokenKind.LPAREN); | 2636 _eat(TokenKind.LPAREN); |
| 2431 parens++; | 2637 parens++; |
| 2432 break; | 2638 break; |
| 2433 case TokenKind.RPAREN: | 2639 case TokenKind.RPAREN: |
| 2434 _eat(TokenKind.RPAREN); | 2640 _eat(TokenKind.RPAREN); |
| 2435 if (--parens == 0) { | 2641 if (--parens == 0) { |
| 2436 var tok = tokenizer.makeIEFilter( | 2642 var tok = tokenizer.makeIEFilter( |
| (...skipping 23 matching lines...) |
| 2460 tokenizer._inString = false; | 2666 tokenizer._inString = false; |
| 2461 | 2667 |
| 2462 // Gobble up everything until we hit our stop token. | 2668 // Gobble up everything until we hit our stop token. |
| 2463 var stringValue = new StringBuffer(); | 2669 var stringValue = new StringBuffer(); |
| 2464 var left = 1; | 2670 var left = 1; |
| 2465 var matchingParens = false; | 2671 var matchingParens = false; |
| 2466 while (_peek() != TokenKind.END_OF_FILE && !matchingParens) { | 2672 while (_peek() != TokenKind.END_OF_FILE && !matchingParens) { |
| 2467 var token = _peek(); | 2673 var token = _peek(); |
| 2468 if (token == TokenKind.LPAREN) | 2674 if (token == TokenKind.LPAREN) |
| 2469 left++; | 2675 left++; |
| 2470 else if (token == TokenKind.RPAREN) | 2676 else if (token == TokenKind.RPAREN) left--; |
| 2471 left--; | |
| 2472 | 2677 |
| 2473 matchingParens = left == 0; | 2678 matchingParens = left == 0; |
| 2474 if (!matchingParens) stringValue.write(_next().text); | 2679 if (!matchingParens) stringValue.write(_next().text); |
| 2475 } | 2680 } |
| 2476 | 2681 |
| 2477 if (!matchingParens) { | 2682 if (!matchingParens) { |
| 2478 _error("problem parsing function expected ), ", _peekToken.span); | 2683 _error("problem parsing function expected ), ", _peekToken.span); |
| 2479 } | 2684 } |
| 2480 | 2685 |
| 2481 tokenizer._inString = inString; | 2686 tokenizer._inString = inString; |
| 2482 | 2687 |
| 2483 return stringValue.toString(); | 2688 return stringValue.toString(); |
| 2484 } | 2689 } |
| 2485 | 2690 |
| 2486 CalcTerm processCalc(Identifier func) { | 2691 CalcTerm processCalc(Identifier func) { |
| 2487 var start = _peekToken.span; | 2692 var start = _peekToken.span; |
| 2488 | 2693 |
| 2489 var name = func.name; | 2694 var name = func.name; |
| 2490 if (name == 'calc') { | 2695 if (name == 'calc' || name == '-webkit-calc' || name == '-moz-calc') { |
| 2491 // TODO(terry): Implement expression parsing properly. | 2696 // TODO(terry): Implement expression parsing properly. |
| 2492 String expression = processCalcExpression(); | 2697 String expression = processCalcExpression(); |
| 2493 var calcExpr = new LiteralTerm(expression, expression, _makeSpan(start)); | 2698 var calcExpr = new LiteralTerm(expression, expression, _makeSpan(start)); |
| 2494 | 2699 |
| 2495 if (!_maybeEat(TokenKind.RPAREN)) { | 2700 if (!_maybeEat(TokenKind.RPAREN)) { |
| 2496 _error("problem parsing function expected ), ", _peekToken.span); | 2701 _error("problem parsing function expected ), ", _peekToken.span); |
| 2497 } | 2702 } |
| 2498 | 2703 |
| 2499 return new CalcTerm(name, name, calcExpr, _makeSpan(start)); | 2704 return new CalcTerm(name, name, calcExpr, _makeSpan(start)); |
| 2500 } | 2705 } |
| 2501 | 2706 |
| 2502 return null; | 2707 return null; |
| 2503 } | 2708 } |
| 2504 | 2709 |
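
`processCalc` now also recognizes the vendor-prefixed names. A sketch, not part of the CL, of values it is intended to accept, assuming the call site routes `-webkit-calc`/`-moz-calc` to `processCalc` the same way it routes `calc` (the CSS below is illustrative):

```dart
import 'package:csslib/parser.dart' as css;

main() {
  var errors = <css.Message>[];
  css.compile('''
.a { width: calc(100% - 10px); }
.b { width: -webkit-calc(100% / 3); }
.c { width: -moz-calc(50% + 2em); }
''', errors: errors);
  print(errors.isEmpty ? 'calc() variants parsed' : errors.join('\n'));
}
```
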
| 2505 // Function grammar: | 2710 // Function grammar: |
| 2506 // | 2711 // |
| 2507 // function: IDENT '(' expr ')' | 2712 // function: IDENT '(' expr ')' |
| 2508 // | 2713 // |
| 2509 processFunction(Identifier func) { | 2714 processFunction(Identifier func) { |
| 2510 var start = _peekToken.span; | 2715 var start = _peekToken.span; |
| 2511 | |
| 2512 var name = func.name; | 2716 var name = func.name; |
| 2513 | 2717 |
| 2514 switch (name) { | 2718 switch (name) { |
| 2515 case 'url': | 2719 case 'url': |
| 2516 // URI term sucks up everything inside of quotes(' or ") or between parens | 2720 // URI term sucks up everything inside of quotes(' or ") or between parens |
| 2517 var urlParam = processQuotedString(true); | 2721 var urlParam = processQuotedString(true); |
| 2518 | 2722 |
| 2519 // TODO(terry): Better error messge and checking for mismatched quotes. | 2723 // TODO(terry): Better error message and checking for mismatched quotes. |
| 2520 if (_peek() == TokenKind.END_OF_FILE) { | 2724 if (_peek() == TokenKind.END_OF_FILE) { |
| 2521 _error("problem parsing URI", _peekToken.span); | 2725 _error("problem parsing URI", _peekToken.span); |
| 2522 } | 2726 } |
| 2523 | 2727 |
| 2524 if (_peek() == TokenKind.RPAREN) { | 2728 if (_peek() == TokenKind.RPAREN) { |
| 2525 _next(); | 2729 _next(); |
| 2526 } | 2730 } |
| 2527 | 2731 |
| 2528 return new UriTerm(urlParam, _makeSpan(start)); | 2732 return new UriTerm(urlParam, _makeSpan(start)); |
| 2529 case 'var': | 2733 case 'var': |
| 2530 // TODO(terry): Consider handling var in IE specific filter/progid. This | 2734 // TODO(terry): Consider handling var in IE specific filter/progid. This |
| 2531 // will require parsing entire IE specific syntax e.g., | 2735 // will require parsing entire IE specific syntax e.g., |
| 2532 // param = value or progid:com_id, etc. for example: | 2736 // param = value or progid:com_id, etc. for example: |
| 2533 // | 2737 // |
| 2534 // var-blur: Blur(Add = 0, Direction = 225, Strength = 10); | 2738 // var-blur: Blur(Add = 0, Direction = 225, Strength = 10); |
| 2535 // var-gradient: progid:DXImageTransform.Microsoft.gradient" | 2739 // var-gradient: progid:DXImageTransform.Microsoft.gradient" |
| 2536 // (GradientType=0,StartColorStr='#9d8b83', EndColorStr='#847670'); | 2740 // (GradientType=0,StartColorStr='#9d8b83', EndColorStr='#847670'); |
| 2537 var expr = processExpr(); | 2741 var expr = processExpr(); |
| 2538 if (!_maybeEat(TokenKind.RPAREN)) { | 2742 if (!_maybeEat(TokenKind.RPAREN)) { |
| 2539 _error("problem parsing var expected ), ", _peekToken.span); | 2743 _error("problem parsing var expected ), ", _peekToken.span); |
| 2540 } | 2744 } |
| 2541 if (isChecked && | 2745 if (isChecked && |
| 2542 expr.expressions.where((e) => e is OperatorComma).length > 1) { | 2746 expr.expressions.where((e) => e is OperatorComma).length > 1) { |
| 2543 _error("too many parameters to var()", _peekToken.span); | 2747 _error("too many parameters to var()", _peekToken.span); |
| 2544 } | 2748 } |
| 2545 | 2749 |
| 2546 var paramName = expr.expressions[0].text; | 2750 var paramName = (expr.expressions[0] as LiteralTerm).text; |
| 2547 | 2751 |
| 2548 // [0] - var name, [1] - OperatorComma, [2] - default value. | 2752 // [0] - var name, [1] - OperatorComma, [2] - default value. |
| 2549 var defaultValues = | 2753 var defaultValues = expr.expressions.length >= 3 |
| 2550 expr.expressions.length >= 3 ? expr.expressions.sublist(2) : []; | 2754 ? expr.expressions.sublist(2) |
| 2755 : <Expression>[]; |
| 2551 return new VarUsage(paramName, defaultValues, _makeSpan(start)); | 2756 return new VarUsage(paramName, defaultValues, _makeSpan(start)); |
| 2552 default: | 2757 default: |
| 2553 var expr = processExpr(); | 2758 var expr = processExpr(); |
| 2554 if (!_maybeEat(TokenKind.RPAREN)) { | 2759 if (!_maybeEat(TokenKind.RPAREN)) { |
| 2555 _error("problem parsing function expected ), ", _peekToken.span); | 2760 _error("problem parsing function expected ), ", _peekToken.span); |
| 2556 } | 2761 } |
| 2557 | 2762 |
| 2558 return new FunctionTerm(name, name, expr, _makeSpan(start)); | 2763 return new FunctionTerm(name, name, expr, _makeSpan(start)); |
| 2559 } | 2764 } |
| 2560 | |
| 2561 return null; | |
| 2562 } | 2765 } |
| 2563 | 2766 |
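
The `var` branch above splits the parsed expression into the variable name and optional defaults ([0] name, [1] comma, [2..] fallback values) before building the `VarUsage`. Under this package's older custom-property syntax (`var-name: value;` to define, `var(name, fallback)` to use), a sketch looks like the following; this is not from the CL, the CSS is illustrative, and it assumes `var-` definitions parse without extra options:

```dart
import 'package:csslib/parser.dart' as css;

main() {
  var errors = <css.Message>[];
  css.compile('''
:root { var-main-color: #f00; }
.warn { color: var(main-color, orange); }
''', errors: errors);
  print(errors.isEmpty ? 'var() with fallback parsed' : errors.join('\n'));
}
```
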
| 2564 Identifier identifier() { | 2767 Identifier identifier() { |
| 2565 var tok = _next(); | 2768 var tok = _next(); |
| 2566 | 2769 |
| 2567 if (!TokenKind.isIdentifier(tok.kind) && | 2770 if (!TokenKind.isIdentifier(tok.kind) && |
| 2568 !TokenKind.isKindIdentifier(tok.kind)) { | 2771 !TokenKind.isKindIdentifier(tok.kind)) { |
| 2569 if (isChecked) { | 2772 if (isChecked) { |
| 2570 _warning('expected identifier, but found $tok', tok.span); | 2773 _warning('expected identifier, but found $tok', tok.span); |
| 2571 } | 2774 } |
| (...skipping 174 matching lines...) |
| 2746 | 2949 |
| 2747 if (replace != null && result == null) { | 2950 if (replace != null && result == null) { |
| 2748 result = new StringBuffer(text.substring(0, i)); | 2951 result = new StringBuffer(text.substring(0, i)); |
| 2749 } | 2952 } |
| 2750 | 2953 |
| 2751 if (result != null) result.write(replace != null ? replace : text[i]); | 2954 if (result != null) result.write(replace != null ? replace : text[i]); |
| 2752 } | 2955 } |
| 2753 | 2956 |
| 2754 return result == null ? text : result.toString(); | 2957 return result == null ? text : result.toString(); |
| 2755 } | 2958 } |