| OLD | NEW |
| (Empty) |
| 1 #!/usr/bin/env python | |
| 2 # | |
| 3 # Copyright 2012 The Closure Linter Authors. All Rights Reserved. | |
| 4 # | |
| 5 # Licensed under the Apache License, Version 2.0 (the "License"); | |
| 6 # you may not use this file except in compliance with the License. | |
| 7 # You may obtain a copy of the License at | |
| 8 # | |
| 9 # http://www.apache.org/licenses/LICENSE-2.0 | |
| 10 # | |
| 11 # Unless required by applicable law or agreed to in writing, software | |
| 12 # distributed under the License is distributed on an "AS-IS" BASIS, | |
| 13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
| 14 # See the License for the specific language governing permissions and | |
| 15 # limitations under the License. | |
| 16 | |
| 17 """Utility functions for testing gjslint components.""" | |
| 18 | |
| 19 # Allow non-Google copyright | |
| 20 # pylint: disable=g-bad-file-header | |
| 21 | |
| 22 __author__ = ('nnaze@google.com (Nathan Naze)') | |
| 23 | |
| 24 import StringIO | |
| 25 | |
| 26 from closure_linter import ecmametadatapass | |
| 27 from closure_linter import javascriptstatetracker | |
| 28 from closure_linter import javascripttokenizer | |
| 29 | |
| 30 | |
def TokenizeSource(source):
  """Tokenize a JavaScript source.

  Args:
    source: A source file as a string or file-like object (iterates lines).

  Returns:
    The first token of the resulting token stream.
  """
  # Wrap raw strings in a file-like object so the tokenizer can iterate lines.
  stream = StringIO.StringIO(source) if isinstance(source, basestring) else source
  return javascripttokenizer.JavaScriptTokenizer().TokenizeFile(stream)
| 46 | |
| 47 | |
def TokenizeSourceAndRunEcmaPass(source):
  """Tokenize a source and run the EcmaMetaDataPass on it.

  Args:
    source: A source file as a string or file-like object (iterates lines).

  Returns:
    The first token of the resulting token stream.
  """
  first_token = TokenizeSource(source)
  ecmametadatapass.EcmaMetaDataPass().Process(first_token)
  return first_token
| 61 | |
| 62 | |
def ParseFunctionsAndComments(source, error_handler=None):
  """Run the tokenizer and tracker; collect the functions and comments found.

  Args:
    source: A source file as a string or file-like object (iterates lines).
    error_handler: An optional error handler passed to the doc-flag pass.

  Returns:
    The functions and comments as a tuple, each in first-encounter order
    with duplicates (by equality) dropped.
  """
  first_token = TokenizeSourceAndRunEcmaPass(source)

  tracker = javascriptstatetracker.JavaScriptStateTracker()
  if error_handler is not None:
    tracker.DocFlagPass(first_token, error_handler)

  functions = []
  comments = []
  for token in first_token:
    tracker.HandleToken(token, tracker.GetLastNonSpaceToken())

    # Record each function and doc comment once, preserving encounter order.
    for item, seen in ((tracker.GetFunction(), functions),
                       (tracker.GetDocComment(), comments)):
      if item and item not in seen:
        seen.append(item)

    tracker.HandleAfterToken(token)

  return functions, comments
| OLD | NEW |