| OLD | NEW |
| (Empty) |
| 1 #!/usr/bin/env python | |
| 2 # | |
| 3 # Copyright 2012 The Closure Linter Authors. All Rights Reserved. | |
| 4 # Licensed under the Apache License, Version 2.0 (the "License"); | |
| 5 # you may not use this file except in compliance with the License. | |
| 6 # You may obtain a copy of the License at | |
| 7 # | |
| 8 # http://www.apache.org/licenses/LICENSE-2.0 | |
| 9 # | |
| 10 # Unless required by applicable law or agreed to in writing, software | |
| 11 # distributed under the License is distributed on an "AS-IS" BASIS, | |
| 12 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
| 13 # See the License for the specific language governing permissions and | |
| 14 # limitations under the License. | |
| 15 | |
| 16 """Main lint function. Tokenizes file, runs passes, and feeds to checker.""" | |
| 17 | |
| 18 # Allow non-Google copyright | |
| 19 # pylint: disable=g-bad-file-header | |
| 20 | |
| 21 __author__ = 'nnaze@google.com (Nathan Naze)' | |
| 22 | |
| 23 import traceback | |
| 24 | |
| 25 import gflags as flags | |
| 26 | |
| 27 from closure_linter import checker | |
| 28 from closure_linter import ecmalintrules | |
| 29 from closure_linter import ecmametadatapass | |
| 30 from closure_linter import error_check | |
| 31 from closure_linter import errors | |
| 32 from closure_linter import javascriptstatetracker | |
| 33 from closure_linter import javascripttokenizer | |
| 34 | |
| 35 from closure_linter.common import error | |
| 36 from closure_linter.common import htmlutil | |
| 37 from closure_linter.common import tokens | |
| 38 | |
# Command-line flags for the lint runner.
# Files whose names end with one of these suffixes get relaxed documentation
# checks (see _IsLimitedDocCheck below).
flags.DEFINE_list('limited_doc_files', ['dummy.js', 'externs.js'],
                  'List of files with relaxed documentation checks. Will not '
                  'report errors for missing documentation, some missing '
                  'descriptions, or methods whose @return tags don\'t have a '
                  'matching return statement.')
# When true, parse failures print a full Python traceback (see
# RunMetaDataPass below).
flags.DEFINE_boolean('error_trace', False,
                     'Whether to show error exceptions.')
# Adopt the key flags of the modules this runner drives so they show up as
# key flags of this module as well.
flags.ADOPT_module_key_flags(checker)
flags.ADOPT_module_key_flags(ecmalintrules)
flags.ADOPT_module_key_flags(error_check)
| 49 | |
| 50 | |
def _GetLastNonWhiteSpaceToken(start_token):
  """Return the final non-whitespace token in a token stream.

  Args:
    start_token: The first token of the stream to scan.

  Returns:
    The last token whose type is neither WHITESPACE nor BLANK_LINE, or None
    if the stream contains only whitespace tokens.
  """
  skip_types = frozenset(
      [tokens.TokenType.WHITESPACE, tokens.TokenType.BLANK_LINE])

  last_seen = None
  for current in start_token:
    if current.type not in skip_types:
      last_seen = current
  return last_seen
| 62 | |
| 63 | |
| 64 def _IsHtml(filename): | |
| 65 return filename.endswith('.html') or filename.endswith('.htm') | |
| 66 | |
| 67 | |
def _Tokenize(fileobj):
  """Tokenize a file.

  Args:
    fileobj: file-like object (or iterable lines) with the source.

  Returns:
    The first token in the token stream and the ending mode of the tokenizer.
  """
  js_tokenizer = javascripttokenizer.JavaScriptTokenizer()
  first_token = js_tokenizer.TokenizeFile(fileobj)
  # The tokenizer records the mode it finished in; callers use it to detect
  # files that end mid-construct.
  return first_token, js_tokenizer.mode
| 80 | |
| 81 | |
| 82 def _IsLimitedDocCheck(filename, limited_doc_files): | |
| 83 """Whether this this a limited-doc file. | |
| 84 | |
| 85 Args: | |
| 86 filename: The filename. | |
| 87 limited_doc_files: Iterable of strings. Suffixes of filenames that should | |
| 88 be limited doc check. | |
| 89 | |
| 90 Returns: | |
| 91 Whether the file should be limited check. | |
| 92 """ | |
| 93 for limited_doc_filename in limited_doc_files: | |
| 94 if filename.endswith(limited_doc_filename): | |
| 95 return True | |
| 96 return False | |
| 97 | |
| 98 | |
def Run(filename, error_handler, source=None):
  """Tokenize, run passes, and check the given file.

  Args:
    filename: The path of the file to check
    error_handler: The error handler to report errors to.
    source: A file-like object with the file source. If omitted, the file will
      be read from the filename path.
  """
  # Track any file we open ourselves so we can close it when done; a
  # caller-supplied source remains the caller's responsibility to close.
  opened_file = None
  if not source:
    try:
      source = opened_file = open(filename)
    except IOError:
      error_handler.HandleFile(filename, None)
      error_handler.HandleError(
          error.Error(errors.FILE_NOT_FOUND, 'File not found'))
      error_handler.FinishFile()
      return

  try:
    # HTML sources are reduced to just their embedded <script> lines.
    if _IsHtml(filename):
      source_file = htmlutil.GetScriptLines(source)
    else:
      source_file = source

    token, tokenizer_mode = _Tokenize(source_file)

    error_handler.HandleFile(filename, token)

    # If we did not end in the basic mode, this a failed parse.
    if tokenizer_mode is not javascripttokenizer.JavaScriptModes.TEXT_MODE:
      error_handler.HandleError(
          error.Error(errors.FILE_IN_BLOCK,
                      'File ended in mode "%s".' % tokenizer_mode,
                      _GetLastNonWhiteSpaceToken(token)))

    # Run the ECMA pass; a parse error yields the token at which checking
    # should stop (None means check the whole stream).
    ecma_pass = ecmametadatapass.EcmaMetaDataPass()
    error_token = RunMetaDataPass(token, ecma_pass, error_handler, filename)

    is_limited_doc_check = (
        _IsLimitedDocCheck(filename, flags.FLAGS.limited_doc_files))

    _RunChecker(token, error_handler,
                is_limited_doc_check,
                is_html=_IsHtml(filename),
                stop_token=error_token)

    error_handler.FinishFile()
  finally:
    # Fix: the original leaked the file handle opened above.
    if opened_file:
      opened_file.close()
| 149 | |
| 150 | |
def RunMetaDataPass(start_token, metadata_pass, error_handler, filename=''):
  """Run a metadata pass over a token stream.

  Args:
    start_token: The first token in a token stream.
    metadata_pass: Metadata pass to run.
    error_handler: The error handler to report errors to.
    filename: Filename of the source.

  Returns:
    The token where the error occurred (if any).
  """
  try:
    metadata_pass.Process(start_token)
  except ecmametadatapass.ParseError as parse_err:
    # Optionally surface the full traceback for debugging.
    if flags.FLAGS.error_trace:
      traceback.print_exc()
    bad_token = parse_err.token
    error_handler.HandleError(
        error.Error(
            errors.FILE_DOES_NOT_PARSE,
            ('Error parsing file at token "%s". Unable to '
             'check the rest of file.'
             '\nError "%s"' % (bad_token, str(parse_err))),
            bad_token))
    return bad_token
  except Exception:  # pylint: disable=broad-except
    # Unexpected internal failure: always show the traceback and report a
    # generic parse error against the file.
    traceback.print_exc()
    error_handler.HandleError(
        error.Error(
            errors.FILE_DOES_NOT_PARSE,
            'Internal error in %s' % filename))
| 183 | |
| 184 | |
def _RunChecker(start_token, error_handler,
                limited_doc_checks, is_html,
                stop_token=None):
  """Run the JavaScript style checker over a token stream.

  Args:
    start_token: First token of the stream to check.
    error_handler: Error handler that receives reported errors.
    limited_doc_checks: Whether documentation checks are relaxed.
    is_html: Whether the source is an HTML file with embedded script.
    stop_token: Optional token at which checking stops.
  """
  tracker = javascriptstatetracker.JavaScriptStateTracker()

  js_checker = checker.JavaScriptStyleChecker(
      state_tracker=tracker,
      error_handler=error_handler)

  js_checker.Check(
      start_token,
      is_html=is_html,
      limited_doc_checks=limited_doc_checks,
      stop_token=stop_token)
| OLD | NEW |