Chromium Code Reviews

Side by Side Diff: third_party/closure_linter/closure_linter/error_fixer.py

Issue 2328693002: Updated linter with upstream release (2.3.19) (Closed)
Patch Set: Created 4 years, 3 months ago
1 #!/usr/bin/env python 1 #!/usr/bin/env python
2 # 2 #
3 # Copyright 2007 The Closure Linter Authors. All Rights Reserved. 3 # Copyright 2007 The Closure Linter Authors. All Rights Reserved.
4 # 4 #
5 # Licensed under the Apache License, Version 2.0 (the "License"); 5 # Licensed under the Apache License, Version 2.0 (the "License");
6 # you may not use this file except in compliance with the License. 6 # you may not use this file except in compliance with the License.
7 # You may obtain a copy of the License at 7 # You may obtain a copy of the License at
8 # 8 #
9 # http://www.apache.org/licenses/LICENSE-2.0 9 # http://www.apache.org/licenses/LICENSE-2.0
10 # 10 #
(...skipping 32 matching lines...)
43 r'(?P<name>[^(]+)' 43 r'(?P<name>[^(]+)'
44 r'(?P<whitespace_after_name>\s+)' 44 r'(?P<whitespace_after_name>\s+)'
45 r'\(' 45 r'\('
46 r'(?P<email>[^\s]+@[^)\s]+)' 46 r'(?P<email>[^\s]+@[^)\s]+)'
47 r'\)' 47 r'\)'
48 r'(?P<trailing_characters>.*)') 48 r'(?P<trailing_characters>.*)')
49 49
50 FLAGS = flags.FLAGS 50 FLAGS = flags.FLAGS
51 flags.DEFINE_boolean('disable_indentation_fixing', False, 51 flags.DEFINE_boolean('disable_indentation_fixing', False,
52 'Whether to disable automatic fixing of indentation.') 52 'Whether to disable automatic fixing of indentation.')
53 flags.DEFINE_list('fix_error_codes', [], 'A list of specific error codes to '
54 'fix. Defaults to all supported error codes when empty. '
55 'See errors.py for a list of error codes.')
53 56
54 57
55 class ErrorFixer(errorhandler.ErrorHandler): 58 class ErrorFixer(errorhandler.ErrorHandler):
56 """Object that fixes simple style errors.""" 59 """Object that fixes simple style errors."""
57 60
58 def __init__(self, external_file=None): 61 def __init__(self, external_file=None):
59 """Initialize the error fixer. 62 """Initialize the error fixer.
60 63
61 Args: 64 Args:
62 external_file: If included, all output will be directed to this file 65 external_file: If included, all output will be directed to this file
63 instead of overwriting the files the errors are found in. 66 instead of overwriting the files the errors are found in.
64 """ 67 """
65 errorhandler.ErrorHandler.__init__(self) 68 errorhandler.ErrorHandler.__init__(self)
66 69
67 self._file_name = None 70 self._file_name = None
68 self._file_token = None 71 self._file_token = None
69 self._external_file = external_file 72 self._external_file = external_file
70 73
74 try:
75 self._fix_error_codes = set([errors.ByName(error.upper()) for error in
76 FLAGS.fix_error_codes])
77 except KeyError as ke:
78 raise ValueError('Unknown error code ' + ke.args[0])
79
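
Note: the new constructor logic converts the user-supplied names from --fix_error_codes into numeric codes via errors.ByName and stores them as a set; an empty set later means "fix every supported error". A minimal standalone sketch of that behaviour, where parse_fix_error_codes, should_fix and the by_name mapping are illustrative names, not part of this patch:

    def parse_fix_error_codes(names, by_name):
      # by_name maps an error name such as 'JSDOC_MISSING_OPTIONAL_TYPE'
      # to its numeric code, much like errors.ByName does in the linter.
      try:
        return set(by_name[name.upper()] for name in names)
      except KeyError as ke:
        raise ValueError('Unknown error code ' + ke.args[0])

    def should_fix(code, fix_error_codes):
      # An empty set means the flag was not given, so every error is fixed;
      # this mirrors the early return added to HandleError below.
      return not fix_error_codes or code in fix_error_codes
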
71 def HandleFile(self, filename, first_token): 80 def HandleFile(self, filename, first_token):
72 """Notifies this ErrorPrinter that subsequent errors are in filename. 81 """Notifies this ErrorPrinter that subsequent errors are in filename.
73 82
74 Args: 83 Args:
75 filename: The name of the file about to be checked. 84 filename: The name of the file about to be checked.
76 first_token: The first token in the file. 85 first_token: The first token in the file.
77 """ 86 """
78 self._file_name = filename 87 self._file_name = filename
79 self._file_is_html = filename.endswith('.html') or filename.endswith('.htm') 88 self._file_is_html = filename.endswith('.html') or filename.endswith('.htm')
80 self._file_token = first_token 89 self._file_token = first_token
81 self._file_fix_count = 0 90 self._file_fix_count = 0
82 self._file_changed_lines = set() 91 self._file_changed_lines = set()
83 92
84 def _AddFix(self, tokens): 93 def _AddFix(self, tokens):
85 """Adds the fix to the internal count. 94 """Adds the fix to the internal count.
86 95
87 Args: 96 Args:
88 tokens: The token or sequence of tokens changed to fix an error. 97 tokens: The token or sequence of tokens changed to fix an error.
89 """ 98 """
90 self._file_fix_count += 1 99 self._file_fix_count += 1
91 if hasattr(tokens, 'line_number'): 100 if hasattr(tokens, 'line_number'):
92 self._file_changed_lines.add(tokens.line_number) 101 self._file_changed_lines.add(tokens.line_number)
93 else: 102 else:
94 for token in tokens: 103 for token in tokens:
95 self._file_changed_lines.add(token.line_number) 104 self._file_changed_lines.add(token.line_number)
96 105
106 def _FixJsDocPipeNull(self, js_type):
107 """Change number|null or null|number to ?number.
108
109 Args:
110 js_type: The typeannotation.TypeAnnotation instance to fix.
111 """
112
113 # Recurse into all sub_types if the error was at a deeper level.
114 map(self._FixJsDocPipeNull, js_type.IterTypes())
115
116 if js_type.type_group and len(js_type.sub_types) == 2:
117 # Find and remove the null sub_type:
118 sub_type = None
119 for sub_type in js_type.sub_types:
120 if sub_type.identifier == 'null':
121 map(tokenutil.DeleteToken, sub_type.tokens)
122 self._AddFix(sub_type.tokens)
123 break
124 else:
125 return
126
127 first_token = js_type.FirstToken()
128 question_mark = Token('?', Type.DOC_TYPE_MODIFIER, first_token.line,
129 first_token.line_number)
130 tokenutil.InsertTokenBefore(question_mark, first_token)
131 js_type.tokens.insert(0, question_mark)
132 js_type.tokens.remove(sub_type)
133 js_type.sub_types.remove(sub_type)
134 js_type.or_null = True
135
136 # Now also remove the separator, which is in the parent's token list,
137 # either before or after the sub_type, there is exactly one. Scan for it.
138 for token in js_type.tokens:
139 if (token and isinstance(token, Token) and
140 token.type == Type.DOC_TYPE_MODIFIER and token.string == '|'):
141 tokenutil.DeleteToken(token)
142 js_type.tokens.remove(token)
143 self._AddFix(token)
144 break
145
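
Note: the helper above rewrites the JsDoc type tokens in place; as a plain-string illustration of the rule it implements, 'number|null' (or 'null|number') becomes '?number'. A simplified sketch that works on the type text only and ignores the nested type groups handled by the real code:

    import re

    def prefer_question_to_pipe_null(js_type_text):
      # 'number|null' and 'null|number' both become '?number';
      # anything else is returned unchanged.
      match = re.match(r'^null\|(.+)$|^(.+)\|null$', js_type_text)
      if match:
        return '?' + (match.group(1) or match.group(2))
      return js_type_text
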
97 def HandleError(self, error): 146 def HandleError(self, error):
98 """Attempts to fix the error. 147 """Attempts to fix the error.
99 148
100 Args: 149 Args:
101 error: The error object 150 error: The error object
102 """ 151 """
103 code = error.code 152 code = error.code
104 token = error.token 153 token = error.token
105 154
155 if self._fix_error_codes and code not in self._fix_error_codes:
156 return
157
106 if code == errors.JSDOC_PREFER_QUESTION_TO_PIPE_NULL: 158 if code == errors.JSDOC_PREFER_QUESTION_TO_PIPE_NULL:
107 iterator = token.attached_object.type_start_token 159 self._FixJsDocPipeNull(token.attached_object.jstype)
108 if iterator.type == Type.DOC_START_BRACE or iterator.string.isspace():
109 iterator = iterator.next
110
111 leading_space = len(iterator.string) - len(iterator.string.lstrip())
112 iterator.string = '%s?%s' % (' ' * leading_space,
113 iterator.string.lstrip())
114
115 # Cover the no outer brace case where the end token is part of the type.
116 while iterator and iterator != token.attached_object.type_end_token.next:
117 iterator.string = iterator.string.replace(
118 'null|', '').replace('|null', '')
119 iterator = iterator.next
120
121 # Create a new flag object with updated type info.
122 token.attached_object = javascriptstatetracker.JsDocFlag(token)
123 self._AddFix(token)
124 160
125 elif code == errors.JSDOC_MISSING_OPTIONAL_TYPE: 161 elif code == errors.JSDOC_MISSING_OPTIONAL_TYPE:
126 iterator = token.attached_object.type_end_token 162 iterator = token.attached_object.type_end_token
127 if iterator.type == Type.DOC_END_BRACE or iterator.string.isspace(): 163 if iterator.type == Type.DOC_END_BRACE or iterator.string.isspace():
128 iterator = iterator.previous 164 iterator = iterator.previous
129 165
130 ending_space = len(iterator.string) - len(iterator.string.rstrip()) 166 ending_space = len(iterator.string) - len(iterator.string.rstrip())
131 iterator.string = '%s=%s' % (iterator.string.rstrip(), 167 iterator.string = '%s=%s' % (iterator.string.rstrip(),
132 ' ' * ending_space) 168 ' ' * ending_space)
133 169
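
Note: this branch appends '=' to the end of the documented type, which is how JsDoc marks an optional parameter: an opt_-prefixed argument documented as {string} becomes {string=}. A plain-string sketch of the same rule, not the token-based fix above:

    def mark_type_optional(js_type_text):
      # '{string}' -> '{string=}', '{?Object}' -> '{?Object=}'
      stripped = js_type_text.rstrip()
      if stripped.endswith('}'):
        return stripped[:-1].rstrip() + '=}'
      return stripped + '='
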
(...skipping 144 matching lines...)
278 len(last_type.string) - trailing_space) 314 len(last_type.string) - trailing_space)
279 315
280 new_token = Token('}', Type.DOC_END_BRACE, last_type.line, 316 new_token = Token('}', Type.DOC_END_BRACE, last_type.line,
281 last_type.line_number) 317 last_type.line_number)
282 tokenutil.InsertTokenAfter(new_token, last_type) 318 tokenutil.InsertTokenAfter(new_token, last_type)
283 token.attached_object.type_end_token = new_token 319 token.attached_object.type_end_token = new_token
284 fixed_tokens.append(new_token) 320 fixed_tokens.append(new_token)
285 321
286 self._AddFix(fixed_tokens) 322 self._AddFix(fixed_tokens)
287 323
324 elif code == errors.LINE_STARTS_WITH_OPERATOR:
325 # Remove whitespace following the operator so the line starts clean.
326 self._StripSpace(token, before=False)
327
328 # Remove the operator.
329 tokenutil.DeleteToken(token)
330 self._AddFix(token)
331
332 insertion_point = tokenutil.GetPreviousCodeToken(token)
333
334 # Insert a space between the previous token and the new operator.
335 space = Token(' ', Type.WHITESPACE, insertion_point.line,
336 insertion_point.line_number)
337 tokenutil.InsertTokenAfter(space, insertion_point)
338
339 # Insert the operator on the end of the previous line.
340 new_token = Token(token.string, token.type, insertion_point.line,
341 insertion_point.line_number)
342 tokenutil.InsertTokenAfter(new_token, space)
343 self._AddFix(new_token)
344
345 elif code == errors.LINE_ENDS_WITH_DOT:
346 # Remove whitespace preceding the operator to remove trailing whitespace.
347 self._StripSpace(token, before=True)
348
349 # Remove the dot.
350 tokenutil.DeleteToken(token)
351 self._AddFix(token)
352
353 insertion_point = tokenutil.GetNextCodeToken(token)
354
355 # Insert the dot at the beginning of the next line of code.
356 new_token = Token(token.string, token.type, insertion_point.line,
357 insertion_point.line_number)
358 tokenutil.InsertTokenBefore(new_token, insertion_point)
359 self._AddFix(new_token)
360
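
Note: both new branches move a token across a line break: a leading operator is re-emitted at the end of the previous code line, and a trailing dot is re-emitted at the start of the next one. A simplified, line-based sketch of the operator case (the real fix manipulates the token stream rather than raw lines):

    def move_leading_operator(prev_line, curr_line):
      # 'foo'  /  '+ bar;'   ->   'foo +'  /  'bar;'
      parts = curr_line.lstrip().split(None, 1)
      operator = parts[0]
      rest = parts[1] if len(parts) > 1 else ''
      return prev_line.rstrip() + ' ' + operator, rest
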
288 elif code == errors.GOOG_REQUIRES_NOT_ALPHABETIZED: 361 elif code == errors.GOOG_REQUIRES_NOT_ALPHABETIZED:
289 require_start_token = error.fix_data 362 require_start_token = error.fix_data
290 sorter = requireprovidesorter.RequireProvideSorter() 363 sorter = requireprovidesorter.RequireProvideSorter()
291 sorter.FixRequires(require_start_token) 364 sorter.FixRequires(require_start_token)
292 365
293 self._AddFix(require_start_token) 366 self._AddFix(require_start_token)
294 367
295 elif code == errors.GOOG_PROVIDES_NOT_ALPHABETIZED: 368 elif code == errors.GOOG_PROVIDES_NOT_ALPHABETIZED:
296 provide_start_token = error.fix_data 369 provide_start_token = error.fix_data
297 sorter = requireprovidesorter.RequireProvideSorter() 370 sorter = requireprovidesorter.RequireProvideSorter()
(...skipping 70 matching lines...)
368 comment_token = Token(' goog.scope', Type.COMMENT, token.line, 441 comment_token = Token(' goog.scope', Type.COMMENT, token.line,
369 token.line_number) 442 token.line_number)
370 insertion_tokens = [whitespace_token, start_comment_token, 443 insertion_tokens = [whitespace_token, start_comment_token,
371 comment_token] 444 comment_token]
372 445
373 tokenutil.InsertTokensAfter(insertion_tokens, token.next.next) 446 tokenutil.InsertTokensAfter(insertion_tokens, token.next.next)
374 self._AddFix(removed_tokens + insertion_tokens) 447 self._AddFix(removed_tokens + insertion_tokens)
375 448
376 elif code in [errors.EXTRA_GOOG_PROVIDE, errors.EXTRA_GOOG_REQUIRE]: 449 elif code in [errors.EXTRA_GOOG_PROVIDE, errors.EXTRA_GOOG_REQUIRE]:
377 tokens_in_line = tokenutil.GetAllTokensInSameLine(token) 450 tokens_in_line = tokenutil.GetAllTokensInSameLine(token)
378 self._DeleteTokens(tokens_in_line[0], len(tokens_in_line)) 451 num_delete_tokens = len(tokens_in_line)
452 # If the line being deleted is preceded and followed by blank lines, then
453 # also delete one blank line.
454 if (tokens_in_line[0].previous and tokens_in_line[-1].next
455 and tokens_in_line[0].previous.type == Type.BLANK_LINE
456 and tokens_in_line[-1].next.type == Type.BLANK_LINE):
457 num_delete_tokens += 1
458 self._DeleteTokens(tokens_in_line[0], num_delete_tokens)
379 self._AddFix(tokens_in_line) 459 self._AddFix(tokens_in_line)
380 460
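
Note: the change to this branch also removes one adjacent blank line when the deleted goog.provide/goog.require sits between two blank lines, so the deletion does not leave a double blank. A list-of-lines sketch of that rule, illustrative only:

    def delete_statement_line(lines, index):
      # Delete lines[index]; if it was surrounded by blank lines on both
      # sides, also delete the following (now adjacent) blank line.
      surrounded = (0 < index < len(lines) - 1
                    and not lines[index - 1].strip()
                    and not lines[index + 1].strip())
      del lines[index]
      if surrounded:
        del lines[index]
      return lines
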
381 elif code in [errors.MISSING_GOOG_PROVIDE, errors.MISSING_GOOG_REQUIRE]: 461 elif code in [errors.MISSING_GOOG_PROVIDE, errors.MISSING_GOOG_REQUIRE]:
382 is_provide = code == errors.MISSING_GOOG_PROVIDE 462 missing_namespaces = error.fix_data[0]
383 is_require = code == errors.MISSING_GOOG_REQUIRE 463 need_blank_line = error.fix_data[1] or (not token.previous)
384 464
385 missing_namespaces = error.fix_data[0] 465 insert_location = Token('', Type.NORMAL, '', token.line_number - 1)
386 need_blank_line = error.fix_data[1] 466 dummy_first_token = insert_location
467 tokenutil.InsertTokenBefore(insert_location, token)
387 468
388 if need_blank_line is None: 469 # If inserting a blank line, check that a blank line does not already
389 # TODO(user): This happens when there are no existing 470 # exist before the token, to avoid extra blank lines.
390 # goog.provide or goog.require statements to position new statements 471 if (need_blank_line and insert_location.previous
391 # relative to. Consider handling this case with a heuristic. 472 and insert_location.previous.type != Type.BLANK_LINE):
392 return
393
394 insert_location = token.previous
395
396 # If inserting a missing require with no existing requires, insert a
397 # blank line first.
398 if need_blank_line and is_require:
399 tokenutil.InsertBlankLineAfter(insert_location) 473 tokenutil.InsertBlankLineAfter(insert_location)
400 insert_location = insert_location.next 474 insert_location = insert_location.next
401 475
402 for missing_namespace in missing_namespaces: 476 for missing_namespace in missing_namespaces:
403 new_tokens = self._GetNewRequireOrProvideTokens( 477 new_tokens = self._GetNewRequireOrProvideTokens(
404 is_provide, missing_namespace, insert_location.line_number + 1) 478 code == errors.MISSING_GOOG_PROVIDE,
479 missing_namespace, insert_location.line_number + 1)
405 tokenutil.InsertLineAfter(insert_location, new_tokens) 480 tokenutil.InsertLineAfter(insert_location, new_tokens)
406 insert_location = new_tokens[-1] 481 insert_location = new_tokens[-1]
407 self._AddFix(new_tokens) 482 self._AddFix(new_tokens)
408 483
409 # If inserting a missing provide with no existing provides, insert a 484 # If inserting a blank line, check that a blank line does not already
410 # blank line after. 485 # exist after the token, to avoid extra blank lines.
411 if need_blank_line and is_provide: 486 if (need_blank_line and insert_location.next
487 and insert_location.next.type != Type.BLANK_LINE):
412 tokenutil.InsertBlankLineAfter(insert_location) 488 tokenutil.InsertBlankLineAfter(insert_location)
413 489
490 tokenutil.DeleteToken(dummy_first_token)
491
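
Note: the rewritten missing-provide/require branch inserts a dummy token before the reported token so there is always a stable insertion point (even when the token is the first in the file), adds the new statements plus any needed surrounding blank lines, and finally deletes the dummy. A list-based sketch of the insertion rule, with the token machinery left out:

    def insert_missing_statements(lines, index, new_lines, need_blank_line):
      # Insert new_lines just before lines[index]; add a separating blank
      # line before and after only where one does not already exist.
      insert_at = index
      if need_blank_line and (insert_at == 0 or lines[insert_at - 1].strip()):
        lines.insert(insert_at, '')
        insert_at += 1
      for new_line in new_lines:
        lines.insert(insert_at, new_line)
        insert_at += 1
      if need_blank_line and (insert_at >= len(lines) or lines[insert_at].strip()):
        lines.insert(insert_at, '')
      return lines
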
492 def _StripSpace(self, token, before):
493 """Strip whitespace tokens either preceding or following the given token.
494
495 Args:
496 token: The token.
497 before: If true, strip space before the token, if false, after it.
498 """
499 token = token.previous if before else token.next
500 while token and token.type == Type.WHITESPACE:
501 tokenutil.DeleteToken(token)
502 token = token.previous if before else token.next
503
414 def _GetNewRequireOrProvideTokens(self, is_provide, namespace, line_number): 504 def _GetNewRequireOrProvideTokens(self, is_provide, namespace, line_number):
415 """Returns a list of tokens to create a goog.require/provide statement. 505 """Returns a list of tokens to create a goog.require/provide statement.
416 506
417 Args: 507 Args:
418 is_provide: True if getting tokens for a provide, False for require. 508 is_provide: True if getting tokens for a provide, False for require.
419 namespace: The required or provided namespaces to get tokens for. 509 namespace: The required or provided namespaces to get tokens for.
420 line_number: The line number the new require or provide statement will be 510 line_number: The line number the new require or provide statement will be
421 on. 511 on.
422 512
423 Returns: 513 Returns:
(...skipping 97 matching lines...)
521 print 'WARNING: Line %d of %s is now longer than 80 characters.' % ( 611 print 'WARNING: Line %d of %s is now longer than 80 characters.' % (
522 token.line_number, self._file_name) 612 token.line_number, self._file_name)
523 613
524 char_count = 0 614 char_count = 0
525 615
526 token = token.next 616 token = token.next
527 617
528 if not self._external_file: 618 if not self._external_file:
529 # Close the file if we created it 619 # Close the file if we created it
530 f.close() 620 f.close()