OLD | NEW |
| (Empty) |
1 #!/usr/bin/env python | |
2 # | |
3 # Copyright 2011 The Closure Linter Authors. All Rights Reserved. | |
4 # | |
5 # Licensed under the Apache License, Version 2.0 (the "License"); | |
6 # you may not use this file except in compliance with the License. | |
7 # You may obtain a copy of the License at | |
8 # | |
9 # http://www.apache.org/licenses/LICENSE-2.0 | |
10 # | |
11 # Unless required by applicable law or agreed to in writing, software | |
12 # distributed under the License is distributed on an "AS-IS" BASIS, | |
13 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
14 # See the License for the specific language governing permissions and | |
15 # limitations under the License. | |
16 | |
17 """Contains logic for sorting goog.provide and goog.require statements. | |
18 | |
19 Closurized JavaScript files use goog.provide and goog.require statements at the | |
20 top of the file to manage dependencies. These statements should be sorted | |
21 alphabetically, however, it is common for them to be accompanied by inline | |
22 comments or suppression annotations. In order to sort these statements without | |
23 disrupting their comments and annotations, the association between statements | |
24 and comments/annotations must be maintained while sorting. | |
25 | |
26 RequireProvideSorter: Handles checking/fixing of provide/require statements. | |
27 """ | |
28 | |
29 | |
30 | |
31 from closure_linter import javascripttokens | |
32 from closure_linter import tokenutil | |
33 | |
# Shorthand: alias the token-type enum so checks below read as Type.FOO.
Type = javascripttokens.JavaScriptTokenType
36 | |
37 | |
class RequireProvideSorter(object):
  """Checks for and fixes alphabetization of provide and require statements.

  When alphabetizing, comments on the same line or comments directly above a
  goog.provide or goog.require statement are associated with that statement and
  stay with the statement as it gets sorted.
  """

  def CheckProvides(self, token):
    """Checks alphabetization of goog.provide statements.

    Iterates over tokens in given token stream, identifies goog.provide tokens,
    and checks that they occur in alphabetical order by the object being
    provided.

    Args:
      token: A token in the token stream before any goog.provide tokens.

    Returns:
      The first provide token in the token stream.

      None is returned if all goog.provide statements are already sorted.
    """
    provide_tokens = self._GetRequireOrProvideTokens(token, 'goog.provide')
    provide_strings = self._GetRequireOrProvideTokenStrings(provide_tokens)
    # Comparing against a sorted copy detects any out-of-order entry without
    # having to locate exactly which one is misplaced.
    sorted_provide_strings = sorted(provide_strings)
    if provide_strings != sorted_provide_strings:
      return provide_tokens[0]
    return None

  def CheckRequires(self, token):
    """Checks alphabetization of goog.require statements.

    Iterates over tokens in given token stream, identifies goog.require tokens,
    and checks that they occur in alphabetical order by the dependency being
    required.

    Args:
      token: A token in the token stream before any goog.require tokens.

    Returns:
      The first require token in the token stream.

      None is returned if all goog.require statements are already sorted.
    """
    require_tokens = self._GetRequireOrProvideTokens(token, 'goog.require')
    require_strings = self._GetRequireOrProvideTokenStrings(require_tokens)
    # Same detection strategy as CheckProvides: compare current order against
    # a sorted copy.
    sorted_require_strings = sorted(require_strings)
    if require_strings != sorted_require_strings:
      return require_tokens[0]
    return None

  def FixProvides(self, token):
    """Sorts goog.provide statements in the given token stream alphabetically.

    Args:
      token: The first token in the token stream.
    """
    self._FixProvidesOrRequires(
        self._GetRequireOrProvideTokens(token, 'goog.provide'))

  def FixRequires(self, token):
    """Sorts goog.require statements in the given token stream alphabetically.

    Args:
      token: The first token in the token stream.
    """
    self._FixProvidesOrRequires(
        self._GetRequireOrProvideTokens(token, 'goog.require'))

  def _FixProvidesOrRequires(self, tokens):
    """Sorts goog.provide or goog.require statements.

    Rewires the token stream in place: deletes the existing statement lines
    (together with their associated comments) and re-inserts them in
    alphabetical order by provided/required object name.

    Args:
      tokens: A list of goog.provide or goog.require tokens in the order they
          appear in the token stream. i.e. the first token in this list must
          be the first goog.provide or goog.require token.
    """
    # Capture the object names in their current (unsorted) order BEFORE any
    # tokens are deleted below; strings[-1] is needed later to find the first
    # token after the provide/require section.
    strings = self._GetRequireOrProvideTokenStrings(tokens)
    sorted_strings = sorted(strings)

    # Make a separate pass to remove any blank lines between goog.require/
    # goog.provide tokens.
    first_token = tokens[0]
    last_token = tokens[-1]
    i = last_token
    # Walk backwards so the portion of the stream not yet visited is
    # unaffected by deletions.  NOTE(review): this assumes
    # tokenutil.DeleteToken leaves the deleted token's .previous pointer
    # usable for continued traversal -- confirm against tokenutil.
    while i != first_token and i is not None:
      if i.type is Type.BLANK_LINE:
        tokenutil.DeleteToken(i)
      i = i.previous

    # A map from required/provided object name to tokens that make up the line
    # it was on, including any comments immediately before it or after it on the
    # same line.
    tokens_map = self._GetTokensMap(tokens)

    # Iterate over the map removing all tokens.
    for name in tokens_map:
      tokens_to_delete = tokens_map[name]
      for i in tokens_to_delete:
        tokenutil.DeleteToken(i)

    # Save token to rest of file. Sorted token will be inserted before this.
    # strings[-1] is the name from the last statement in ORIGINAL order, so
    # its last token's successor is the first token after the whole section.
    rest_of_file = tokens_map[strings[-1]][-1].next

    # Re-add all tokens in the map in alphabetical order.
    insert_after = tokens[0].previous
    for string in sorted_strings:
      for i in tokens_map[string]:
        if rest_of_file:
          tokenutil.InsertTokenBefore(i, rest_of_file)
        else:
          # The section ran to the end of the file (no successor token), so
          # append after the most recently inserted token instead.
          tokenutil.InsertTokenAfter(i, insert_after)
          insert_after = i

  def _GetRequireOrProvideTokens(self, token, token_string):
    """Gets all goog.provide or goog.require tokens in the given token stream.

    Args:
      token: The first token in the token stream.
      token_string: One of 'goog.provide' or 'goog.require' to indicate which
          tokens to find.

    Returns:
      A list of goog.provide or goog.require tokens in the order they appear in
      the token stream.
    """
    tokens = []
    while token:
      if token.type == Type.IDENTIFIER:
        if token.string == token_string:
          tokens.append(token)
        elif token.string not in [
            'goog.provide', 'goog.require', 'goog.setTestOnly']:
          # These 3 identifiers are at the top of the file. So if any other
          # identifier is encountered, return.
          # TODO(user): Once it's decided what ordering goog.require
          # should use, add 'goog.module' to the list above and implement the
          # decision.
          break
      token = token.next

    return tokens

  def _GetRequireOrProvideTokenStrings(self, tokens):
    """Gets a list of strings corresponding to the given list of tokens.

    The string will be the next string in the token stream after each token in
    tokens. This is used to find the object being provided/required by a given
    goog.provide or goog.require token.

    Tokens already flagged as deleted are skipped, so the returned list may be
    shorter than the input list.

    Args:
      tokens: A list of goog.provide or goog.require tokens.

    Returns:
      A list of object names that are being provided or required by the given
      list of tokens. For example:

      ['object.a', 'object.c', 'object.b']
    """
    token_strings = []
    for token in tokens:
      if not token.is_deleted:
        name = tokenutil.GetStringAfterToken(token)
        token_strings.append(name)
    return token_strings

  def _GetTokensMap(self, tokens):
    """Gets a map from object name to tokens associated with that object.

    Starting from the goog.provide/goog.require token, searches backwards in the
    token stream for any lines that start with a comment. These lines are
    associated with the goog.provide/goog.require token. Also associates any
    tokens on the same line as the goog.provide/goog.require token with that
    token.

    Args:
      tokens: A list of goog.provide or goog.require tokens.

    Returns:
      A dictionary that maps object names to the tokens associated with the
      goog.provide or goog.require of that object name. For example:

      {
        'object.a': [JavaScriptToken, JavaScriptToken, ...],
        'object.b': [...]
      }

      The list of tokens includes any comment lines above the goog.provide or
      goog.require statement and everything after the statement on the same
      line. For example, all of the following would be associated with
      'object.a':

      /** @suppress {extraRequire} */
      goog.require('object.a'); // Some comment.
    """
    tokens_map = {}
    for token in tokens:
      object_name = tokenutil.GetStringAfterToken(token)
      # If the previous line starts with a comment, presume that the comment
      # relates to the goog.require or goog.provide and keep them together when
      # sorting.
      first_token = token
      previous_first_token = tokenutil.GetFirstTokenInPreviousLine(first_token)
      # Extend upward over consecutive comment-starting lines so a multi-line
      # comment block stays attached to its statement.
      while (previous_first_token and
             previous_first_token.IsAnyType(Type.COMMENT_TYPES)):
        first_token = previous_first_token
        previous_first_token = tokenutil.GetFirstTokenInPreviousLine(
            first_token)

      # Find the last token on the line.
      last_token = tokenutil.GetLastTokenInSameLine(token)

      all_tokens = self._GetTokenList(first_token, last_token)
      tokens_map[object_name] = all_tokens
    return tokens_map

  def _GetTokenList(self, first_token, last_token):
    """Gets a list of all tokens from first_token to last_token, inclusive.

    Args:
      first_token: The first token to get.
      last_token: The last token to get.

    Returns:
      A list of all tokens between first_token and last_token, including both
      first_token and last_token.

    Raises:
      Exception: If the token stream ends before last_token is reached.
    """
    token_list = []
    token = first_token
    while token != last_token:
      if not token:
        # Reached the end of the stream without finding last_token; the two
        # tokens were not actually on the same chain in the expected order.
        raise Exception('ran out of tokens')
      token_list.append(token)
      token = token.next
    token_list.append(last_token)

    return token_list

  def GetFixedRequireString(self, token):
    """Get fixed/sorted order of goog.require statements.

    Unlike FixRequires, this does not modify the token stream; it only
    renders what the sorted section would look like.

    Args:
      token: The first token in the token stream.

    Returns:
      A string for correct sorted order of goog.require.
    """
    return self._GetFixedRequireOrProvideString(
        self._GetRequireOrProvideTokens(token, 'goog.require'))

  def GetFixedProvideString(self, token):
    """Get fixed/sorted order of goog.provide statements.

    Unlike FixProvides, this does not modify the token stream; it only
    renders what the sorted section would look like.

    Args:
      token: The first token in the token stream.

    Returns:
      A string for correct sorted order of goog.provide.
    """
    return self._GetFixedRequireOrProvideString(
        self._GetRequireOrProvideTokens(token, 'goog.provide'))

  def _GetFixedRequireOrProvideString(self, tokens):
    """Sorts goog.provide or goog.require statements.

    Args:
      tokens: A list of goog.provide or goog.require tokens in the order they
          appear in the token stream. i.e. the first token in this list must
          be the first goog.provide or goog.require token.

    Returns:
      A string for sorted goog.require or goog.provide statements
    """

    # A map from required/provided object name to tokens that make up the line
    # it was on, including any comments immediately before it or after it on the
    # same line.
    tokens_map = self._GetTokensMap(tokens)
    sorted_strings = sorted(tokens_map.keys())

    # Concatenate the raw token strings in sorted order, ending each source
    # line with a newline.  NOTE(review): assumes the token lists include
    # whitespace tokens so lines reassemble verbatim -- confirm in tokenutil.
    new_order = ''
    for string in sorted_strings:
      for i in tokens_map[string]:
        new_order += i.string
        if i.IsLastInLine():
          new_order += '\n'

    return new_order
OLD | NEW |