Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(1)

Side by Side Diff: third_party/WebKit/Tools/Scripts/webkitpy/common/checkout/baselineoptimizer.py

Issue 2130093003: Fix pylint warnings in webkitpy/common/. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Rebased Created 4 years, 4 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « no previous file | third_party/WebKit/Tools/Scripts/webkitpy/common/checkout/baselineoptimizer_unittest.py » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
OLDNEW
1 # Copyright (C) 2011, Google Inc. All rights reserved. 1 # Copyright (C) 2011, Google Inc. All rights reserved.
2 # 2 #
3 # Redistribution and use in source and binary forms, with or without 3 # Redistribution and use in source and binary forms, with or without
4 # modification, are permitted provided that the following conditions are 4 # modification, are permitted provided that the following conditions are
5 # met: 5 # met:
6 # 6 #
7 # * Redistributions of source code must retain the above copyright 7 # * Redistributions of source code must retain the above copyright
8 # notice, this list of conditions and the following disclaimer. 8 # notice, this list of conditions and the following disclaimer.
9 # * Redistributions in binary form must reproduce the above 9 # * Redistributions in binary form must reproduce the above
10 # copyright notice, this list of conditions and the following disclaimer 10 # copyright notice, this list of conditions and the following disclaimer
(...skipping 12 matching lines...) Expand all
23 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 23 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
24 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 24 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
25 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 25 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
26 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 26 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
27 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 27 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28 28
29 import copy 29 import copy
30 import logging 30 import logging
31 31
32 from webkitpy.common.memoized import memoized 32 from webkitpy.common.memoized import memoized
33 from functools import reduce
34 33
35 _log = logging.getLogger(__name__) 34 _log = logging.getLogger(__name__)
36 35
37 36
38 # FIXME: Should this function be somewhere more general? 37 # FIXME: Should this function be somewhere more general?
39 def _invert_dictionary(dictionary): 38 def _invert_dictionary(dictionary):
40 inverted_dictionary = {} 39 inverted_dictionary = {}
41 for key, value in dictionary.items(): 40 for key, value in dictionary.items():
42 if inverted_dictionary.get(value): 41 if inverted_dictionary.get(value):
43 inverted_dictionary[value].append(key) 42 inverted_dictionary[value].append(key)
44 else: 43 else:
45 inverted_dictionary[value] = [key] 44 inverted_dictionary[value] = [key]
46 return inverted_dictionary 45 return inverted_dictionary
47 46
48 47
49 class BaselineOptimizer(object): 48 class BaselineOptimizer(object):
50 ROOT_LAYOUT_TESTS_DIRECTORY = 'LayoutTests' 49 ROOT_LAYOUT_TESTS_DIRECTORY = 'LayoutTests'
51 50
52 def __init__(self, host, port, port_names, skip_scm_commands): 51 def __init__(self, host, port, port_names, skip_scm_commands):
53 self._filesystem = host.filesystem 52 self._filesystem = host.filesystem
54 self._skip_scm_commands = skip_scm_commands 53 self._skip_scm_commands = skip_scm_commands
55 self._files_to_delete = [] 54 self.files_to_delete = []
56 self._files_to_add = [] 55 self.files_to_add = []
57 self._scm = host.scm() 56 self._scm = host.scm()
58 self._default_port = port 57 self._default_port = port
59 self._ports = {} 58 self._ports = {}
60 for port_name in port_names: 59 for port_name in port_names:
61 self._ports[port_name] = host.port_factory.get(port_name) 60 self._ports[port_name] = host.port_factory.get(port_name)
62 61
63 self._webkit_base = port.webkit_base() 62 self._webkit_base = port.webkit_base()
64 self._layout_tests_dir = port.layout_tests_dir() 63 self._layout_tests_dir = port.layout_tests_dir()
65 64
66 # Only used by unittests. 65 # Only used by unittests.
(...skipping 64 matching lines...) Expand 10 before | Expand all | Expand 10 after
131 directories.add(directory) 130 directories.add(directory)
132 return directories 131 return directories
133 132
134 def _optimize_result_for_root(self, new_results_by_directory, baseline_name) : 133 def _optimize_result_for_root(self, new_results_by_directory, baseline_name) :
135 # The root directory (i.e. LayoutTests) is the only one that doesn't cor respond 134 # The root directory (i.e. LayoutTests) is the only one that doesn't cor respond
136 # to a specific platform. As such, it's the only one where the baseline in fallback directories 135 # to a specific platform. As such, it's the only one where the baseline in fallback directories
137 # immediately before it can be promoted up, i.e. if win and mac 136 # immediately before it can be promoted up, i.e. if win and mac
138 # have the same baseline, then it can be promoted up to be the LayoutTes ts baseline. 137 # have the same baseline, then it can be promoted up to be the LayoutTes ts baseline.
139 # All other baselines can only be removed if they're redundant with a ba seline earlier 138 # All other baselines can only be removed if they're redundant with a ba seline earlier
 140 # in the fallback order. They can never be promoted up. 139 # in the fallback order. They can never be promoted up.
141 directories_immediately_preceding_root = self._directories_immediately_p receding_root(baseline_name) 140 directories_preceding_root = self._directories_immediately_preceding_roo t(baseline_name)
142 141
143 shared_result = None 142 shared_result = None
144 root_baseline_unused = False 143 root_baseline_unused = False
145 for directory in directories_immediately_preceding_root: 144 for directory in directories_preceding_root:
146 this_result = new_results_by_directory.get(directory) 145 this_result = new_results_by_directory.get(directory)
147 146
148 # If any of these directories don't have a baseline, there's no opti mization we can do. 147 # If any of these directories don't have a baseline, there's no opti mization we can do.
149 if not this_result: 148 if not this_result:
150 return 149 return
151 150
152 if not shared_result: 151 if not shared_result:
153 shared_result = this_result 152 shared_result = this_result
154 elif shared_result != this_result: 153 elif shared_result != this_result:
155 root_baseline_unused = True 154 root_baseline_unused = True
156 155
157 baseline_root = self._baseline_root(baseline_name) 156 baseline_root = self._baseline_root(baseline_name)
158 157
159 # The root baseline is unused if all the directories immediately precedi ng the root 158 # The root baseline is unused if all the directories immediately precedi ng the root
160 # have a baseline, but have different baselines, so the baselines can't be promoted up. 159 # have a baseline, but have different baselines, so the baselines can't be promoted up.
161 if root_baseline_unused: 160 if root_baseline_unused:
162 if baseline_root in new_results_by_directory: 161 if baseline_root in new_results_by_directory:
163 del new_results_by_directory[baseline_root] 162 del new_results_by_directory[baseline_root]
164 return 163 return
165 164
166 new_results_by_directory[baseline_root] = shared_result 165 new_results_by_directory[baseline_root] = shared_result
167 for directory in directories_immediately_preceding_root: 166 for directory in directories_preceding_root:
168 del new_results_by_directory[directory] 167 del new_results_by_directory[directory]
169 168
170 def _find_optimal_result_placement(self, baseline_name): 169 def _find_optimal_result_placement(self, baseline_name):
171 results_by_directory = self.read_results_by_directory(baseline_name) 170 results_by_directory = self.read_results_by_directory(baseline_name)
172 results_by_port_name = self._results_by_port_name(results_by_directory, baseline_name) 171 results_by_port_name = self._results_by_port_name(results_by_directory, baseline_name)
173 port_names_by_result = _invert_dictionary(results_by_port_name)
174 172
175 new_results_by_directory = self._remove_redundant_results( 173 new_results_by_directory = self._remove_redundant_results(
176 results_by_directory, results_by_port_name, port_names_by_result, ba seline_name) 174 results_by_directory, results_by_port_name, baseline_name)
177 self._optimize_result_for_root(new_results_by_directory, baseline_name) 175 self._optimize_result_for_root(new_results_by_directory, baseline_name)
178 176
179 return results_by_directory, new_results_by_directory 177 return results_by_directory, new_results_by_directory
180 178
181 def _remove_redundant_results(self, results_by_directory, results_by_port_na me, port_names_by_result, baseline_name): 179 def _remove_redundant_results(self, results_by_directory, results_by_port_na me, baseline_name):
182 new_results_by_directory = copy.copy(results_by_directory) 180 new_results_by_directory = copy.copy(results_by_directory)
183 for port_name, port in self._ports.items(): 181 for port_name, port in self._ports.items():
184 current_result = results_by_port_name.get(port_name) 182 current_result = results_by_port_name.get(port_name)
185 183
186 # This happens if we're missing baselines for a port. 184 # This happens if we're missing baselines for a port.
187 if not current_result: 185 if not current_result:
188 continue 186 continue
189 187
190 fallback_path = self._relative_baseline_search_paths(port, baseline_ name) 188 fallback_path = self._relative_baseline_search_paths(port, baseline_ name)
191 current_index, current_directory = self._find_in_fallbackpath(fallba ck_path, current_result, new_results_by_directory) 189 current_index, current_directory = self._find_in_fallbackpath(fallba ck_path, current_result, new_results_by_directory)
(...skipping 20 matching lines...) Expand all
212 210
213 def _platform(self, filename): 211 def _platform(self, filename):
214 platform_dir = self.ROOT_LAYOUT_TESTS_DIRECTORY + self._filesystem.sep + 'platform' + self._filesystem.sep 212 platform_dir = self.ROOT_LAYOUT_TESTS_DIRECTORY + self._filesystem.sep + 'platform' + self._filesystem.sep
215 if filename.startswith(platform_dir): 213 if filename.startswith(platform_dir):
216 return filename.replace(platform_dir, '').split(self._filesystem.sep )[0] 214 return filename.replace(platform_dir, '').split(self._filesystem.sep )[0]
217 platform_dir = self._filesystem.join(self._webkit_base, platform_dir) 215 platform_dir = self._filesystem.join(self._webkit_base, platform_dir)
218 if filename.startswith(platform_dir): 216 if filename.startswith(platform_dir):
219 return filename.replace(platform_dir, '').split(self._filesystem.sep )[0] 217 return filename.replace(platform_dir, '').split(self._filesystem.sep )[0]
220 return '(generic)' 218 return '(generic)'
221 219
222 def _move_baselines(self, baseline_name, results_by_directory, new_results_b y_directory): 220 def move_baselines(self, baseline_name, results_by_directory, new_results_by _directory):
223 data_for_result = {} 221 data_for_result = {}
224 for directory, result in results_by_directory.items(): 222 for directory, result in results_by_directory.items():
225 if not result in data_for_result: 223 if not result in data_for_result:
226 source = self._join_directory(directory, baseline_name) 224 source = self._join_directory(directory, baseline_name)
227 data_for_result[result] = self._filesystem.read_binary_file(sour ce) 225 data_for_result[result] = self._filesystem.read_binary_file(sour ce)
228 226
229 scm_files = [] 227 scm_files = []
230 fs_files = [] 228 fs_files = []
231 for directory, result in results_by_directory.items(): 229 for directory, result in results_by_directory.items():
232 if new_results_by_directory.get(directory) != result: 230 if new_results_by_directory.get(directory) != result:
233 file_name = self._join_directory(directory, baseline_name) 231 file_name = self._join_directory(directory, baseline_name)
234 if self._scm.exists(file_name): 232 if self._scm.exists(file_name):
235 scm_files.append(file_name) 233 scm_files.append(file_name)
236 elif self._filesystem.exists(file_name): 234 elif self._filesystem.exists(file_name):
237 fs_files.append(file_name) 235 fs_files.append(file_name)
238 236
239 if scm_files or fs_files: 237 if scm_files or fs_files:
240 if scm_files: 238 if scm_files:
241 _log.debug(" Deleting (SCM):") 239 _log.debug(" Deleting (SCM):")
242 for platform_dir in sorted(self._platform(filename) for filename in scm_files): 240 for platform_dir in sorted(self._platform(filename) for filename in scm_files):
243 _log.debug(" " + platform_dir) 241 _log.debug(" " + platform_dir)
244 if self._skip_scm_commands: 242 if self._skip_scm_commands:
245 self._files_to_delete.extend(scm_files) 243 self.files_to_delete.extend(scm_files)
246 else: 244 else:
247 self._scm.delete_list(scm_files) 245 self._scm.delete_list(scm_files)
248 if fs_files: 246 if fs_files:
249 _log.debug(" Deleting (file system):") 247 _log.debug(" Deleting (file system):")
250 for platform_dir in sorted(self._platform(filename) for filename in fs_files): 248 for platform_dir in sorted(self._platform(filename) for filename in fs_files):
251 _log.debug(" " + platform_dir) 249 _log.debug(" " + platform_dir)
252 for filename in fs_files: 250 for filename in fs_files:
253 self._filesystem.remove(filename) 251 self._filesystem.remove(filename)
254 else: 252 else:
255 _log.debug(" (Nothing to delete)") 253 _log.debug(" (Nothing to delete)")
256 254
257 file_names = [] 255 file_names = []
258 for directory, result in new_results_by_directory.items(): 256 for directory, result in new_results_by_directory.items():
259 if results_by_directory.get(directory) != result: 257 if results_by_directory.get(directory) != result:
260 destination = self._join_directory(directory, baseline_name) 258 destination = self._join_directory(directory, baseline_name)
261 self._filesystem.maybe_make_directory(self._filesystem.split(des tination)[0]) 259 self._filesystem.maybe_make_directory(self._filesystem.split(des tination)[0])
262 self._filesystem.write_binary_file(destination, data_for_result[ result]) 260 self._filesystem.write_binary_file(destination, data_for_result[ result])
263 file_names.append(destination) 261 file_names.append(destination)
264 262
265 if file_names: 263 if file_names:
266 _log.debug(" Adding:") 264 _log.debug(" Adding:")
267 for platform_dir in sorted(self._platform(filename) for filename in file_names): 265 for platform_dir in sorted(self._platform(filename) for filename in file_names):
268 _log.debug(" " + platform_dir) 266 _log.debug(" " + platform_dir)
269 if self._skip_scm_commands: 267 if self._skip_scm_commands:
270 # Have adds win over deletes. 268 # Have adds win over deletes.
271 self._files_to_delete = list(set(self._files_to_delete) - set(fi le_names)) 269 self.files_to_delete = list(set(self.files_to_delete) - set(file _names))
272 self._files_to_add.extend(file_names) 270 self.files_to_add.extend(file_names)
273 else: 271 else:
274 self._scm.add_list(file_names) 272 self._scm.add_list(file_names)
275 else: 273 else:
276 _log.debug(" (Nothing to add)") 274 _log.debug(" (Nothing to add)")
277 275
278 def write_by_directory(self, results_by_directory, writer, indent): 276 def write_by_directory(self, results_by_directory, writer, indent):
279 for path in sorted(results_by_directory): 277 for path in sorted(results_by_directory):
280 writer("%s%s: %s" % (indent, self._platform(path), results_by_direct ory[path][0:6])) 278 writer("%s%s: %s" % (indent, self._platform(path), results_by_direct ory[path][0:6]))
281 279
282 def _optimize_subtree(self, baseline_name): 280 def _optimize_subtree(self, baseline_name):
(...skipping 17 matching lines...) Expand all
300 _log.error(" %s: optimization failed", basename) 298 _log.error(" %s: optimization failed", basename)
301 self.write_by_directory(results_by_directory, _log.warning, " " ) 299 self.write_by_directory(results_by_directory, _log.warning, " " )
302 return False 300 return False
303 301
304 _log.debug(" %s:", basename) 302 _log.debug(" %s:", basename)
305 _log.debug(" Before: ") 303 _log.debug(" Before: ")
306 self.write_by_directory(results_by_directory, _log.debug, " ") 304 self.write_by_directory(results_by_directory, _log.debug, " ")
307 _log.debug(" After: ") 305 _log.debug(" After: ")
308 self.write_by_directory(new_results_by_directory, _log.debug, " ") 306 self.write_by_directory(new_results_by_directory, _log.debug, " ")
309 307
310 self._move_baselines(baseline_name, results_by_directory, new_results_by _directory) 308 self.move_baselines(baseline_name, results_by_directory, new_results_by_ directory)
311 return True 309 return True
312 310
313 def _optimize_virtual_root(self, baseline_name, non_virtual_baseline_name): 311 def _optimize_virtual_root(self, baseline_name, non_virtual_baseline_name):
314 virtual_root_expected_baseline_path = self._filesystem.join(self._layout _tests_dir, baseline_name) 312 virtual_root_baseline_path = self._filesystem.join(self._layout_tests_di r, baseline_name)
315 if not self._filesystem.exists(virtual_root_expected_baseline_path): 313 if not self._filesystem.exists(virtual_root_baseline_path):
316 return 314 return
317 root_sha1 = self._filesystem.sha1(virtual_root_expected_baseline_path) 315 root_sha1 = self._filesystem.sha1(virtual_root_baseline_path)
318 316
319 results_by_directory = self.read_results_by_directory(non_virtual_baseli ne_name) 317 results_by_directory = self.read_results_by_directory(non_virtual_baseli ne_name)
320 # See if all the immediate predecessors of the virtual root have the sam e expected result. 318 # See if all the immediate predecessors of the virtual root have the sam e expected result.
321 for port in self._ports.values(): 319 for port in self._ports.values():
322 directories = self._relative_baseline_search_paths(port, non_virtual _baseline_name) 320 directories = self._relative_baseline_search_paths(port, non_virtual _baseline_name)
323 for directory in directories: 321 for directory in directories:
324 if directory not in results_by_directory: 322 if directory not in results_by_directory:
325 continue 323 continue
326 if results_by_directory[directory] != root_sha1: 324 if results_by_directory[directory] != root_sha1:
327 return 325 return
328 break 326 break
329 327
330 _log.debug("Deleting redundant virtual root expected result.") 328 _log.debug("Deleting redundant virtual root expected result.")
331 if self._skip_scm_commands and virtual_root_expected_baseline_path in se lf._files_to_add: 329 if self._skip_scm_commands and virtual_root_baseline_path in self.files_ to_add:
332 self._files_to_add.remove(virtual_root_expected_baseline_path) 330 self.files_to_add.remove(virtual_root_baseline_path)
333 if self._scm.exists(virtual_root_expected_baseline_path): 331 if self._scm.exists(virtual_root_baseline_path):
334 _log.debug(" Deleting (SCM): " + virtual_root_expected_baseline_p ath) 332 _log.debug(" Deleting (SCM): " + virtual_root_baseline_path)
335 if self._skip_scm_commands: 333 if self._skip_scm_commands:
336 self._files_to_delete.append(virtual_root_expected_baseline_path ) 334 self.files_to_delete.append(virtual_root_baseline_path)
337 else: 335 else:
338 self._scm.delete(virtual_root_expected_baseline_path) 336 self._scm.delete(virtual_root_baseline_path)
339 else: 337 else:
340 _log.debug(" Deleting (file system): " + virtual_root_expected_ba seline_path) 338 _log.debug(" Deleting (file system): " + virtual_root_baseline_pa th)
341 self._filesystem.remove(virtual_root_expected_baseline_path) 339 self._filesystem.remove(virtual_root_baseline_path)
342 340
343 def optimize(self, baseline_name): 341 def optimize(self, baseline_name):
344 # The virtual fallback path is the same as the non-virtual one tacked on to the bottom of the non-virtual path. 342 # The virtual fallback path is the same as the non-virtual one tacked on to the bottom of the non-virtual path.
345 # See https://docs.google.com/a/chromium.org/drawings/d/1eGdsIKzJ2dxDDBb UaIABrN4aMLD1bqJTfyxNGZsTdmg/edit for 343 # See https://docs.google.com/a/chromium.org/drawings/d/1eGdsIKzJ2dxDDBb UaIABrN4aMLD1bqJTfyxNGZsTdmg/edit for
346 # a visual representation of this. 344 # a visual representation of this.
347 # 345 #
348 # So, we can optimize the virtual path, then the virtual root and then t he regular path. 346 # So, we can optimize the virtual path, then the virtual root and then t he regular path.
349 347
350 self._files_to_delete = [] 348 self.files_to_delete = []
351 self._files_to_add = [] 349 self.files_to_add = []
352 _log.debug("Optimizing regular fallback path.") 350 _log.debug("Optimizing regular fallback path.")
353 result = self._optimize_subtree(baseline_name) 351 result = self._optimize_subtree(baseline_name)
354 non_virtual_baseline_name = self._virtual_base(baseline_name) 352 non_virtual_baseline_name = self._virtual_base(baseline_name)
355 if not non_virtual_baseline_name: 353 if not non_virtual_baseline_name:
356 return result, self._files_to_delete, self._files_to_add 354 return result, self.files_to_delete, self.files_to_add
357 355
358 self._optimize_virtual_root(baseline_name, non_virtual_baseline_name) 356 self._optimize_virtual_root(baseline_name, non_virtual_baseline_name)
359 357
360 _log.debug("Optimizing non-virtual fallback path.") 358 _log.debug("Optimizing non-virtual fallback path.")
361 result |= self._optimize_subtree(non_virtual_baseline_name) 359 result |= self._optimize_subtree(non_virtual_baseline_name)
362 return result, self._files_to_delete, self._files_to_add 360 return result, self.files_to_delete, self.files_to_add
OLDNEW
« no previous file with comments | « no previous file | third_party/WebKit/Tools/Scripts/webkitpy/common/checkout/baselineoptimizer_unittest.py » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698