Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(372)

Side by Side Diff: third_party/WebKit/Tools/Scripts/webkitpy/w3c/test_importer.py

Issue 2656903002: Move test_importer -> test_copier, deps_updater -> test_importer. (Closed)
Patch Set: Created 3 years, 10 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 # Copyright (C) 2013 Adobe Systems Incorporated. All rights reserved. 1 # Copyright 2014 The Chromium Authors. All rights reserved.
2 # 2 # Use of this source code is governed by a BSD-style license that can be
3 # Redistribution and use in source and binary forms, with or without 3 # found in the LICENSE file.
4 # modification, are permitted provided that the following conditions 4
5 # are met: 5 """Fetches a copy of the latest state of a W3C test repository and commits.
6 # 6
7 # 1. Redistributions of source code must retain the above 7 If this script is given the argument --auto-update, it will also:
8 # copyright notice, this list of conditions and the following 8 1. Upload a CL.
9 # disclaimer. 9 2. Trigger try jobs and wait for them to complete.
10 # 2. Redistributions in binary form must reproduce the above 10 3. Make any changes that are required for new failing tests.
11 # copyright notice, this list of conditions and the following 11 4. Commit the CL.
12 # disclaimer in the documentation and/or other materials 12
13 # provided with the distribution. 13 If this script is given the argument --auto-update, it will also attempt to
14 # 14 upload a CL, trigger try jobs, and make any changes that are required for
15 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER "AS IS" AND ANY 15 new failing tests before committing.
16 # EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
17 # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
18 # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE
19 # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
20 # OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
21 # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
22 # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
23 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
24 # TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
25 # THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
26 # SUCH DAMAGE.
27
28 """Logic for converting and copying files from a W3C repo.
29
30 This module is responsible for modifying and copying a subset of the tests from
31 a local W3C repository source directory into a destination directory.
32 """ 16 """
33 17
18 import argparse
19 import json
34 import logging 20 import logging
35 import mimetypes
36 import os
37 import re 21 import re
38 22
23 from webkitpy.common.net.git_cl import GitCL
39 from webkitpy.common.webkit_finder import WebKitFinder 24 from webkitpy.common.webkit_finder import WebKitFinder
40 from webkitpy.layout_tests.models.test_expectations import TestExpectationParser 25 from webkitpy.layout_tests.models.test_expectations import TestExpectations, Tes tExpectationParser
41 from webkitpy.w3c.test_parser import TestParser 26 from webkitpy.w3c.update_w3c_test_expectations import W3CExpectationsLineAdder
42 from webkitpy.w3c.test_converter import convert_for_webkit 27 from webkitpy.w3c.test_copier import TestCopier
43 28 from webkitpy.w3c.common import WPT_REPO_URL, CSS_REPO_URL, WPT_DEST_NAME, CSS_D EST_NAME
44 # Maximum length of import path starting from top of source repository. 29
45 # This limit is here because the Windows builders cannot create paths that are 30 # Settings for how often to check try job results and how long to wait.
46 # longer than the Windows max path length (260). See http://crbug.com/609871. 31 POLL_DELAY_SECONDS = 2 * 60
47 MAX_PATH_LENGTH = 140 32 TIMEOUT_SECONDS = 180 * 60
48 33
49 _log = logging.getLogger(__name__) 34 _log = logging.getLogger(__file__)
50 35
51 36
def __init__(self, host):
    """Sets up the importer with handles taken from the given host.

    Args:
        host: A Host object providing executive and filesystem access.
    """
    self.host = host
    self.executive = host.executive
    self.fs = host.filesystem
    self.finder = WebKitFinder(self.fs)
    # Filled in from command-line options in main().
    self.verbose = False
    # Created in main() once auth options are known.
    self.git_cl = None
def main(self, argv=None):
    """Runs the import: clones the W3C repo, copies tests, and commits.

    Returns:
        0 on success or when there is nothing to import, 1 on failure.
    """
    options = self.parse_args(argv)
    self.verbose = options.verbose
    logging.basicConfig(
        level=(logging.DEBUG if self.verbose else logging.INFO),
        format='%(message)s')

    if not self.checkout_is_okay(options.allow_local_commits):
        return 1

    self.git_cl = GitCL(self.host, auth_refresh_token_json=options.auth_refresh_token_json)

    _log.info('Noting the current Chromium commit.')
    _, show_ref_output = self.run(['git', 'show-ref', 'HEAD'])
    chromium_commit = show_ref_output.split()[0]

    if options.target == 'wpt':
        import_commit = self.update(WPT_DEST_NAME, WPT_REPO_URL,
                                    options.keep_w3c_repos_around, options.revision)
        self._copy_resources()
    elif options.target == 'css':
        import_commit = self.update(CSS_DEST_NAME, CSS_REPO_URL,
                                    options.keep_w3c_repos_around, options.revision)
    else:
        # Unreachable in practice: argparse restricts the choices.
        raise AssertionError("Unsupported target %s" % options.target)

    if not self._has_changes():
        _log.info('Done: no changes to import.')
        return 0

    self._commit_changes(self._commit_message(chromium_commit, import_commit))
    _log.info('Done: changes imported and committed.')

    if options.auto_update and not self.do_auto_update():
        return 1
    return 0
84
def parse_args(self, argv):
    """Parses command-line arguments; returns an argparse Namespace."""
    parser = argparse.ArgumentParser()
    parser.description = __doc__
    add = parser.add_argument
    add('-v', '--verbose', action='store_true',
        help='log what we are doing')
    add('--allow-local-commits', action='store_true',
        help='allow script to run even if we have local commits')
    add('--keep-w3c-repos-around', action='store_true',
        help='leave the w3c repos around that were imported previously.')
    add('-r', dest='revision', action='store', help='Target revision.')
    add('target', choices=['css', 'wpt'],
        help='Target repository. "css" for csswg-test, "wpt" for web-platform-tests.')
    add('--auto-update', action='store_true',
        help='uploads CL and initiates commit queue.')
    add('--auth-refresh-token-json',
        help='Rietveld auth refresh JSON token.')
    return parser.parse_args(argv)
103
def checkout_is_okay(self, allow_local_commits):
    """Returns True if the Chromium checkout is in a state we can import into.

    Refuses to proceed when the checkout is dirty, has local commits
    (unless explicitly allowed), or when a leftover W3C repo clone from a
    previous run is still present (it would get committed by mistake).
    """
    dirty, _ = self.run(['git', 'diff', '--quiet', 'HEAD'], exit_on_failure=False)
    if dirty:
        _log.warning('Checkout is dirty; aborting.')
        return False

    _, local_commits = self.run(['git', 'log', '--oneline', 'origin/master..HEAD'])
    if local_commits and not allow_local_commits:
        _log.warning('Checkout has local commits; aborting. Use --allow-local-commits to allow this.')
        return False

    for dest_name, message in ((WPT_DEST_NAME, 'WebKit/%s exists; aborting.'),
                               (CSS_DEST_NAME, 'WebKit/%s repo exists; aborting.')):
        if self.fs.exists(self.path_from_webkit_base(dest_name)):
            _log.warning(message, dest_name)
            return False

    return True
124
def _copy_resources(self):
    """Copies resources from wpt to LayoutTests/resources.

    We copy idlharness.js and testharness.js in wpt to LayoutTests/resources
    in order to use them in non-imported tests.

    If this method is changed, the lists of files expected to be identical
    in LayoutTests/PRESUBMIT.py should also be changed.
    """
    for filename, wpt_subdir in (('idlharness.js', 'resources'),
                                 ('testharness.js', 'resources')):
        source = self.path_from_webkit_base(
            'LayoutTests', 'external', WPT_DEST_NAME, wpt_subdir, filename)
        destination = self.path_from_webkit_base('LayoutTests', 'resources', filename)
        self.copyfile(source, destination)
        self.run(['git', 'add', destination])
def _generate_manifest(self, dest_path):
    """Generates MANIFEST.json for imported tests.

    Args:
        dest_path: Path to the destination WPT directory.

    Runs the (newly-updated) manifest command if it's found, and then
    stages the generated MANIFEST.json in the git index, ready to commit.
    """
    # NOTE(review): this substring test assumes only the csswg-test
    # destination path contains 'css' — confirm against CSS_DEST_NAME.
    if 'css' in dest_path:
        # Do nothing for csswg-test.
        return
    manifest_command = self.finder.path_from_webkit_base(
        'Tools', 'Scripts', 'webkitpy', 'thirdparty', 'wpt', 'wpt', 'manifest')
    _log.info('Generating MANIFEST.json')
    self.run([manifest_command, '--work', '--tests-root', dest_path])
    self.run(['git', 'add', self.fs.join(dest_path, 'MANIFEST.json')])
67 self.webkit_finder = WebKitFinder(self.filesystem) 160
def update(self, dest_dir_name, url, keep_w3c_repos_around, revision):
    """Updates an imported repository.

    Args:
        dest_dir_name: The destination directory name.
        url: URL of the git repository.
        keep_w3c_repos_around: If True, the temporary clone is not deleted.
        revision: Commit hash or None.

    Returns:
        A string for the commit description "<destination>@<commitish>".
    """
    temp_repo_path = self.path_from_webkit_base(dest_dir_name)
    _log.info('Cloning %s into %s.', url, temp_repo_path)
    self.run(['git', 'clone', url, temp_repo_path])

    if revision is not None:
        _log.info('Checking out %s', revision)
        self.run(['git', 'checkout', revision], cwd=temp_repo_path)

    self.run(['git', 'submodule', 'update', '--init', '--recursive'], cwd=temp_repo_path)

    _log.info('Noting the revision we are importing.')
    _, show_ref_output = self.run(['git', 'show-ref', 'origin/master'], cwd=temp_repo_path)
    master_commitish = show_ref_output.split()[0]

    _log.info('Cleaning out tests from LayoutTests/external/%s.', dest_dir_name)
    dest_path = self.path_from_webkit_base('LayoutTests', 'external', dest_dir_name)
    # Keep baselines for now; orphans are pruned after the copy below.
    is_not_baseline_filter = lambda fs, dirname, basename: not self.is_baseline(basename)
    for subpath in self.fs.files_under(dest_path, file_filter=is_not_baseline_filter):
        self.remove('LayoutTests', 'external', subpath)

    _log.info('Importing the tests.')
    test_copier = TestCopier(self.host, temp_repo_path)
    test_copier.do_import()

    self.run(['git', 'add', '--all', 'LayoutTests/external/%s' % dest_dir_name])

    _log.info('Deleting any orphaned baselines.')
    is_baseline_filter = lambda fs, dirname, basename: self.is_baseline(basename)
    for subpath in self.fs.files_under(dest_path, file_filter=is_baseline_filter):
        full_path = self.fs.join(dest_path, subpath)
        # A baseline whose glob matches only itself has no test left.
        if self.fs.glob(full_path.replace('-expected.txt', '*')) == [full_path]:
            self.fs.remove(full_path)

    self._generate_manifest(dest_path)

    if not keep_w3c_repos_around:
        _log.info('Deleting temp repo directory %s.', temp_repo_path)
        self.rmtree(temp_repo_path)

    _log.info('Updating TestExpectations for any removed or renamed tests.')
    self.update_all_test_expectations_files(self._list_deleted_tests(), self._list_renamed_tests())

    return '%s@%s' % (dest_dir_name, master_commitish)
def _commit_changes(self, commit_message):
    """Commits all local changes with the given commit message."""
    _log.info('Committing changes.')
    # '-F -' reads the commit message from stdin, avoiding quoting issues.
    self.run(['git', 'commit', '--all', '-F', '-'], stdin=commit_message)
224 def _has_changes(self):
225 return_code, _ = self.run(['git', 'diff', '--quiet', 'HEAD'], exit_on_fa ilure=False)
226 return return_code == 1
227
228 def _commit_message(self, chromium_commit, import_commit):
229 return ('Import %s\n\n'
230 'Using wpt-import in Chromium %s.\n\n'
231 'NOEXPORT=true' %
232 (import_commit, chromium_commit))
233
@staticmethod
def is_baseline(basename):
    """Returns True if the file name looks like a -expected.txt baseline."""
    # TODO(qyearsley): Find a better, centralized place for this.
    return basename.endswith('-expected.txt')
def run(self, cmd, exit_on_failure=True, cwd=None, stdin=''):
    """Runs a command, logging its output; returns (returncode, stdout).

    Args:
        cmd: The command as an argument list.
        exit_on_failure: When True, exits the process on a non-zero status.
        cwd: Working directory; defaults to the WebKit base directory.
        stdin: Text piped to the command's standard input.
    """
    _log.debug('Running command: %s', ' '.join(cmd))

    proc = self.executive.popen(
        cmd,
        stdout=self.executive.PIPE,
        stderr=self.executive.PIPE,
        stdin=self.executive.PIPE,
        cwd=cwd or self.finder.webkit_base())
    out, err = proc.communicate(stdin)

    # Echo the child's output when it failed or when running verbosely.
    if proc.returncode or self.verbose:
        _log.info('# ret> %d', proc.returncode)
        for line in out.splitlines() if out else ():
            _log.info('# out> %s', line)
        for line in err.splitlines() if err else ():
            _log.info('# err> %s', line)

    if exit_on_failure and proc.returncode:
        self.host.exit(proc.returncode)
    return proc.returncode, out
def check_run(self, command):
    """Runs a command, raising an exception if it exits non-zero.

    Returns:
        The command's standard output on success.

    Raises:
        Exception: If the command failed.
    """
    return_code, out = self.run(command)
    if return_code:
        # Bug fix: the format operands must be a tuple. Previously the
        # string was %-formatted with only the joined command, so the
        # '%d' raised a TypeError instead of the intended error message.
        raise Exception('%s failed with exit code %d.' % (' '.join(command), return_code))
    return out
def copyfile(self, source, destination):
    """Copies one file via the host filesystem, logging the operation."""
    _log.debug('cp %s %s', source, destination)
    self.fs.copyfile(source, destination)
def remove(self, *comps):
    """Removes the file at the path given by components under the WebKit base."""
    dest = self.path_from_webkit_base(*comps)
    _log.debug('rm %s', dest)
    self.fs.remove(dest)
def rmtree(self, *comps):
    """Recursively removes the directory given by components under the WebKit base."""
    dest = self.path_from_webkit_base(*comps)
    _log.debug('rm -fr %s', dest)
    self.fs.rmtree(dest)
def path_from_webkit_base(self, *comps):
    """Returns an absolute path built from components under the WebKit base."""
    return self.finder.path_from_webkit_base(*comps)
def do_auto_update(self):
    """Attempts to upload a CL, make any required adjustments, and commit.

    This function assumes that the imported repo has already been updated,
    and that change has been committed. There may be newly-failing tests,
    so before being able to commit these new changes, we may need to update
    TestExpectations or download new baselines.

    Returns:
        True if successfully committed, False otherwise.
    """
    self._upload_cl()
    _log.info('Issue: %s', self.git_cl.run(['issue']).strip())

    # First, try on Blink try bots in order to get any new baselines.
    _log.info('Triggering try jobs.')
    for try_bot in self.host.builders.all_try_builder_names():
        self.git_cl.run(['try', '-b', try_bot])
    try_results = self.git_cl.wait_for_try_jobs(
        poll_delay_seconds=POLL_DELAY_SECONDS, timeout_seconds=TIMEOUT_SECONDS)
    if not try_results:
        self.git_cl.run(['set-close'])
        return False
    if self.git_cl.has_failing_try_results(try_results):
        self.fetch_new_expectations_and_baselines()

    # Wait for CQ try jobs to finish. If there are failures, then abort.
    self.git_cl.run(['set-commit', '--rietveld'])
    try_results = self.git_cl.wait_for_try_jobs(
        poll_delay_seconds=POLL_DELAY_SECONDS, timeout_seconds=TIMEOUT_SECONDS)
    if not try_results:
        self.git_cl.run(['set-close'])
        return False
    if self.git_cl.has_failing_try_results(try_results):
        _log.info('CQ failed; aborting.')
        self.git_cl.run(['set-close'])
        return False

    _log.info('Update completed.')
    return True
def _upload_cl(self):
    """Uploads the current change list, CC-ing directory owners."""
    _log.info('Uploading change list.')
    cc_list = self.get_directory_owners_to_cc()
    args = ['upload', '-f', '--rietveld', '-m', self._cl_description()]
    args.extend('--cc=' + email for email in cc_list)
    self.git_cl.run(args)
337 def _cl_description(self):
338 description = self.check_run(['git', 'log', '-1', '--format=%B'])
339 build_link = self._build_link()
340 if build_link:
341 description += 'Build: %s\n\n' % build_link
342 description += 'TBR=qyearsley@chromium.org\n'
343 # Move any NOEXPORT tag to the end of the description.
344 description = description.replace('NOEXPORT=true', '')
345 description = description.replace('\n\n\n\n', '\n\n')
346 description += 'NOEXPORT=true'
347 return description
348
349 def _build_link(self):
350 """Returns a link to a job, if running on buildbot."""
351 master_name = self.host.environ.get('BUILDBOT_MASTERNAME')
352 builder_name = self.host.environ.get('BUILDBOT_BUILDERNAME')
353 build_number = self.host.environ.get('BUILDBOT_BUILDNUMBER')
354 if not (master_name and builder_name and build_number):
355 return None
356 return 'https://build.chromium.org/p/%s/builders/%s/builds/%s' % (master _name, builder_name, build_number)
357
def get_directory_owners_to_cc(self):
    """Returns a list of email addresses to CC for the current import."""
    _log.info('Gathering directory owners emails to CC.')
    directory_owners_file_path = self.finder.path_from_webkit_base(
        'Tools', 'Scripts', 'webkitpy', 'w3c', 'directory_owners.json')
    # NOTE(review): uses the real filesystem via open() rather than
    # self.fs like the rest of this class — confirm this is intentional.
    with open(directory_owners_file_path) as data_file:
        directory_to_owner = self.parse_directory_owners(json.load(data_file))
    out = self.check_run(['git', 'diff', 'origin/master', '--name-only'])
    return self.generate_email_list(out.splitlines(), directory_to_owner)
@staticmethod
def parse_directory_owners(decoded_data_file):
    """Maps directory to notification email, skipping entries with no email."""
    return {entry['directory']: entry['notification-email']
            for entry in decoded_data_file
            if entry['notification-email']}
def generate_email_list(self, changed_files, directory_to_owner):
    """Returns a list of email addresses based on the given file list and
    directory-to-owner mapping.

    Args:
        changed_files: A list of file paths relative to the repository root.
        directory_to_owner: A dict mapping layout test directories to emails.

    Returns:
        A sorted list of the email addresses to notify for this import.
    """
    email_addresses = set()
    for file_path in changed_files:
        test_path = self.finder.layout_test_name(file_path)
        if test_path is None:
            # Not a layout test file; nobody to notify.
            continue
        test_dir = self.fs.dirname(test_path)
        if test_dir not in directory_to_owner:
            continue
        address = directory_to_owner[test_dir]
        if not re.match(r'\S+@\S+', address):
            _log.warning('%s appears not be an email address, skipping.', address)
            continue
        email_addresses.add(address)
    return sorted(email_addresses)
149 # See http://crbug.com/584660 and http://crbug.com/582838. 401
def fetch_new_expectations_and_baselines(self):
    """Adds new expectations and downloads baselines based on try job
    results, then commits and uploads the change."""
    _log.info('Adding test expectations lines to LayoutTests/TestExpectations.')
    line_adder = W3CExpectationsLineAdder(self.host)
    line_adder.run()
    message = 'Update test expectations and baselines.'
    self.check_run(['git', 'commit', '-a', '-m', message])
    self.git_cl.run(['upload', '-m', message, '--rietveld'])
158 copy_list.append({'src': fullpath, 'dest': filename}) 410
def update_all_test_expectations_files(self, deleted_tests, renamed_tests):
    """Updates all test expectations files for tests that have been
    deleted or renamed.

    Args:
        deleted_tests: A list of deleted layout test names.
        renamed_tests: A dict mapping old test names to new ones.
    """
    port = self.host.port_factory.get()
    for path, file_contents in port.all_expectations_dict().iteritems():
        parser = TestExpectationParser(port, all_tests=None, is_lint_mode=False)
        expectation_lines = parser.parse(path, file_contents)
        self._update_single_test_expectations_file(
            path, expectation_lines, deleted_tests, renamed_tests)
224 for line in expectation_lines: 418
def _update_single_test_expectations_file(self, path, expectation_lines, deleted_tests, renamed_tests):
    """Updates a single test expectations file.

    Drops lines for deleted tests and rewrites lines for renamed tests,
    then writes the file back out.
    """
    # FIXME: This won't work for removed or renamed directories with test
    # expectations that are directories rather than individual tests.
    new_lines = []
    changed_lines = []
    for expectation_line in expectation_lines:
        if expectation_line.name in deleted_tests:
            continue
        if expectation_line.name in renamed_tests:
            expectation_line.name = renamed_tests[expectation_line.name]
            # Upon parsing the file, a "path does not exist" warning is
            # expected to be there for tests that have been renamed, and if
            # there are warnings, then the original string is used. If the
            # warnings are reset, then the expectation line is re-serialized
            # when output.
            expectation_line.warnings = []
            changed_lines.append(expectation_line)
        new_lines.append(expectation_line)
    new_file_contents = TestExpectations.list_to_string(
        new_lines, reconstitute_only_these=changed_lines)
    self.host.filesystem.write_text_file(path, new_file_contents)
256 for file_to_copy in dir_to_copy['copy_list']: 439
257 copied_file = self.copy_file(file_to_copy, dest_dir) 440 def _list_deleted_tests(self):
258 if copied_file: 441 """Returns a list of layout tests that have been deleted."""
259 copied_files.append(copied_file) 442 out = self.check_run(['git', 'diff', 'origin/master', '-M100%', '--diff- filter=D', '--name-only'])
260 443 deleted_tests = []
261 _log.info('') 444 for line in out.splitlines():
262 _log.info('Import complete') 445 test = self.finder.layout_test_name(line)
263 _log.info('') 446 if test:
264 _log.info('IMPORTED %d TOTAL TESTS', total_imported_tests) 447 deleted_tests.append(test)
265 _log.info('Imported %d reftests', total_imported_reftests) 448 return deleted_tests
266 _log.info('Imported %d JS tests', total_imported_jstests) 449
267 _log.info('Imported %d pixel/manual tests', total_imported_tests - total _imported_jstests - total_imported_reftests) 450 def _list_renamed_tests(self):
268 _log.info('') 451 """Returns a dict mapping source to dest name for layout tests that have been renamed."""
269 452 out = self.check_run(['git', 'diff', 'origin/master', '-M100%', '--diff- filter=R', '--name-status'])
270 if self._prefixed_properties: 453 renamed_tests = {}
271 _log.info('Properties needing prefixes (by count):') 454 for line in out.splitlines():
272 for prefixed_property in sorted(self._prefixed_properties, key=lambd a p: self._prefixed_properties[p]): 455 _, source_path, dest_path = line.split()
273 _log.info(' %s: %s', prefixed_property, self._prefixed_properti es[prefixed_property]) 456 source_test = self.finder.layout_test_name(source_path)
274 457 dest_test = self.finder.layout_test_name(dest_path)
275 def copy_file(self, file_to_copy, dest_dir): 458 if source_test and dest_test:
276 """Converts and copies a file, if it should be copied. 459 renamed_tests[source_test] = dest_test
277 460 return renamed_tests
278 Args:
279 file_to_copy: A dict in a file copy list constructed by
280 find_importable_tests, which represents one file to copy, includ ing
281 the keys:
282 "src": Absolute path to the source location of the file.
283 "destination": File name of the destination file.
284 And possibly also the keys "reference_support_info" or "is_jstes t".
285 dest_dir: Path to the directory where the file should be copied.
286
287 Returns:
288 The path to the new file, relative to the Blink root (//third_party/ WebKit).
289 """
290 source_path = self.filesystem.normpath(file_to_copy['src'])
291 dest_path = self.filesystem.join(dest_dir, file_to_copy['dest'])
292
293 if self.filesystem.isdir(source_path):
294 _log.error('%s refers to a directory', source_path)
295 return None
296
297 if not self.filesystem.exists(source_path):
298 _log.error('%s not found. Possible error in the test.', source_path)
299 return None
300
301 reference_support_info = file_to_copy.get('reference_support_info') or N one
302
303 if not self.filesystem.exists(self.filesystem.dirname(dest_path)):
304 if not self.import_in_place:
305 self.filesystem.maybe_make_directory(self.filesystem.dirname(des t_path))
306
307 relpath = self.filesystem.relpath(dest_path, self.layout_tests_dir)
308 # FIXME: Maybe doing a file diff is in order here for existing files?
309 # In other words, there's no sense in overwriting identical files, but
310 # there's no harm in copying the identical thing.
311 _log.debug(' copying %s', relpath)
312
313 if self.should_try_to_convert(file_to_copy, source_path, dest_dir):
314 converted_file = convert_for_webkit(
315 dest_dir, filename=source_path,
316 reference_support_info=reference_support_info,
317 host=self.host)
318 for prefixed_property in converted_file[0]:
319 self._prefixed_properties.setdefault(prefixed_property, 0)
320 self._prefixed_properties[prefixed_property] += 1
321
322 self.filesystem.write_text_file(dest_path, converted_file[1])
323 else:
324 if not self.import_in_place:
325 self.filesystem.copyfile(source_path, dest_path)
326 if self.filesystem.read_binary_file(source_path)[:2] == '#!':
327 self.filesystem.make_executable(dest_path)
328
329 return dest_path.replace(self._webkit_root, '')
330
331 @staticmethod
332 def should_try_to_convert(file_to_copy, source_path, dest_dir):
333 """Checks whether we should try to modify the file when importing."""
334 if file_to_copy.get('is_jstest', False):
335 return False
336
337 # Conversion is not necessary for any tests in wpt now; see http://crbug .com/654081.
338 # Note, we want to move away from converting files, see http://crbug.com /663773.
339 if re.search(r'[/\\]external[/\\]wpt[/\\]', dest_dir):
340 return False
341
342 # Only HTML, XHTML and CSS files should be converted.
343 mimetype, _ = mimetypes.guess_type(source_path)
344 return mimetype in ('text/html', 'application/xhtml+xml', 'text/css')
345
346 def path_too_long(self, source_path):
347 """Checks whether a source path is too long to import.
348
349 Args:
350 Absolute path of file to be imported.
351
352 Returns:
353 True if the path is too long to import, False if it's OK.
354 """
355 path_from_repo_base = os.path.relpath(source_path, self.source_repo_path )
356 return len(path_from_repo_base) > MAX_PATH_LENGTH
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698