| OLD | NEW |
| (Empty) |
| 1 # Copyright 2014 The Chromium Authors. All rights reserved. | |
| 2 # Use of this source code is governed by a BSD-style license that can be | |
| 3 # found in the LICENSE file. | |
| 4 | |
| 5 """Fetches a copy of the latest state of a W3C test repository and commits. | |
| 6 | |
| 7 If this script is given the argument --auto-update, it will also: | |
| 8 1. Upload a CL. | |
| 9 2. Trigger try jobs and wait for them to complete. | |
| 10 3. Make any changes that are required for new failing tests. | |
| 11 4. Commit the CL. | |
"""
| 17 | |
| 18 import argparse | |
| 19 import json | |
| 20 import logging | |
| 21 import re | |
| 22 | |
| 23 from webkitpy.common.net.git_cl import GitCL | |
| 24 from webkitpy.common.webkit_finder import WebKitFinder | |
| 25 from webkitpy.layout_tests.models.test_expectations import TestExpectations, Tes
tExpectationParser | |
| 26 from webkitpy.w3c.update_w3c_test_expectations import W3CExpectationsLineAdder | |
| 27 from webkitpy.w3c.test_importer import TestImporter | |
| 28 from webkitpy.w3c.common import WPT_REPO_URL, CSS_REPO_URL, WPT_DEST_NAME, CSS_D
EST_NAME | |
| 29 | |
# Settings for how often to check try job results and how long to wait.
POLL_DELAY_SECONDS = 2 * 60
TIMEOUT_SECONDS = 180 * 60

# Use the module name (not the file path) so this logger participates in the
# normal dotted logger hierarchy; getLogger(__file__) would key the logger on
# a filesystem path.
_log = logging.getLogger(__name__)
| 35 | |
| 36 | |
class DepsUpdater(object):
    """Fetches and commits the latest state of a W3C test repository.

    With --auto-update, also uploads a CL, triggers try jobs, applies any
    required expectation/baseline updates, and commits via the CQ.
    """

    def __init__(self, host):
        """Initializes the updater from a webkitpy Host object.

        Args:
            host: A webkitpy Host, providing executive, filesystem, etc.
        """
        self.host = host
        self.executive = host.executive
        self.fs = host.filesystem
        self.finder = WebKitFinder(self.fs)
        # Set from the --verbose flag in main().
        self.verbose = False
        # Created in main(); used for CL upload and try-job operations.
        self.git_cl = None
| 46 | |
    def main(self, argv=None):
        """Runs the import; returns 0 on success, 1 on failure.

        Args:
            argv: Command-line argument list, or None to use sys.argv.
        """
        options = self.parse_args(argv)
        self.verbose = options.verbose
        log_level = logging.DEBUG if self.verbose else logging.INFO
        logging.basicConfig(level=log_level, format='%(message)s')

        if not self.checkout_is_okay(options.allow_local_commits):
            return 1

        self.git_cl = GitCL(self.host, auth_refresh_token_json=options.auth_refresh_token_json)

        _log.info('Noting the current Chromium commit.')
        # `git show-ref` output is "<commit-hash> <ref-name>"; take the hash.
        _, show_ref_output = self.run(['git', 'show-ref', 'HEAD'])
        chromium_commit = show_ref_output.split()[0]

        if options.target == 'wpt':
            import_commit = self.update(WPT_DEST_NAME, WPT_REPO_URL, options.keep_w3c_repos_around, options.revision)
            self._copy_resources()
        elif options.target == 'css':
            import_commit = self.update(CSS_DEST_NAME, CSS_REPO_URL, options.keep_w3c_repos_around, options.revision)
        else:
            # Unreachable in practice: argparse restricts `target` to the
            # choices above.
            raise AssertionError("Unsupported target %s" % options.target)

        has_changes = self._has_changes()
        if not has_changes:
            _log.info('Done: no changes to import.')
            return 0

        commit_message = self._commit_message(chromium_commit, import_commit)
        self._commit_changes(commit_message)
        _log.info('Done: changes imported and committed.')

        if options.auto_update:
            commit_successful = self.do_auto_update()
            if not commit_successful:
                return 1
        return 0
| 84 | |
| 85 def parse_args(self, argv): | |
| 86 parser = argparse.ArgumentParser() | |
| 87 parser.description = __doc__ | |
| 88 parser.add_argument('-v', '--verbose', action='store_true', | |
| 89 help='log what we are doing') | |
| 90 parser.add_argument('--allow-local-commits', action='store_true', | |
| 91 help='allow script to run even if we have local comm
its') | |
| 92 parser.add_argument('--keep-w3c-repos-around', action='store_true', | |
| 93 help='leave the w3c repos around that were imported
previously.') | |
| 94 parser.add_argument('-r', dest='revision', action='store', | |
| 95 help='Target revision.') | |
| 96 parser.add_argument('target', choices=['css', 'wpt'], | |
| 97 help='Target repository. "css" for csswg-test, "wpt
" for web-platform-tests.') | |
| 98 parser.add_argument('--auto-update', action='store_true', | |
| 99 help='uploads CL and initiates commit queue.') | |
| 100 parser.add_argument('--auth-refresh-token-json', | |
| 101 help='Rietveld auth refresh JSON token.') | |
| 102 return parser.parse_args(argv) | |
| 103 | |
| 104 def checkout_is_okay(self, allow_local_commits): | |
| 105 git_diff_retcode, _ = self.run(['git', 'diff', '--quiet', 'HEAD'], exit_
on_failure=False) | |
| 106 if git_diff_retcode: | |
| 107 _log.warning('Checkout is dirty; aborting.') | |
| 108 return False | |
| 109 | |
| 110 local_commits = self.run(['git', 'log', '--oneline', 'origin/master..HEA
D'])[1] | |
| 111 if local_commits and not allow_local_commits: | |
| 112 _log.warning('Checkout has local commits; aborting. Use --allow-loca
l-commits to allow this.') | |
| 113 return False | |
| 114 | |
| 115 if self.fs.exists(self.path_from_webkit_base(WPT_DEST_NAME)): | |
| 116 _log.warning('WebKit/%s exists; aborting.', WPT_DEST_NAME) | |
| 117 return False | |
| 118 | |
| 119 if self.fs.exists(self.path_from_webkit_base(CSS_DEST_NAME)): | |
| 120 _log.warning('WebKit/%s repo exists; aborting.', CSS_DEST_NAME) | |
| 121 return False | |
| 122 | |
| 123 return True | |
| 124 | |
| 125 def _copy_resources(self): | |
| 126 """Copies resources from wpt to LayoutTests/resources. | |
| 127 | |
| 128 We copy idlharness.js and testharness.js in wpt to LayoutTests/resources | |
| 129 in order to use them in non-imported tests. | |
| 130 | |
| 131 If this method is changed, the lists of files expected to be identical | |
| 132 in LayoutTests/PRESUBMIT.py should also be changed. | |
| 133 """ | |
| 134 resources_to_copy_from_wpt = [ | |
| 135 ('idlharness.js', 'resources'), | |
| 136 ('testharness.js', 'resources'), | |
| 137 ] | |
| 138 for filename, wpt_subdir in resources_to_copy_from_wpt: | |
| 139 source = self.path_from_webkit_base('LayoutTests', 'external', WPT_D
EST_NAME, wpt_subdir, filename) | |
| 140 destination = self.path_from_webkit_base('LayoutTests', 'resources',
filename) | |
| 141 self.copyfile(source, destination) | |
| 142 self.run(['git', 'add', destination]) | |
| 143 | |
| 144 def _generate_manifest(self, dest_path): | |
| 145 """Generates MANIFEST.json for imported tests. | |
| 146 | |
| 147 Args: | |
| 148 dest_path: Path to the destination WPT directory. | |
| 149 | |
| 150 Runs the (newly-updated) manifest command if it's found, and then | |
| 151 stages the generated MANIFEST.json in the git index, ready to commit. | |
| 152 """ | |
| 153 manifest_command = self.finder.path_from_webkit_base('Tools', 'Scripts',
'webkitpy', 'thirdparty', 'wpt', 'wpt', 'manifest') | |
| 154 if 'css' in dest_path: | |
| 155 # Do nothing for csswg-test. | |
| 156 return | |
| 157 _log.info('Generating MANIFEST.json') | |
| 158 self.run([manifest_command, '--work', '--tests-root', dest_path]) | |
| 159 self.run(['git', 'add', self.fs.join(dest_path, 'MANIFEST.json')]) | |
| 160 | |
    def update(self, dest_dir_name, url, keep_w3c_repos_around, revision):
        """Updates an imported repository.

        Clones the upstream repo, clears out previously-imported tests,
        re-imports, prunes orphaned baselines, regenerates the manifest, and
        updates expectations for deleted or renamed tests.

        Args:
            dest_dir_name: The destination directory name.
            url: URL of the git repository.
            keep_w3c_repos_around: If True, the temporary clone is kept.
            revision: Commit hash or None (None means origin/master HEAD).

        Returns:
            A string for the commit description "<destination>@<commitish>".
        """
        temp_repo_path = self.path_from_webkit_base(dest_dir_name)
        _log.info('Cloning %s into %s.', url, temp_repo_path)
        self.run(['git', 'clone', url, temp_repo_path])

        if revision is not None:
            _log.info('Checking out %s', revision)
            self.run(['git', 'checkout', revision], cwd=temp_repo_path)

        self.run(['git', 'submodule', 'update', '--init', '--recursive'], cwd=temp_repo_path)

        _log.info('Noting the revision we are importing.')
        # show-ref output is "<commit-hash> <ref-name>"; take the hash.
        _, show_ref_output = self.run(['git', 'show-ref', 'origin/master'], cwd=temp_repo_path)
        master_commitish = show_ref_output.split()[0]

        _log.info('Cleaning out tests from LayoutTests/external/%s.', dest_dir_name)
        dest_path = self.path_from_webkit_base('LayoutTests', 'external', dest_dir_name)
        # Delete everything except baselines; baselines are pruned separately
        # below so that ones still matching imported tests survive.
        is_not_baseline_filter = lambda fs, dirname, basename: not self.is_baseline(basename)
        files_to_delete = self.fs.files_under(dest_path, file_filter=is_not_baseline_filter)
        for subpath in files_to_delete:
            self.remove('LayoutTests', 'external', subpath)

        _log.info('Importing the tests.')
        test_importer = TestImporter(self.host, temp_repo_path)
        test_importer.do_import()

        self.run(['git', 'add', '--all', 'LayoutTests/external/%s' % dest_dir_name])

        _log.info('Deleting any orphaned baselines.')

        is_baseline_filter = lambda fs, dirname, basename: self.is_baseline(basename)
        previous_baselines = self.fs.files_under(dest_path, file_filter=is_baseline_filter)

        for subpath in previous_baselines:
            full_path = self.fs.join(dest_path, subpath)
            # A baseline is orphaned when globbing its test-name prefix
            # matches only the baseline itself (no test file remains).
            if self.fs.glob(full_path.replace('-expected.txt', '*')) == [full_path]:
                self.fs.remove(full_path)

        self._generate_manifest(dest_path)

        if not keep_w3c_repos_around:
            _log.info('Deleting temp repo directory %s.', temp_repo_path)
            self.rmtree(temp_repo_path)

        _log.info('Updating TestExpectations for any removed or renamed tests.')
        self.update_all_test_expectations_files(self._list_deleted_tests(), self._list_renamed_tests())

        return '%s@%s' % (dest_dir_name, master_commitish)
| 219 | |
    def _commit_changes(self, commit_message):
        """Commits all tracked changes, reading the message from stdin (-F -)."""
        _log.info('Committing changes.')
        self.run(['git', 'commit', '--all', '-F', '-'], stdin=commit_message)
| 223 | |
    def _has_changes(self):
        """Returns True if the checkout differs from HEAD.

        `git diff --quiet` exits with 1 when there are differences.
        NOTE(review): other non-zero exit codes (actual git errors) are
        treated as "no changes" here — confirm that is intended.
        """
        return_code, _ = self.run(['git', 'diff', '--quiet', 'HEAD'], exit_on_failure=False)
        return return_code == 1
| 227 | |
| 228 def _commit_message(self, chromium_commit, import_commit): | |
| 229 return ('Import %s\n\n' | |
| 230 'Using wpt-import in Chromium %s.\n\n' | |
| 231 'NOEXPORT=true' % | |
| 232 (import_commit, chromium_commit)) | |
| 233 | |
    @staticmethod
    def is_baseline(basename):
        """Returns True if the file name is a text baseline (-expected.txt)."""
        # TODO(qyearsley): Find a better, centralized place for this.
        return basename.endswith('-expected.txt')
| 238 | |
| 239 def run(self, cmd, exit_on_failure=True, cwd=None, stdin=''): | |
| 240 _log.debug('Running command: %s', ' '.join(cmd)) | |
| 241 | |
| 242 cwd = cwd or self.finder.webkit_base() | |
| 243 proc = self.executive.popen(cmd, stdout=self.executive.PIPE, stderr=self
.executive.PIPE, stdin=self.executive.PIPE, cwd=cwd) | |
| 244 out, err = proc.communicate(stdin) | |
| 245 if proc.returncode or self.verbose: | |
| 246 _log.info('# ret> %d', proc.returncode) | |
| 247 if out: | |
| 248 for line in out.splitlines(): | |
| 249 _log.info('# out> %s', line) | |
| 250 if err: | |
| 251 for line in err.splitlines(): | |
| 252 _log.info('# err> %s', line) | |
| 253 if exit_on_failure and proc.returncode: | |
| 254 self.host.exit(proc.returncode) | |
| 255 return proc.returncode, out | |
| 256 | |
| 257 def check_run(self, command): | |
| 258 return_code, out = self.run(command) | |
| 259 if return_code: | |
| 260 raise Exception('%s failed with exit code %d.' % ' '.join(command),
return_code) | |
| 261 return out | |
| 262 | |
    def copyfile(self, source, destination):
        """Copies a file via the host filesystem, logging the operation."""
        _log.debug('cp %s %s', source, destination)
        self.fs.copyfile(source, destination)

    def remove(self, *comps):
        """Removes the file at the given path components under the WebKit base."""
        dest = self.path_from_webkit_base(*comps)
        _log.debug('rm %s', dest)
        self.fs.remove(dest)

    def rmtree(self, *comps):
        """Recursively removes the directory at the given path components."""
        dest = self.path_from_webkit_base(*comps)
        _log.debug('rm -fr %s', dest)
        self.fs.rmtree(dest)

    def path_from_webkit_base(self, *comps):
        """Returns a path under the WebKit base directory (via WebKitFinder)."""
        return self.finder.path_from_webkit_base(*comps)
| 279 | |
| 280 def do_auto_update(self): | |
| 281 """Attempts to upload a CL, make any required adjustments, and commit. | |
| 282 | |
| 283 This function assumes that the imported repo has already been updated, | |
| 284 and that change has been committed. There may be newly-failing tests, | |
| 285 so before being able to commit these new changes, we may need to update | |
| 286 TestExpectations or download new baselines. | |
| 287 | |
| 288 Returns: | |
| 289 True if successfully committed, False otherwise. | |
| 290 """ | |
| 291 self._upload_cl() | |
| 292 _log.info('Issue: %s', self.git_cl.run(['issue']).strip()) | |
| 293 | |
| 294 # First, try on Blink try bots in order to get any new baselines. | |
| 295 _log.info('Triggering try jobs.') | |
| 296 for try_bot in self.host.builders.all_try_builder_names(): | |
| 297 self.git_cl.run(['try', '-b', try_bot]) | |
| 298 try_results = self.git_cl.wait_for_try_jobs( | |
| 299 poll_delay_seconds=POLL_DELAY_SECONDS, timeout_seconds=TIMEOUT_SECON
DS) | |
| 300 | |
| 301 if not try_results: | |
| 302 self.git_cl.run(['set-close']) | |
| 303 return False | |
| 304 | |
| 305 if try_results and self.git_cl.has_failing_try_results(try_results): | |
| 306 self.fetch_new_expectations_and_baselines() | |
| 307 | |
| 308 # Wait for CQ try jobs to finish. If there are failures, then abort. | |
| 309 self.git_cl.run(['set-commit', '--rietveld']) | |
| 310 try_results = self.git_cl.wait_for_try_jobs( | |
| 311 poll_delay_seconds=POLL_DELAY_SECONDS, timeout_seconds=TIMEOUT_SECON
DS) | |
| 312 | |
| 313 if not try_results: | |
| 314 self.git_cl.run(['set-close']) | |
| 315 return False | |
| 316 | |
| 317 if self.git_cl.has_failing_try_results(try_results): | |
| 318 _log.info('CQ failed; aborting.') | |
| 319 self.git_cl.run(['set-close']) | |
| 320 return False | |
| 321 | |
| 322 _log.info('Update completed.') | |
| 323 return True | |
| 324 | |
| 325 def _upload_cl(self): | |
| 326 _log.info('Uploading change list.') | |
| 327 cc_list = self.get_directory_owners_to_cc() | |
| 328 description = self._cl_description() | |
| 329 self.git_cl.run([ | |
| 330 'upload', | |
| 331 '-f', | |
| 332 '--rietveld', | |
| 333 '-m', | |
| 334 description, | |
| 335 ] + ['--cc=' + email for email in cc_list]) | |
| 336 | |
| 337 def _cl_description(self): | |
| 338 description = self.check_run(['git', 'log', '-1', '--format=%B']) | |
| 339 build_link = self._build_link() | |
| 340 if build_link: | |
| 341 description += 'Build: %s\n\n' % build_link | |
| 342 description += 'TBR=qyearsley@chromium.org\n' | |
| 343 # Move any NOEXPORT tag to the end of the description. | |
| 344 description = description.replace('NOEXPORT=true', '') | |
| 345 description = description.replace('\n\n\n\n', '\n\n') | |
| 346 description += 'NOEXPORT=true' | |
| 347 return description | |
| 348 | |
| 349 def _build_link(self): | |
| 350 """Returns a link to a job, if running on buildbot.""" | |
| 351 master_name = self.host.environ.get('BUILDBOT_MASTERNAME') | |
| 352 builder_name = self.host.environ.get('BUILDBOT_BUILDERNAME') | |
| 353 build_number = self.host.environ.get('BUILDBOT_BUILDNUMBER') | |
| 354 if not (master_name and builder_name and build_number): | |
| 355 return None | |
| 356 return 'https://build.chromium.org/p/%s/builders/%s/builds/%s' % (master
_name, builder_name, build_number) | |
| 357 | |
| 358 def get_directory_owners_to_cc(self): | |
| 359 """Returns a list of email addresses to CC for the current import.""" | |
| 360 _log.info('Gathering directory owners emails to CC.') | |
| 361 directory_owners_file_path = self.finder.path_from_webkit_base( | |
| 362 'Tools', 'Scripts', 'webkitpy', 'w3c', 'directory_owners.json') | |
| 363 with open(directory_owners_file_path) as data_file: | |
| 364 directory_to_owner = self.parse_directory_owners(json.load(data_file
)) | |
| 365 out = self.check_run(['git', 'diff', 'origin/master', '--name-only']) | |
| 366 changed_files = out.splitlines() | |
| 367 return self.generate_email_list(changed_files, directory_to_owner) | |
| 368 | |
| 369 @staticmethod | |
| 370 def parse_directory_owners(decoded_data_file): | |
| 371 directory_dict = {} | |
| 372 for dict_set in decoded_data_file: | |
| 373 if dict_set['notification-email']: | |
| 374 directory_dict[dict_set['directory']] = dict_set['notification-e
mail'] | |
| 375 return directory_dict | |
| 376 | |
| 377 def generate_email_list(self, changed_files, directory_to_owner): | |
| 378 """Returns a list of email addresses based on the given file list and | |
| 379 directory-to-owner mapping. | |
| 380 | |
| 381 Args: | |
| 382 changed_files: A list of file paths relative to the repository root. | |
| 383 directory_to_owner: A dict mapping layout test directories to emails
. | |
| 384 | |
| 385 Returns: | |
| 386 A list of the email addresses to be notified for the current import. | |
| 387 """ | |
| 388 email_addresses = set() | |
| 389 for file_path in changed_files: | |
| 390 test_path = self.finder.layout_test_name(file_path) | |
| 391 if test_path is None: | |
| 392 continue | |
| 393 test_dir = self.fs.dirname(test_path) | |
| 394 if test_dir in directory_to_owner: | |
| 395 address = directory_to_owner[test_dir] | |
| 396 if not re.match(r'\S+@\S+', address): | |
| 397 _log.warning('%s appears not be an email address, skipping.'
, address) | |
| 398 continue | |
| 399 email_addresses.add(address) | |
| 400 return sorted(email_addresses) | |
| 401 | |
    def fetch_new_expectations_and_baselines(self):
        """Adds new expectations and downloads baselines based on try job
        results, then commits and uploads the change.
        """
        _log.info('Adding test expectations lines to LayoutTests/TestExpectations.')
        line_adder = W3CExpectationsLineAdder(self.host)
        line_adder.run()
        message = 'Update test expectations and baselines.'
        # Commit the adjustments on top of the import commit, then upload a
        # new patch set via git cl.
        self.check_run(['git', 'commit', '-a', '-m', message])
        self.git_cl.run(['upload', '-m', message, '--rietveld'])
| 410 | |
    def update_all_test_expectations_files(self, deleted_tests, renamed_tests):
        """Updates all test expectations files for tests that have been deleted or renamed.

        Args:
            deleted_tests: A list of deleted layout test names.
            renamed_tests: A dict mapping old test names to new test names.
        """
        port = self.host.port_factory.get()
        # iteritems(): this file targets Python 2.
        for path, file_contents in port.all_expectations_dict().iteritems():
            parser = TestExpectationParser(port, all_tests=None, is_lint_mode=False)
            expectation_lines = parser.parse(path, file_contents)
            self._update_single_test_expectations_file(path, expectation_lines, deleted_tests, renamed_tests)
| 418 | |
| 419 def _update_single_test_expectations_file(self, path, expectation_lines, del
eted_tests, renamed_tests): | |
| 420 """Updates single test expectations file.""" | |
| 421 # FIXME: This won't work for removed or renamed directories with test ex
pectations | |
| 422 # that are directories rather than individual tests. | |
| 423 new_lines = [] | |
| 424 changed_lines = [] | |
| 425 for expectation_line in expectation_lines: | |
| 426 if expectation_line.name in deleted_tests: | |
| 427 continue | |
| 428 if expectation_line.name in renamed_tests: | |
| 429 expectation_line.name = renamed_tests[expectation_line.name] | |
| 430 # Upon parsing the file, a "path does not exist" warning is expe
cted | |
| 431 # to be there for tests that have been renamed, and if there are
warnings, | |
| 432 # then the original string is used. If the warnings are reset, t
hen the | |
| 433 # expectation line is re-serialized when output. | |
| 434 expectation_line.warnings = [] | |
| 435 changed_lines.append(expectation_line) | |
| 436 new_lines.append(expectation_line) | |
| 437 new_file_contents = TestExpectations.list_to_string(new_lines, reconstit
ute_only_these=changed_lines) | |
| 438 self.host.filesystem.write_text_file(path, new_file_contents) | |
| 439 | |
| 440 def _list_deleted_tests(self): | |
| 441 """Returns a list of layout tests that have been deleted.""" | |
| 442 out = self.check_run(['git', 'diff', 'origin/master', '-M100%', '--diff-
filter=D', '--name-only']) | |
| 443 deleted_tests = [] | |
| 444 for line in out.splitlines(): | |
| 445 test = self.finder.layout_test_name(line) | |
| 446 if test: | |
| 447 deleted_tests.append(test) | |
| 448 return deleted_tests | |
| 449 | |
| 450 def _list_renamed_tests(self): | |
| 451 """Returns a dict mapping source to dest name for layout tests that have
been renamed.""" | |
| 452 out = self.check_run(['git', 'diff', 'origin/master', '-M100%', '--diff-
filter=R', '--name-status']) | |
| 453 renamed_tests = {} | |
| 454 for line in out.splitlines(): | |
| 455 _, source_path, dest_path = line.split() | |
| 456 source_test = self.finder.layout_test_name(source_path) | |
| 457 dest_test = self.finder.layout_test_name(dest_path) | |
| 458 if source_test and dest_test: | |
| 459 renamed_tests[source_test] = dest_test | |
| 460 return renamed_tests | |
| OLD | NEW |