Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 #!/usr/bin/env python | 1 #!/usr/bin/env python |
| 2 | 2 |
| 3 # Copyright (C) 2013 Adobe Systems Incorporated. All rights reserved. | 3 # Copyright (C) 2013 Adobe Systems Incorporated. All rights reserved. |
| 4 # | 4 # |
| 5 # Redistribution and use in source and binary forms, with or without | 5 # Redistribution and use in source and binary forms, with or without |
| 6 # modification, are permitted provided that the following conditions | 6 # modification, are permitted provided that the following conditions |
| 7 # are met: | 7 # are met: |
| 8 # | 8 # |
| 9 # 1. Redistributions of source code must retain the above | 9 # 1. Redistributions of source code must retain the above |
| 10 # copyright notice, this list of conditions and the following | 10 # copyright notice, this list of conditions and the following |
| (...skipping 71 matching lines...) | |
| 82 require prefixes, the list of imported files, and guidance for future test modification and | 82 require prefixes, the list of imported files, and guidance for future test modification and |
| 83 maintenance. | 83 maintenance. |
| 84 | 84 |
| 85 - On subsequent imports, this file is read to determine if files have been removed in the newer changesets. | 85 - On subsequent imports, this file is read to determine if files have been removed in the newer changesets. |
| 86 The script removes these files accordingly. | 86 The script removes these files accordingly. |
| 87 """ | 87 """ |
| 88 | 88 |
| 89 # FIXME: Change this file to use the Host abstractions rather that os, sys, shutils, etc. | 89 # FIXME: Change this file to use the Host abstractions rather that os, sys, shutils, etc. |
| 90 | 90 |
| 91 import datetime | 91 import datetime |
| 92 import logging | |
| 92 import mimetypes | 93 import mimetypes |
| 93 import optparse | 94 import optparse |
| 94 import os | 95 import os |
| 95 import shutil | 96 import shutil |
| 96 import sys | 97 import sys |
| 97 | 98 |
| 98 from webkitpy.common.host import Host | 99 from webkitpy.common.host import Host |
| 99 from webkitpy.common.system.executive import ScriptError | 100 from webkitpy.common.system.executive import ScriptError |
| 100 from webkitpy.w3c.test_parser import TestParser | 101 from webkitpy.w3c.test_parser import TestParser |
| 101 from webkitpy.w3c.test_converter import W3CTestConverter | 102 from webkitpy.w3c.test_converter import W3CTestConverter |
| 102 | 103 |
| 103 | 104 |
| 104 TEST_STATUS_UNKNOWN = 'unknown' | 105 TEST_STATUS_UNKNOWN = 'unknown' |
| 105 TEST_STATUS_APPROVED = 'approved' | 106 TEST_STATUS_APPROVED = 'approved' |
| 106 TEST_STATUS_SUBMITTED = 'submitted' | 107 TEST_STATUS_SUBMITTED = 'submitted' |
| 107 | 108 |
| 108 CHANGESET_NOT_AVAILABLE = 'Not Available' | 109 CHANGESET_NOT_AVAILABLE = 'Not Available' |
| 109 | 110 |
| 110 | 111 |
| 112 _log = logging.getLogger(__name__) | |
| 113 | |
| 114 | |
| 111 def main(_argv, _stdout, _stderr): | 115 def main(_argv, _stdout, _stderr): |
| 112 options, args = parse_args() | 116 options, args = parse_args() |
| 113 import_dir = args[0] | 117 import_dir = args[0] |
| 114 if len(args) == 1: | 118 if len(args) == 1: |
| 115 repo_dir = os.path.dirname(import_dir) | 119 repo_dir = os.path.dirname(import_dir) |
| 116 else: | 120 else: |
| 117 repo_dir = args[1] | 121 repo_dir = args[1] |
| 118 | 122 |
| 119 if not os.path.exists(import_dir): | 123 if not os.path.exists(import_dir): |
| 120 sys.exit('Source directory %s not found!' % import_dir) | 124 sys.exit('Source directory %s not found!' % import_dir) |
| 121 | 125 |
| 122 if not os.path.exists(repo_dir): | 126 if not os.path.exists(repo_dir): |
| 123 sys.exit('Repository directory %s not found!' % repo_dir) | 127 sys.exit('Repository directory %s not found!' % repo_dir) |
| 124 if not repo_dir in import_dir: | 128 if not repo_dir in import_dir: |
| 125 sys.exit('Repository directory %s must be a parent of %s' % (repo_dir, import_dir)) | 129 sys.exit('Repository directory %s must be a parent of %s' % (repo_dir, import_dir)) |
| 126 | 130 |
| 131 configure_logging() | |
| 132 | |
| 127 test_importer = TestImporter(Host(), import_dir, repo_dir, options) | 133 test_importer = TestImporter(Host(), import_dir, repo_dir, options) |
| 128 test_importer.do_import() | 134 test_importer.do_import() |
| 129 | 135 |
| 130 | 136 |
| 137 def configure_logging(): | |
| 138 class LogHandler(logging.StreamHandler): | |
| 139 | |
| 140 def format(self, record): | |
| 141 if record.levelno > logging.INFO: | |
| ojan (2013/05/29 00:31:47): Should this be: if record.levelno > self.lvl: ? | |
| 142 return "%s: %s" % (record.levelname, record.getMessage()) | |
| 143 return record.getMessage() | |
| 144 | |
| 145 logger = logging.getLogger() | |
| 146 logger.setLevel(logging.INFO) | |
| 147 handler = LogHandler() | |
| 148 handler.setLevel(logging.INFO) | |
| 149 logger.addHandler(handler) | |
| 150 return handler | |
| 151 | |
| 152 | |
| 131 def parse_args(): | 153 def parse_args(): |
| 132 parser = optparse.OptionParser(usage='usage: %prog [options] w3c_test_directory [repo_directory]') | 154 parser = optparse.OptionParser(usage='usage: %prog [options] w3c_test_directory [repo_directory]') |
| 133 parser.add_option('-n', '--no-overwrite', dest='overwrite', action='store_false', default=True, | 155 parser.add_option('-n', '--no-overwrite', dest='overwrite', action='store_false', default=True, |
| 134 help='Flag to prevent duplicate test files from overwriting existing tests. By default, they will be overwritten') | 156 help='Flag to prevent duplicate test files from overwriting existing tests. By default, they will be overwritten') |
| 135 parser.add_option('-a', '--all', action='store_true', default=False, | 157 parser.add_option('-a', '--all', action='store_true', default=False, |
| 136 help='Import all tests including reftests, JS tests, and manual/pixel tests. By default, only reftests and JS tests are imported') | 158 help='Import all tests including reftests, JS tests, and manual/pixel tests. By default, only reftests and JS tests are imported') |
| 137 | 159 |
| 138 options, args = parser.parse_args() | 160 options, args = parser.parse_args() |
| 139 if len(args) not in (1, 2): | 161 if len(args) not in (1, 2): |
| 140 parser.error('Incorrect number of arguments') | 162 parser.error('Incorrect number of arguments') |
| (...skipping 31 matching lines...) | |
| 172 def load_changeset(self): | 194 def load_changeset(self): |
| 173 """Returns the current changeset from mercurial or "Not Available".""" | 195 """Returns the current changeset from mercurial or "Not Available".""" |
| 174 try: | 196 try: |
| 175 self.changeset = self.host.executive.run_command(['hg', 'tip']).split('changeset:')[1] | 197 self.changeset = self.host.executive.run_command(['hg', 'tip']).split('changeset:')[1] |
| 176 except (OSError, ScriptError): | 198 except (OSError, ScriptError): |
| 177 self.changeset = CHANGESET_NOT_AVAILABLE | 199 self.changeset = CHANGESET_NOT_AVAILABLE |
| 178 | 200 |
| 179 def find_importable_tests(self, directory): | 201 def find_importable_tests(self, directory): |
| 180 # FIXME: use filesystem | 202 # FIXME: use filesystem |
| 181 for root, dirs, files in os.walk(directory): | 203 for root, dirs, files in os.walk(directory): |
| 182 print 'Scanning ' + root + '...' | 204 _log.info('Scanning ' + root + '...') |
| 183 total_tests = 0 | 205 total_tests = 0 |
| 184 reftests = 0 | 206 reftests = 0 |
| 185 jstests = 0 | 207 jstests = 0 |
| 186 | 208 |
| 187 # "archive" and "data" dirs are internal csswg things that live in every approved directory. | 209 # "archive" and "data" dirs are internal csswg things that live in every approved directory. |
| 188 # FIXME: skip 'incoming' tests for now, but we should rework the 'test_status' concept and | 210 # FIXME: skip 'incoming' tests for now, but we should rework the 'test_status' concept and |
| 189 # support reading them as well. | 211 # support reading them as well. |
| 190 DIRS_TO_SKIP = ('.git', '.hg', 'data', 'archive', 'incoming') | 212 DIRS_TO_SKIP = ('.git', '.hg', 'data', 'archive', 'incoming') |
| 191 for d in DIRS_TO_SKIP: | 213 for d in DIRS_TO_SKIP: |
| 192 if d in dirs: | 214 if d in dirs: |
| (...skipping 91 matching lines...) | |
| 284 os.makedirs(new_path) | 306 os.makedirs(new_path) |
| 285 | 307 |
| 286 copied_files = [] | 308 copied_files = [] |
| 287 | 309 |
| 288 for file_to_copy in dir_to_copy['copy_list']: | 310 for file_to_copy in dir_to_copy['copy_list']: |
| 289 # FIXME: Split this block into a separate function. | 311 # FIXME: Split this block into a separate function. |
| 290 orig_filepath = os.path.normpath(file_to_copy['src']) | 312 orig_filepath = os.path.normpath(file_to_copy['src']) |
| 291 | 313 |
| 292 if os.path.isdir(orig_filepath): | 314 if os.path.isdir(orig_filepath): |
| 293 # FIXME: Figure out what is triggering this and what to do about it. | 315 # FIXME: Figure out what is triggering this and what to do about it. |
| 294 print 'Error: %s refers to a directory' % orig_filepath | 316 _log.error('%s refers to a directory' % orig_filepath) |
| 295 continue | 317 continue |
| 296 | 318 |
| 297 if not(os.path.exists(orig_filepath)): | 319 if not(os.path.exists(orig_filepath)): |
| 298 print 'Warning: ' + orig_filepath + ' not found. Possible error in the test.' | 320 _log.warning('%s not found. Possible error in the test.', orig_filepath) |
| 299 continue | 321 continue |
| 300 | 322 |
| 301 new_filepath = os.path.join(new_path, file_to_copy['dest']) | 323 new_filepath = os.path.join(new_path, file_to_copy['dest']) |
| 302 | 324 |
| 303 if not(os.path.exists(os.path.dirname(new_filepath))): | 325 if not(os.path.exists(os.path.dirname(new_filepath))): |
| 304 os.makedirs(os.path.dirname(new_filepath)) | 326 os.makedirs(os.path.dirname(new_filepath)) |
| 305 | 327 |
| 306 if not self.options.overwrite and os.path.exists(new_filepath): | 328 if not self.options.overwrite and os.path.exists(new_filepath): |
| 307 print 'Skipping import of existing file ' + new_filepath | 329 _log.info('Skipping import of existing file ' + new_filepath) |
| 308 else: | 330 else: |
| 309 # FIXME: Maybe doing a file diff is in order here for existing files? | 331 # FIXME: Maybe doing a file diff is in order here for existing files? |
| 310 # In other words, there's no sense in overwriting identical files, but | 332 # In other words, there's no sense in overwriting identical files, but |
| 311 # there's no harm in copying the identical thing. | 333 # there's no harm in copying the identical thing. |
| 312 print 'Importing:', orig_filepath | 334 _log.info('Importing: %s', orig_filepath) |
| 313 print ' As:', new_filepath | 335 _log.info(' As: %s', new_filepath) |
| 314 | 336 |
| 315 # Only html, xml, or css should be converted | 337 # Only html, xml, or css should be converted |
| 316 # FIXME: Eventually, so should js when support is added for this type of conversion | 338 # FIXME: Eventually, so should js when support is added for this type of conversion |
| 317 mimetype = mimetypes.guess_type(orig_filepath) | 339 mimetype = mimetypes.guess_type(orig_filepath) |
| 318 if 'html' in str(mimetype[0]) or 'xml' in str(mimetype[0]) or 'css' in str(mimetype[0]): | 340 if 'html' in str(mimetype[0]) or 'xml' in str(mimetype[0]) or 'css' in str(mimetype[0]): |
| 319 converted_file = converter.convert_for_webkit(new_path, filename=orig_filepath) | 341 converted_file = converter.convert_for_webkit(new_path, filename=orig_filepath) |
| 320 | 342 |
| 321 if not converted_file: | 343 if not converted_file: |
| 322 shutil.copyfile(orig_filepath, new_filepath) # The file was unmodified. | 344 shutil.copyfile(orig_filepath, new_filepath) # The file was unmodified. |
| 323 else: | 345 else: |
| 324 for prefixed_property in converted_file[0]: | 346 for prefixed_property in converted_file[0]: |
| 325 total_prefixed_properties.setdefault(prefixed_property, 0) | 347 total_prefixed_properties.setdefault(prefixed_property, 0) |
| 326 total_prefixed_properties[prefixed_property] += 1 | 348 total_prefixed_properties[prefixed_property] += 1 |
| 327 | 349 |
| 328 prefixed_properties.extend(set(converted_file[0]) - set(prefixed_properties)) | 350 prefixed_properties.extend(set(converted_file[0]) - set(prefixed_properties)) |
| 329 outfile = open(new_filepath, 'wb') | 351 outfile = open(new_filepath, 'wb') |
| 330 outfile.write(converted_file[1]) | 352 outfile.write(converted_file[1]) |
| 331 outfile.close() | 353 outfile.close() |
| 332 else: | 354 else: |
| 333 shutil.copyfile(orig_filepath, new_filepath) | 355 shutil.copyfile(orig_filepath, new_filepath) |
| 334 | 356 |
| 335 copied_files.append(new_filepath.replace(self._webkit_root, '')) | 357 copied_files.append(new_filepath.replace(self._webkit_root, '')) |
| 336 | 358 |
| 337 self.remove_deleted_files(new_path, copied_files) | 359 self.remove_deleted_files(new_path, copied_files) |
| 338 self.write_import_log(new_path, copied_files, prefixed_properties) | 360 self.write_import_log(new_path, copied_files, prefixed_properties) |
| 339 | 361 |
| 340 print 'Import complete' | 362 _log.info('Import complete') |
| 341 | 363 |
| 342 print 'IMPORTED ' + str(total_imported_tests) + ' TOTAL TESTS' | 364 _log.info('IMPORTED %d TOTAL TESTS', total_imported_tests) |
| 343 print 'Imported ' + str(total_imported_reftests) + ' reftests' | 365 _log.info('Imported %d reftests', total_imported_reftests) |
| 344 print 'Imported ' + str(total_imported_jstests) + ' JS tests' | 366 _log.info('Imported %d JS tests', total_imported_jstests) |
| 345 print 'Imported ' + str(total_imported_tests - total_imported_jstests - total_imported_reftests) + ' pixel/manual tests' | 367 _log.info('Imported %d pixel/manual tests', total_imported_tests - total_imported_jstests - total_imported_reftests) |
| 346 print | 368 _log.info('') |
| 347 print "Properties needing prefixes (by count):" | 369 _log.info('Properties needing prefixes (by count):') |
| 348 for prefixed_property in sorted(total_prefixed_properties, key=lambda p: total_prefixed_properties[p]): | 370 for prefixed_property in sorted(total_prefixed_properties, key=lambda p: total_prefixed_properties[p]): |
| 349 print " %s: %s" % (prefixed_property, total_prefixed_properties[prefixed_property]) | 371 _log.info(' %s: %s', prefixed_property, total_prefixed_properties[prefixed_property]) |
| 350 | 372 |
| 351 def setup_destination_directory(self): | 373 def setup_destination_directory(self): |
| 352 """ Creates a destination directory that mirrors that of the source approved or submitted directory """ | 374 """ Creates a destination directory that mirrors that of the source approved or submitted directory """ |
| 353 | 375 |
| 354 self.update_test_status() | 376 self.update_test_status() |
| 355 | 377 |
| 356 start = self.source_directory.find(self.test_status) | 378 start = self.source_directory.find(self.test_status) |
| 357 new_subpath = self.source_directory[len(self.repo_dir):] | 379 new_subpath = self.source_directory[len(self.repo_dir):] |
| 358 | 380 |
| 359 destination_directory = os.path.join(self.destination_directory, new_subpath) | 381 destination_directory = os.path.join(self.destination_directory, new_subpath) |
| 360 | 382 |
| 361 if not os.path.exists(destination_directory): | 383 if not os.path.exists(destination_directory): |
| 362 os.makedirs(destination_directory) | 384 os.makedirs(destination_directory) |
| 363 | 385 |
| 364 print 'Tests will be imported into: ' + destination_directory | 386 _log.info('Tests will be imported into: %s', destination_directory) |
| 365 | 387 |
| 366 def update_test_status(self): | 388 def update_test_status(self): |
| 367 """ Sets the test status to either 'approved' or 'submitted' """ | 389 """ Sets the test status to either 'approved' or 'submitted' """ |
| 368 | 390 |
| 369 status = TEST_STATUS_UNKNOWN | 391 status = TEST_STATUS_UNKNOWN |
| 370 | 392 |
| 371 if 'approved' in self.source_directory.split(os.path.sep): | 393 if 'approved' in self.source_directory.split(os.path.sep): |
| 372 status = TEST_STATUS_APPROVED | 394 status = TEST_STATUS_APPROVED |
| 373 elif 'submitted' in self.source_directory.split(os.path.sep): | 395 elif 'submitted' in self.source_directory.split(os.path.sep): |
| 374 status = TEST_STATUS_SUBMITTED | 396 status = TEST_STATUS_SUBMITTED |
| (...skipping 11 matching lines...) | |
| 386 | 408 |
| 387 import_log = open(import_log_file, 'r') | 409 import_log = open(import_log_file, 'r') |
| 388 contents = import_log.readlines() | 410 contents = import_log.readlines() |
| 389 | 411 |
| 390 if 'List of files:\n' in contents: | 412 if 'List of files:\n' in contents: |
| 391 list_index = contents.index('List of files:\n') + 1 | 413 list_index = contents.index('List of files:\n') + 1 |
| 392 previous_file_list = [filename.strip() for filename in contents[list_index:]] | 414 previous_file_list = [filename.strip() for filename in contents[list_index:]] |
| 393 | 415 |
| 394 deleted_files = set(previous_file_list) - set(new_file_list) | 416 deleted_files = set(previous_file_list) - set(new_file_list) |
| 395 for deleted_file in deleted_files: | 417 for deleted_file in deleted_files: |
| 396 print 'Deleting file removed from the W3C repo:' + deleted_file | 418 _log.info('Deleting file removed from the W3C repo: %s', deleted_file) |
| 397 deleted_file = os.path.join(self._webkit_root, deleted_file) | 419 deleted_file = os.path.join(self._webkit_root, deleted_file) |
| 398 os.remove(deleted_file) | 420 os.remove(deleted_file) |
| 399 | 421 |
| 400 import_log.close() | 422 import_log.close() |
| 401 | 423 |
| 402 def write_import_log(self, import_directory, file_list, prop_list): | 424 def write_import_log(self, import_directory, file_list, prop_list): |
| 403 """ Writes a w3c-import.log file in each directory with imported files. """ | 425 """ Writes a w3c-import.log file in each directory with imported files. """ |
| 404 | 426 |
| 405 now = datetime.datetime.now() | 427 now = datetime.datetime.now() |
| 406 | 428 |
| (...skipping 13 matching lines...) | |
| 420 for prop in prop_list: | 442 for prop in prop_list: |
| 421 import_log.write(prop + '\n') | 443 import_log.write(prop + '\n') |
| 422 else: | 444 else: |
| 423 import_log.write('None\n') | 445 import_log.write('None\n') |
| 424 import_log.write('------------------------------------------------------------------------\n') | 446 import_log.write('------------------------------------------------------------------------\n') |
| 425 import_log.write('List of files:\n') | 447 import_log.write('List of files:\n') |
| 426 for item in file_list: | 448 for item in file_list: |
| 427 import_log.write(item + '\n') | 449 import_log.write(item + '\n') |
| 428 | 450 |
| 429 import_log.close() | 451 import_log.close() |
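
For readers following ojan's inline question on the new LogHandler.format(): below is a minimal, standalone sketch of the print-to-_log.info() conversion this patch makes, using only the stdlib logging module. The class name PlainInfoHandler and the sample messages are illustrative, not from the patch. The patch compares against a hardcoded logging.INFO; the reviewer's suggested variant would compare against the handler's own threshold, which the stdlib stores in self.level once setLevel() is called.

```python
import logging

_log = logging.getLogger(__name__)


class PlainInfoHandler(logging.StreamHandler):
    """Emit INFO records bare; prefix anything above INFO with its level name."""

    def format(self, record):
        # The patch hardcodes logging.INFO here. The reviewer's variant would be
        # `record.levelno > self.level`, i.e. compare against the threshold this
        # handler was given via setLevel() rather than a fixed constant.
        if record.levelno > logging.INFO:
            return '%s: %s' % (record.levelname, record.getMessage())
        return record.getMessage()


def configure_logging():
    # Mirrors the configure_logging() added in this patch: root logger at INFO,
    # one stream handler using the minimal formatter above.
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)
    handler = PlainInfoHandler()
    handler.setLevel(logging.INFO)
    logger.addHandler(handler)
    return handler


if __name__ == '__main__':
    configure_logging()
    _log.info('Importing: %s', 'some/test.html')   # printed bare
    _log.warning('%s not found.', 'missing.html')  # printed as "WARNING: ..."
```

Either comparison is a one-line change to format(); which one the patch should use is the open question in the review, not something this sketch decides. StreamHandler writes to stderr by default, and the patch keeps that default as well.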