Chromium Code Reviews

Unified Diff: Tools/Scripts/webkitpy/bindings/main.py

Issue 557203002: Added core and modules to binding tests results for binding modularization. (Closed) Base URL: svn://svn.chromium.org/blink/trunk
Patch Set: Added s/b/tests/idls/modules/TestInterface5.idl Created 6 years, 3 months ago
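Note on the change: the bindings test reference results are split per component, mirroring the core and modules source directories, so generated and reference files now live under per-component subdirectories. The paths below are illustrative only (the exact file names depend on the test IDL files):

    Source/bindings/tests/results/core/V8TestObject.h
    Source/bindings/tests/results/modules/V8TestInterface5.h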
Index: Tools/Scripts/webkitpy/bindings/main.py
diff --git a/Tools/Scripts/webkitpy/bindings/main.py b/Tools/Scripts/webkitpy/bindings/main.py
index 7671ec0a61ebfa3da0a16f0d8dbfabf3a5749a6a..db8d9f4851a95bcb63830ee90bb3bed3c3ef564a 100644
--- a/Tools/Scripts/webkitpy/bindings/main.py
+++ b/Tools/Scripts/webkitpy/bindings/main.py
@@ -72,6 +72,11 @@ COMPONENT_DIRECTORY = frozenset(['core', 'modules'])
test_input_directory = os.path.join(source_path, 'bindings', 'tests', 'idls')
reference_directory = os.path.join(source_path, 'bindings', 'tests', 'results')

+PLY_LEX_YACC_FILES = frozenset([
+    'lextab.py',  # PLY lex
+    'lextab.pyc',
+    'parsetab.pickle',  # PLY yacc
+])

@contextmanager
def TemporaryDirectory():
@@ -125,6 +130,16 @@ def generate_interface_dependencies():
def bindings_tests(output_directory, verbose):
    executive = Executive()

+    def listfiles(directory):
bashi 2014/09/10 07:39:53 listfiles -> list_files
tasak 2014/09/10 08:27:18 Done.
+        files = []
+        for component in os.listdir(directory):
+            if component not in COMPONENT_DIRECTORY:
+                continue
+            directory_with_component = os.path.join(directory, component)
+            for filename in os.listdir(directory_with_component):
+                files.append(os.path.join(directory_with_component, filename))
+        return files
+
    def diff(filename1, filename2):
        # Python's difflib module is too slow, especially on long output, so
        # run external diff(1) command
@@ -137,14 +152,17 @@ def bindings_tests(output_directory, verbose):
        # non-zero exit if files differ.
        return executive.run_command(cmd, error_handler=lambda x: None)

+    def is_cache_file(filename):
+        if filename in PLY_LEX_YACC_FILES:
+            return True
+        if filename.endswith('.cache'):  # Jinja
+            return True
+        return False
+
    def delete_cache_files():
        # FIXME: Instead of deleting cache files, don't generate them.
-        cache_files = [os.path.join(output_directory, output_file)
-                       for output_file in os.listdir(output_directory)
-                       if (output_file in ('lextab.py',  # PLY lex
-                                           'lextab.pyc',
-                                           'parsetab.pickle') or  # PLY yacc
-                           output_file.endswith('.cache'))]  # Jinja
+        cache_files = [path for path in listfiles(output_directory)
+                       if is_cache_file(os.path.basename(path))]
        for cache_file in cache_files:
            os.remove(cache_file)
@@ -170,18 +188,25 @@ def bindings_tests(output_directory, verbose):
        return True

    def identical_output_files():
-        file_pairs = [(os.path.join(reference_directory, output_file),
-                       os.path.join(output_directory, output_file))
-                      for output_file in os.listdir(output_directory)]
+        output_files = listfiles(output_directory)
+        reference_files = [os.path.join(reference_directory,
+                                        os.path.relpath(path, output_directory))
+                           for path in output_files]
        return all([identical_file(reference_filename, output_filename)
-                    for (reference_filename, output_filename) in file_pairs])
+                    for (reference_filename, output_filename) in zip(reference_files, output_files)])

    def no_excess_files():
bashi 2014/09/10 07:39:53 You call listfiles() twice here. Maybe good to sto
tasak 2014/09/10 08:27:18 Done.
-        generated_files = set(os.listdir(output_directory))
-        generated_files.add('.svn')  # Subversion working copy directory
-        excess_files = [output_file
-                        for output_file in os.listdir(reference_directory)
-                        if output_file not in generated_files]
+        generated_files = set([os.path.relpath(path, output_directory)
+                               for path in listfiles(output_directory)])
+        # Add subversion working copy directories in core and modules.
+        for component in COMPONENT_DIRECTORY:
+            generated_files.add(os.path.join(component, '.svn'))
+
+        excess_files = []
+        for path in listfiles(reference_directory):
+            relpath = os.path.relpath(path, reference_directory)
+            if relpath not in generated_files:
+                excess_files.append(relpath)
        if excess_files:
            print ('Excess reference files! '
                   '(probably cruft from renaming or deleting):\n' +
@@ -189,17 +214,28 @@ def bindings_tests(output_directory, verbose):
            return False
        return True
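Regarding bashi's comment on no_excess_files() above about calling listfiles() twice: a minimal sketch of one way to apply it (hypothetical; the actual follow-up patch set may differ, and it assumes both checks are changed to accept the precomputed listing):

    # Hypothetical fragment inside bindings_tests(): walk the output tree once
    # and hand the same listing to both checks, instead of each helper calling
    # listfiles(output_directory) itself.
    output_files = listfiles(output_directory)
    passed = (identical_output_files(output_files) and
              no_excess_files(output_files))

Listing the tree once also keeps the two checks consistent if files change between walks.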
+    def makedir(path):
bashi 2014/09/10 07:39:53 Use os.path.exists() and os.makedirs(). if not os
tasak 2014/09/10 08:27:18 Done.
+        try:
+            os.mkdir(path)
+        except OSError as e:
+            if e.args[0] == os.errno.EEXIST:
+                return
+            raise e
+
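A minimal sketch of the os.path.exists()/os.makedirs() variant bashi suggests above, for illustration only (it would replace the EEXIST handling shown in this patch set):

    import os

    def makedir(path):
        # Skip creation when the directory already exists, rather than
        # catching EEXIST from os.mkdir().
        if not os.path.exists(path):
            os.makedirs(path)

os.makedirs() also creates missing parent directories, and the exists() check leaves a small race window that the EEXIST handling avoids; for a single-process test harness either behaviour is fine.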
    try:
        generate_interface_dependencies()
-        idl_compiler = IdlCompilerV8(output_directory,
-                                     interfaces_info=interfaces_info,
-                                     only_if_changed=True)
-        dictionary_impl_compiler = IdlCompilerDictionaryImpl(
-            output_directory, interfaces_info=interfaces_info,
-            only_if_changed=True)
-
-        idl_filenames = []
        for component in COMPONENT_DIRECTORY:
+            output_dir = os.path.join(output_directory, component)
+            makedir(output_dir)
+
+            idl_compiler = IdlCompilerV8(output_dir,
+                                         interfaces_info=interfaces_info,
+                                         only_if_changed=True)
+            dictionary_impl_compiler = IdlCompilerDictionaryImpl(
+                output_dir, interfaces_info=interfaces_info,
+                only_if_changed=True)
+
+            idl_filenames = []
            input_directory = os.path.join(test_input_directory, component)
            for filename in os.listdir(input_directory):
                if (filename.endswith('.idl') and
@@ -209,15 +245,14 @@ def bindings_tests(output_directory, verbose):
                    idl_filenames.append(
                        os.path.realpath(
                            os.path.join(input_directory, filename)))
-        for idl_path in idl_filenames:
-            idl_basename = os.path.basename(idl_path)
-            idl_compiler.compile_file(idl_path)
-            definition_name, _ = os.path.splitext(idl_basename)
-            if (definition_name in interfaces_info and
-                interfaces_info[definition_name]['is_dictionary']):
-                dictionary_impl_compiler.compile_file(idl_path)
-            if verbose:
-                print 'Compiled: %s' % idl_path
+            for idl_path in idl_filenames:
+                idl_basename = os.path.basename(idl_path)
+                idl_compiler.compile_file(idl_path)
+                definition_name, _ = os.path.splitext(idl_basename)
+                if (definition_name in interfaces_info and interfaces_info[definition_name]['is_dictionary']):
+                    dictionary_impl_compiler.compile_file(idl_path)
+                if verbose:
+                    print 'Compiled: %s' % idl_path
    finally:
        delete_cache_files()
