| OLD | NEW |
| 1 # Copyright (c) 2013 Google Inc. All rights reserved. | 1 # Copyright (c) 2013 Google Inc. All rights reserved. |
| 2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
| 3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
| 4 | 4 |
| 5 from __future__ import print_function |
| 6 |
| 5 import collections | 7 import collections |
| 6 import copy | 8 import copy |
| 7 import hashlib | 9 import hashlib |
| 8 import json | 10 import json |
| 9 import multiprocessing | 11 import multiprocessing |
| 10 import os.path | 12 import os.path |
| 11 import re | 13 import re |
| 12 import signal | 14 import signal |
| 13 import subprocess | 15 import subprocess |
| 14 import sys | 16 import sys |
| 15 import gyp | 17 import gyp |
| 16 import gyp.common | 18 import gyp.common |
| 17 from gyp.common import OrderedSet | 19 from gyp.common import OrderedSet |
| 18 import gyp.msvs_emulation | 20 import gyp.msvs_emulation |
| 19 import gyp.MSVSUtil as MSVSUtil | 21 import gyp.MSVSUtil as MSVSUtil |
| 20 import gyp.xcode_emulation | 22 import gyp.xcode_emulation |
| 21 from cStringIO import StringIO | 23 try: |
| 24 from cStringIO import StringIO |
| 25 except ImportError: |
| 26 from io import StringIO |
| 22 | 27 |
| 23 from gyp.common import GetEnvironFallback | 28 from gyp.common import GetEnvironFallback |
| 24 import gyp.ninja_syntax as ninja_syntax | 29 import gyp.ninja_syntax as ninja_syntax |
| 25 | 30 |
| 26 generator_default_variables = { | 31 generator_default_variables = { |
| 27 'EXECUTABLE_PREFIX': '', | 32 'EXECUTABLE_PREFIX': '', |
| 28 'EXECUTABLE_SUFFIX': '', | 33 'EXECUTABLE_SUFFIX': '', |
| 29 'STATIC_LIB_PREFIX': 'lib', | 34 'STATIC_LIB_PREFIX': 'lib', |
| 30 'STATIC_LIB_SUFFIX': '.a', | 35 'STATIC_LIB_SUFFIX': '.a', |
| 31 'SHARED_LIB_PREFIX': 'lib', | 36 'SHARED_LIB_PREFIX': 'lib', |
| (...skipping 308 matching lines...) |
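The import hunk above adopts a dual Python 2/3 style: `print_function` from `__future__`, plus a `cStringIO` import with an `io.StringIO` fallback. A minimal standalone sketch of that pattern (not part of the patch, buffer contents are illustrative):

```python
from __future__ import print_function

try:
  from cStringIO import StringIO   # Python 2: C-accelerated implementation
except ImportError:
  from io import StringIO          # Python 3: cStringIO no longer exists

buf = StringIO()
buf.write(u'build all: phony\n')
print(buf.getvalue(), end='')      # print() behaves the same on 2.7 and 3.x
```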
| 340 path_basename = self.name + '.' + path_basename | 345 path_basename = self.name + '.' + path_basename |
| 341 return os.path.normpath(os.path.join(obj, self.base_dir, path_dir, | 346 return os.path.normpath(os.path.join(obj, self.base_dir, path_dir, |
| 342 path_basename)) | 347 path_basename)) |
| 343 | 348 |
| 344 def WriteCollapsedDependencies(self, name, targets, order_only=None): | 349 def WriteCollapsedDependencies(self, name, targets, order_only=None): |
| 345 """Given a list of targets, return a path for a single file | 350 """Given a list of targets, return a path for a single file |
| 346 representing the result of building all the targets or None. | 351 representing the result of building all the targets or None. |
| 347 | 352 |
| 348 Uses a stamp file if necessary.""" | 353 Uses a stamp file if necessary.""" |
| 349 | 354 |
| 350 assert targets == filter(None, targets), targets | 355 assert targets == [t for t in targets if t], targets |
| 351 if len(targets) == 0: | 356 if len(targets) == 0: |
| 352 assert not order_only | 357 assert not order_only |
| 353 return None | 358 return None |
| 354 if len(targets) > 1 or order_only: | 359 if len(targets) > 1 or order_only: |
| 355 stamp = self.GypPathToUniqueOutput(name + '.stamp') | 360 stamp = self.GypPathToUniqueOutput(name + '.stamp') |
| 356 targets = self.ninja.build(stamp, 'stamp', targets, order_only=order_only) | 361 targets = self.ninja.build(stamp, 'stamp', targets, order_only=order_only) |
| 357 self.ninja.newline() | 362 self.ninja.newline() |
| 358 return targets[0] | 363 return targets[0] |
| 359 | 364 |
| 360 def _SubninjaNameForArch(self, arch): | 365 def _SubninjaNameForArch(self, arch): |
| (...skipping 57 matching lines...) |
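The assert rewrite at new line 355 is not cosmetic: on Python 3, `filter()` returns a lazy iterator, so comparing it against a list is always False. A small sketch of the behavior difference (values are illustrative):

```python
targets = ['obj/foo.stamp', 'obj/bar.stamp']

# Python 2: filter(None, targets) returns a list, so the old comparison
# actually checked for empty entries.
# Python 3: filter() returns an iterator, so `targets == filter(None, targets)`
# is always False and the assert would fire on perfectly valid input.
assert targets == [t for t in targets if t], targets
```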
| 418 actions_depends = [] | 423 actions_depends = [] |
| 419 compile_depends = [] | 424 compile_depends = [] |
| 420 # TODO(evan): it is rather confusing which things are lists and which | 425 # TODO(evan): it is rather confusing which things are lists and which |
| 421 # are strings. Fix these. | 426 # are strings. Fix these. |
| 422 if 'dependencies' in spec: | 427 if 'dependencies' in spec: |
| 423 for dep in spec['dependencies']: | 428 for dep in spec['dependencies']: |
| 424 if dep in self.target_outputs: | 429 if dep in self.target_outputs: |
| 425 target = self.target_outputs[dep] | 430 target = self.target_outputs[dep] |
| 426 actions_depends.append(target.PreActionInput(self.flavor)) | 431 actions_depends.append(target.PreActionInput(self.flavor)) |
| 427 compile_depends.append(target.PreCompileInput()) | 432 compile_depends.append(target.PreCompileInput()) |
| 428 actions_depends = filter(None, actions_depends) | 433 actions_depends = [d for d in actions_depends if d] |
| 429 compile_depends = filter(None, compile_depends) | 434 compile_depends = [d for d in compile_depends if d] |
| 430 actions_depends = self.WriteCollapsedDependencies('actions_depends', | 435 actions_depends = self.WriteCollapsedDependencies('actions_depends', |
| 431 actions_depends) | 436 actions_depends) |
| 432 compile_depends = self.WriteCollapsedDependencies('compile_depends', | 437 compile_depends = self.WriteCollapsedDependencies('compile_depends', |
| 433 compile_depends) | 438 compile_depends) |
| 434 self.target.preaction_stamp = actions_depends | 439 self.target.preaction_stamp = actions_depends |
| 435 self.target.precompile_stamp = compile_depends | 440 self.target.precompile_stamp = compile_depends |
| 436 | 441 |
| 437 # Write out actions, rules, and copies. These must happen before we | 442 # Write out actions, rules, and copies. These must happen before we |
| 438 # compile any sources, so compute a list of predependencies for sources | 443 # compile any sources, so compute a list of predependencies for sources |
| 439 # while we do it. | 444 # while we do it. |
| (...skipping 31 matching lines...) |
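For context on what `WriteCollapsedDependencies` emits here: many per-dependency outputs are collapsed into a single stamp edge that later compile/link edges can depend on. A hedged sketch of that idea using the standalone `ninja_syntax` module (an assumption; gyp bundles its own copy, and all paths below are illustrative):

```python
from __future__ import print_function
import io
import ninja_syntax  # assumption: the standalone pip module, same Writer API

out = io.StringIO()
n = ninja_syntax.Writer(out)
n.rule('stamp', command='touch $out', description='STAMP $out')

# Many per-target stamps collapse into one file that later build edges can
# list as a single (order-only) dependency.
deps = ['obj/foo.actions.stamp', 'obj/bar.actions.stamp']
collapsed = n.build('obj/my_target.compile_depends.stamp', 'stamp', deps)
n.newline()

print(out.getvalue())
print('collapsed dependency:', collapsed[0])
```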
| 471 lambda path, lang: self.GypPathToUniqueOutput(path + '-' + lang)) | 476 lambda path, lang: self.GypPathToUniqueOutput(path + '-' + lang)) |
| 472 link_deps = self.WriteSources( | 477 link_deps = self.WriteSources( |
| 473 self.ninja, config_name, config, sources, compile_depends_stamp, pch, | 478 self.ninja, config_name, config, sources, compile_depends_stamp, pch, |
| 474 spec) | 479 spec) |
| 475 # Some actions/rules output 'sources' that are already object files. | 480 # Some actions/rules output 'sources' that are already object files. |
| 476 obj_outputs = [f for f in sources if f.endswith(self.obj_ext)] | 481 obj_outputs = [f for f in sources if f.endswith(self.obj_ext)] |
| 477 if obj_outputs: | 482 if obj_outputs: |
| 478 if self.flavor != 'mac' or len(self.archs) == 1: | 483 if self.flavor != 'mac' or len(self.archs) == 1: |
| 479 link_deps += [self.GypPathToNinja(o) for o in obj_outputs] | 484 link_deps += [self.GypPathToNinja(o) for o in obj_outputs] |
| 480 else: | 485 else: |
| 481 print "Warning: Actions/rules writing object files don't work with " \ | 486 print("Warning: Actions/rules writing object files don't work with " \ |
| 482 "multiarch targets, dropping. (target %s)" % spec['target_name'] | 487 "multiarch targets, dropping. (target %s)" % spec['target_name']
) |
| 483 elif self.flavor == 'mac' and len(self.archs) > 1: | 488 elif self.flavor == 'mac' and len(self.archs) > 1: |
| 484 link_deps = collections.defaultdict(list) | 489 link_deps = collections.defaultdict(list) |
| 485 | 490 |
| 486 compile_deps = self.target.actions_stamp or actions_depends | 491 compile_deps = self.target.actions_stamp or actions_depends |
| 487 if self.flavor == 'win' and self.target.type == 'static_library': | 492 if self.flavor == 'win' and self.target.type == 'static_library': |
| 488 self.target.component_objs = link_deps | 493 self.target.component_objs = link_deps |
| 489 self.target.compile_deps = compile_deps | 494 self.target.compile_deps = compile_deps |
| 490 | 495 |
| 491 # Write out a link step, if needed. | 496 # Write out a link step, if needed. |
| 492 output = None | 497 output = None |
| (...skipping 331 matching lines...) |
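The warning above is one of the print-statement-to-function conversions; with the `print_function` import from the top of the file the call is valid on both interpreters. A standalone sketch (the target name is an illustrative value, not from the patch):

```python
from __future__ import print_function

target_name = 'base_unittests'  # illustrative only
print("Warning: Actions/rules writing object files don't work with "
      "multiarch targets, dropping. (target %s)" % target_name)
```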
| 824 at each invocation of ninja.""" | 829 at each invocation of ninja.""" |
| 825 if not xcassets: | 830 if not xcassets: |
| 826 return | 831 return |
| 827 | 832 |
| 828 extra_arguments = {} | 833 extra_arguments = {} |
| 829 settings_to_arg = { | 834 settings_to_arg = { |
| 830 'XCASSETS_APP_ICON': 'app-icon', | 835 'XCASSETS_APP_ICON': 'app-icon', |
| 831 'XCASSETS_LAUNCH_IMAGE': 'launch-image', | 836 'XCASSETS_LAUNCH_IMAGE': 'launch-image', |
| 832 } | 837 } |
| 833 settings = self.xcode_settings.xcode_settings[self.config_name] | 838 settings = self.xcode_settings.xcode_settings[self.config_name] |
| 834 for settings_key, arg_name in settings_to_arg.iteritems(): | 839 for settings_key, arg_name in settings_to_arg.items(): |
| 835 value = settings.get(settings_key) | 840 value = settings.get(settings_key) |
| 836 if value: | 841 if value: |
| 837 extra_arguments[arg_name] = value | 842 extra_arguments[arg_name] = value |
| 838 | 843 |
| 839 partial_info_plist = None | 844 partial_info_plist = None |
| 840 if extra_arguments: | 845 if extra_arguments: |
| 841 partial_info_plist = self.GypPathToUniqueOutput( | 846 partial_info_plist = self.GypPathToUniqueOutput( |
| 842 'assetcatalog_generated_info.plist') | 847 'assetcatalog_generated_info.plist') |
| 843 extra_arguments['output-partial-info-plist'] = partial_info_plist | 848 extra_arguments['output-partial-info-plist'] = partial_info_plist |
| 844 | 849 |
| (...skipping 909 matching lines...) |
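The `iteritems()` to `items()` change in the xcassets hunk is the standard portability move: `dict.iteritems()` does not exist on Python 3, while `.items()` works on both (building a list on Python 2). A minimal sketch with illustrative settings values:

```python
settings_to_arg = {
    'XCASSETS_APP_ICON': 'app-icon',
    'XCASSETS_LAUNCH_IMAGE': 'launch-image',
}
settings = {'XCASSETS_APP_ICON': 'AppIcon'}  # illustrative xcode settings

extra_arguments = {}
for settings_key, arg_name in settings_to_arg.items():  # py2 and py3 safe
  value = settings.get(settings_key)
  if value:
    extra_arguments[arg_name] = value

assert extra_arguments == {'app-icon': 'AppIcon'}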
| 1754 ("ullAvailVirtual", ctypes.c_ulonglong), | 1759 ("ullAvailVirtual", ctypes.c_ulonglong), |
| 1755 ("sullAvailExtendedVirtual", ctypes.c_ulonglong), | 1760 ("sullAvailExtendedVirtual", ctypes.c_ulonglong), |
| 1756 ] | 1761 ] |
| 1757 | 1762 |
| 1758 stat = MEMORYSTATUSEX() | 1763 stat = MEMORYSTATUSEX() |
| 1759 stat.dwLength = ctypes.sizeof(stat) | 1764 stat.dwLength = ctypes.sizeof(stat) |
| 1760 ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat)) | 1765 ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat)) |
| 1761 | 1766 |
| 1762 # VS 2015 uses 20% more working set than VS 2013 and can consume all RAM | 1767 # VS 2015 uses 20% more working set than VS 2013 and can consume all RAM |
| 1763 # on a 64 GB machine. | 1768 # on a 64 GB machine. |
| 1764 mem_limit = max(1, stat.ullTotalPhys / (5 * (2 ** 30))) # total / 5GB | 1769 mem_limit = max(1, stat.ullTotalPhys // (5 * (2 ** 30))) # total / 5GB |
| 1765 hard_cap = max(1, int(os.environ.get('GYP_LINK_CONCURRENCY_MAX', 2**32))) | 1770 hard_cap = max(1, int(os.environ.get('GYP_LINK_CONCURRENCY_MAX', 2**32))) |
| 1766 return min(mem_limit, hard_cap) | 1771 return min(mem_limit, hard_cap) |
| 1767 elif sys.platform.startswith('linux'): | 1772 elif sys.platform.startswith('linux'): |
| 1768 if os.path.exists("/proc/meminfo"): | 1773 if os.path.exists("/proc/meminfo"): |
| 1769 with open("/proc/meminfo") as meminfo: | 1774 with open("/proc/meminfo") as meminfo: |
| 1770 memtotal_re = re.compile(r'^MemTotal:\s*(\d*)\s*kB') | 1775 memtotal_re = re.compile(r'^MemTotal:\s*(\d*)\s*kB') |
| 1771 for line in meminfo: | 1776 for line in meminfo: |
| 1772 match = memtotal_re.match(line) | 1777 match = memtotal_re.match(line) |
| 1773 if not match: | 1778 if not match: |
| 1774 continue | 1779 continue |
| 1775 # Allow 8Gb per link on Linux because Gold is quite memory hungry | 1780 # Allow 8Gb per link on Linux because Gold is quite memory hungry |
| 1776 return max(1, int(match.group(1)) / (8 * (2 ** 20))) | 1781 return max(1, int(match.group(1)) // (8 * (2 ** 20))) |
| 1777 return 1 | 1782 return 1 |
| 1778 elif sys.platform == 'darwin': | 1783 elif sys.platform == 'darwin': |
| 1779 try: | 1784 try: |
| 1780 avail_bytes = int(subprocess.check_output(['sysctl', '-n', 'hw.memsize'])) | 1785 avail_bytes = int(subprocess.check_output(['sysctl', '-n', 'hw.memsize'])) |
| 1781 # A static library debug build of Chromium's unit_tests takes ~2.7GB, so | 1786 # A static library debug build of Chromium's unit_tests takes ~2.7GB, so |
| 1782 # 4GB per ld process allows for some more bloat. | 1787 # 4GB per ld process allows for some more bloat. |
| 1783 return max(1, avail_bytes / (4 * (2 ** 30))) # total / 4GB | 1788 return max(1, avail_bytes // (4 * (2 ** 30))) # total / 4GB |
| 1784 except: | 1789 except: |
| 1785 return 1 | 1790 return 1 |
| 1786 else: | 1791 else: |
| 1787 # TODO(scottmg): Implement this for other platforms. | 1792 # TODO(scottmg): Implement this for other platforms. |
| 1788 return 1 | 1793 return 1 |
| 1789 | 1794 |
| 1790 | 1795 |
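The `//` changes in `GetDefaultConcurrentLinks` matter because Python 3's `/` always yields a float, and the result feeds code that expects an integer job count. A worked sketch under an assumed 64 GB machine:

```python
# Illustrative numbers only: pretend a 64 GB machine, 5 GB budget per link.
total_phys = 64 * (2 ** 30)
mem_limit = max(1, total_phys // (5 * (2 ** 30)))   # floor division
assert mem_limit == 12 and isinstance(mem_limit, int)

# With the old `/`, Python 3 would produce 12.8, and the min()/pool-size
# callers downstream would receive a float instead of a job count.
```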
| 1791 def _GetWinLinkRuleNameSuffix(embed_manifest): | 1796 def _GetWinLinkRuleNameSuffix(embed_manifest): |
| 1792 """Returns the suffix used to select an appropriate linking rule depending on | 1797 """Returns the suffix used to select an appropriate linking rule depending on |
| 1793 whether the manifest embedding is enabled.""" | 1798 whether the manifest embedding is enabled.""" |
| (...skipping 134 matching lines...) |
| 1928 if key == 'NM.host': | 1933 if key == 'NM.host': |
| 1929 nm_host = os.path.join(build_to_root, value) | 1934 nm_host = os.path.join(build_to_root, value) |
| 1930 if key == 'READELF': | 1935 if key == 'READELF': |
| 1931 readelf = os.path.join(build_to_root, value) | 1936 readelf = os.path.join(build_to_root, value) |
| 1932 if key == 'READELF.host': | 1937 if key == 'READELF.host': |
| 1933 readelf_host = os.path.join(build_to_root, value) | 1938 readelf_host = os.path.join(build_to_root, value) |
| 1934 if key.endswith('_wrapper'): | 1939 if key.endswith('_wrapper'): |
| 1935 wrappers[key[:-len('_wrapper')]] = os.path.join(build_to_root, value) | 1940 wrappers[key[:-len('_wrapper')]] = os.path.join(build_to_root, value) |
| 1936 | 1941 |
| 1937 # Support wrappers from environment variables too. | 1942 # Support wrappers from environment variables too. |
| 1938 for key, value in os.environ.iteritems(): | 1943 for key, value in os.environ.items(): |
| 1939 if key.lower().endswith('_wrapper'): | 1944 if key.lower().endswith('_wrapper'): |
| 1940 key_prefix = key[:-len('_wrapper')] | 1945 key_prefix = key[:-len('_wrapper')] |
| 1941 key_prefix = re.sub(r'\.HOST$', '.host', key_prefix) | 1946 key_prefix = re.sub(r'\.HOST$', '.host', key_prefix) |
| 1942 wrappers[key_prefix] = os.path.join(build_to_root, value) | 1947 wrappers[key_prefix] = os.path.join(build_to_root, value) |
| 1943 | 1948 |
| 1944 mac_toolchain_dir = generator_flags.get('mac_toolchain_dir', None) | 1949 mac_toolchain_dir = generator_flags.get('mac_toolchain_dir', None) |
| 1945 if mac_toolchain_dir: | 1950 if mac_toolchain_dir: |
| 1946 wrappers['LINK'] = "export DEVELOPER_DIR='%s' &&" % mac_toolchain_dir | 1951 wrappers['LINK'] = "export DEVELOPER_DIR='%s' &&" % mac_toolchain_dir |
| 1947 | 1952 |
| 1948 if flavor == 'win': | 1953 if flavor == 'win': |
| 1949 configs = [target_dicts[qualified_target]['configurations'][config_name] | 1954 configs = [target_dicts[qualified_target]['configurations'][config_name] |
| 1950 for qualified_target in target_list] | 1955 for qualified_target in target_list] |
| 1951 shared_system_includes = None | 1956 shared_system_includes = None |
| 1952 if not generator_flags.get('ninja_use_custom_environment_files', 0): | 1957 if not generator_flags.get('ninja_use_custom_environment_files', 0): |
| 1953 shared_system_includes = \ | 1958 shared_system_includes = \ |
| 1954 gyp.msvs_emulation.ExtractSharedMSVSSystemIncludes( | 1959 gyp.msvs_emulation.ExtractSharedMSVSSystemIncludes( |
| 1955 configs, generator_flags) | 1960 configs, generator_flags) |
| 1956 cl_paths = gyp.msvs_emulation.GenerateEnvironmentFiles( | 1961 cl_paths = gyp.msvs_emulation.GenerateEnvironmentFiles( |
| 1957 toplevel_build, generator_flags, shared_system_includes, OpenOutput) | 1962 toplevel_build, generator_flags, shared_system_includes, OpenOutput) |
| 1958 for arch, path in sorted(cl_paths.iteritems()): | 1963 for arch, path in sorted(cl_paths.items()): |
| 1959 if clang_cl: | 1964 if clang_cl: |
| 1960 # If we have selected clang-cl, use that instead. | 1965 # If we have selected clang-cl, use that instead. |
| 1961 path = clang_cl | 1966 path = clang_cl |
| 1962 command = CommandWithWrapper('CC', wrappers, | 1967 command = CommandWithWrapper('CC', wrappers, |
| 1963 QuoteShellArgument(path, 'win')) | 1968 QuoteShellArgument(path, 'win')) |
| 1964 if clang_cl: | 1969 if clang_cl: |
| 1965 # Use clang-cl to cross-compile for x86 or x86_64. | 1970 # Use clang-cl to cross-compile for x86 or x86_64. |
| 1966 command += (' -m32' if arch == 'x86' else ' -m64') | 1971 command += (' -m32' if arch == 'x86' else ' -m64') |
| 1967 master_ninja.variable('cl_' + arch, command) | 1972 master_ninja.variable('cl_' + arch, command) |
| 1968 | 1973 |
| (...skipping 387 matching lines...) |
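The environment-wrapper loop above also switches to `.items()`. A hedged sketch of how a `*_wrapper` variable ends up in the `wrappers` dict; the environment dict, ccache path, and `build_to_root` value are illustrative stand-ins, not from the patch:

```python
import os
import re

build_to_root = '../..'
env = {'CC_wrapper': '/usr/bin/ccache',
       'CXX.HOST_wrapper': '/usr/bin/ccache',
       'PATH': '/usr/bin'}                       # stand-in for os.environ

wrappers = {}
for key, value in env.items():                   # .items(): py2 and py3 safe
  if key.lower().endswith('_wrapper'):
    key_prefix = key[:-len('_wrapper')]
    key_prefix = re.sub(r'\.HOST$', '.host', key_prefix)
    wrappers[key_prefix] = os.path.join(build_to_root, value)

# os.path.join() returns the wrapper path unchanged because it is absolute.
assert wrappers == {'CC': '/usr/bin/ccache', 'CXX.host': '/usr/bin/ccache'}
```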
| 2356 gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[build_file], spec) | 2361 gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[build_file], spec) |
| 2357 | 2362 |
| 2358 # If build_file is a symlink, we must not follow it because there's a chance | 2363 # If build_file is a symlink, we must not follow it because there's a chance |
| 2359 # it could point to a path above toplevel_dir, and we cannot correctly deal | 2364 # it could point to a path above toplevel_dir, and we cannot correctly deal |
| 2360 # with that case at the moment. | 2365 # with that case at the moment. |
| 2361 build_file = gyp.common.RelativePath(build_file, options.toplevel_dir, | 2366 build_file = gyp.common.RelativePath(build_file, options.toplevel_dir, |
| 2362 False) | 2367 False) |
| 2363 | 2368 |
| 2364 qualified_target_for_hash = gyp.common.QualifiedTarget(build_file, name, | 2369 qualified_target_for_hash = gyp.common.QualifiedTarget(build_file, name, |
| 2365 toolset) | 2370 toolset) |
| 2371 qualified_target_for_hash = qualified_target_for_hash.encode('utf-8') |
| 2366 hash_for_rules = hashlib.md5(qualified_target_for_hash).hexdigest() | 2372 hash_for_rules = hashlib.md5(qualified_target_for_hash).hexdigest() |
| 2367 | 2373 |
| 2368 base_path = os.path.dirname(build_file) | 2374 base_path = os.path.dirname(build_file) |
| 2369 obj = 'obj' | 2375 obj = 'obj' |
| 2370 if toolset != 'target': | 2376 if toolset != 'target': |
| 2371 obj += '.' + toolset | 2377 obj += '.' + toolset |
| 2372 output_file = os.path.join(obj, base_path, name + '.ninja') | 2378 output_file = os.path.join(obj, base_path, name + '.ninja') |
| 2373 | 2379 |
| 2374 ninja_output = StringIO() | 2380 ninja_output = StringIO() |
| 2375 writer = NinjaWriter(hash_for_rules, target_outputs, base_path, build_dir, | 2381 writer = NinjaWriter(hash_for_rules, target_outputs, base_path, build_dir, |
| (...skipping 46 matching lines...) |
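The added `encode('utf-8')` at new line 2371 is required because `hashlib.md5()` only accepts bytes on Python 3. A small sketch with an illustrative qualified target name:

```python
import hashlib

qualified_target_for_hash = 'src/base/base.gyp:base#target'  # illustrative
qualified_target_for_hash = qualified_target_for_hash.encode('utf-8')
hash_for_rules = hashlib.md5(qualified_target_for_hash).hexdigest()

# On Python 2 the encode() is a harmless round-trip for ASCII names; on
# Python 3 passing the str directly would raise TypeError.
assert len(hash_for_rules) == 32
```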
| 2422 master_ninja.default(generator_flags.get('default_target', 'all')) | 2428 master_ninja.default(generator_flags.get('default_target', 'all')) |
| 2423 | 2429 |
| 2424 master_ninja_file.close() | 2430 master_ninja_file.close() |
| 2425 | 2431 |
| 2426 | 2432 |
| 2427 def PerformBuild(data, configurations, params): | 2433 def PerformBuild(data, configurations, params): |
| 2428 options = params['options'] | 2434 options = params['options'] |
| 2429 for config in configurations: | 2435 for config in configurations: |
| 2430 builddir = os.path.join(options.toplevel_dir, 'out', config) | 2436 builddir = os.path.join(options.toplevel_dir, 'out', config) |
| 2431 arguments = ['ninja', '-C', builddir] | 2437 arguments = ['ninja', '-C', builddir] |
| 2432 print 'Building [%s]: %s' % (config, arguments) | 2438 print('Building [%s]: %s' % (config, arguments)) |
| 2433 subprocess.check_call(arguments) | 2439 subprocess.check_call(arguments) |
| 2434 | 2440 |
| 2435 | 2441 |
| 2436 def CallGenerateOutputForConfig(arglist): | 2442 def CallGenerateOutputForConfig(arglist): |
| 2437 # Ignore the interrupt signal so that the parent process catches it and | 2443 # Ignore the interrupt signal so that the parent process catches it and |
| 2438 # kills all multiprocessing children. | 2444 # kills all multiprocessing children. |
| 2439 signal.signal(signal.SIGINT, signal.SIG_IGN) | 2445 signal.signal(signal.SIGINT, signal.SIG_IGN) |
| 2440 | 2446 |
| 2441 (target_list, target_dicts, data, params, config_name) = arglist | 2447 (target_list, target_dicts, data, params, config_name) = arglist |
| 2442 GenerateOutputForConfig(target_list, target_dicts, data, params, config_name) | 2448 GenerateOutputForConfig(target_list, target_dicts, data, params, config_name) |
| 2443 | 2449 |
| 2444 | 2450 |
| 2445 def GenerateOutput(target_list, target_dicts, data, params): | 2451 def GenerateOutput(target_list, target_dicts, data, params): |
| 2446 # Update target_dicts for iOS device builds. | 2452 # Update target_dicts for iOS device builds. |
| 2447 target_dicts = gyp.xcode_emulation.CloneConfigurationForDeviceAndEmulator( | 2453 target_dicts = gyp.xcode_emulation.CloneConfigurationForDeviceAndEmulator( |
| 2448 target_dicts) | 2454 target_dicts) |
| 2449 | 2455 |
| 2450 user_config = params.get('generator_flags', {}).get('config', None) | 2456 user_config = params.get('generator_flags', {}).get('config', None) |
| 2451 if gyp.common.GetFlavor(params) == 'win': | 2457 if gyp.common.GetFlavor(params) == 'win': |
| 2452 target_list, target_dicts = MSVSUtil.ShardTargets(target_list, target_dicts) | 2458 target_list, target_dicts = MSVSUtil.ShardTargets(target_list, target_dicts) |
| 2453 target_list, target_dicts = MSVSUtil.InsertLargePdbShims( | 2459 target_list, target_dicts = MSVSUtil.InsertLargePdbShims( |
| 2454 target_list, target_dicts, generator_default_variables) | 2460 target_list, target_dicts, generator_default_variables) |
| 2455 | 2461 |
| 2456 if user_config: | 2462 if user_config: |
| 2457 GenerateOutputForConfig(target_list, target_dicts, data, params, | 2463 GenerateOutputForConfig(target_list, target_dicts, data, params, |
| 2458 user_config) | 2464 user_config) |
| 2459 else: | 2465 else: |
| 2460 config_names = target_dicts[target_list[0]]['configurations'].keys() | 2466 config_names = target_dicts[target_list[0]]['configurations'] |
| 2461 if params['parallel']: | 2467 if params['parallel']: |
| 2462 try: | 2468 try: |
| 2463 pool = multiprocessing.Pool(len(config_names)) | 2469 pool = multiprocessing.Pool(len(config_names)) |
| 2464 arglists = [] | 2470 arglists = [] |
| 2465 for config_name in config_names: | 2471 for config_name in config_names: |
| 2466 arglists.append( | 2472 arglists.append( |
| 2467 (target_list, target_dicts, data, params, config_name)) | 2473 (target_list, target_dicts, data, params, config_name)) |
| 2468 pool.map(CallGenerateOutputForConfig, arglists) | 2474 pool.map(CallGenerateOutputForConfig, arglists) |
| 2469 except KeyboardInterrupt, e: | 2475 except KeyboardInterrupt as e: |
| 2470 pool.terminate() | 2476 pool.terminate() |
| 2471 raise e | 2477 raise e |
| 2472 else: | 2478 else: |
| 2473 for config_name in config_names: | 2479 for config_name in config_names: |
| 2474 GenerateOutputForConfig(target_list, target_dicts, data, params, | 2480 GenerateOutputForConfig(target_list, target_dicts, data, params, |
| 2475 config_name) | 2481 config_name) |
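The final hunk covers two more Python 3 requirements: `except KeyboardInterrupt as e` (the comma form is a SyntaxError on Python 3) and dropping `.keys()`, since iterating a dict yields its keys on both versions. A hedged, self-contained sketch of the same per-config fan-out; the worker function and configuration names are illustrative, not gyp's API:

```python
from __future__ import print_function
import multiprocessing
import signal

def _worker(config_name):
  # Ignore SIGINT in children so the parent catches Ctrl-C, as in the patch.
  signal.signal(signal.SIGINT, signal.SIG_IGN)
  return 'generated build.ninja for %s' % config_name

if __name__ == '__main__':
  configurations = {'Debug': {}, 'Release': {}}
  config_names = configurations             # iterating a dict yields its keys
  try:
    pool = multiprocessing.Pool(len(config_names))
    print(pool.map(_worker, list(config_names)))
  except KeyboardInterrupt as e:             # `except X, e` only parses on py2
    pool.terminate()
    raise e
```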