Index: third_party/scons/scons-local/SCons/Executor.py
===================================================================
--- third_party/scons/scons-local/SCons/Executor.py (revision 9094)
+++ third_party/scons/scons-local/SCons/Executor.py (working copy)
@@ -6,7 +6,7 @@
"""
#
-# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
+# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
@@ -28,15 +28,88 @@
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
-__revision__ = "src/engine/SCons/Executor.py 3842 2008/12/20 22:59:52 scons"
+__revision__ = "src/engine/SCons/Executor.py 3897 2009/01/13 06:45:54 scons"
import string |
+import UserList |
from SCons.Debug import logInstanceCreation |
import SCons.Errors |
import SCons.Memoize |
+class Batch: |
+ """Remembers exact association between targets |
+ and sources of executor.""" |
+ def __init__(self, targets=[], sources=[]): |
+ self.targets = targets |
+ self.sources = sources |
+ |
+ |
+ |
+class TSList(UserList.UserList): |
+ """A class that implements $TARGETS or $SOURCES expansions by wrapping |
+ an Executor method. This class is used in Executor.get_lvars()
+ to delay creation of NodeList objects until they're needed. |
+ |
+ Note that we subclass UserList.UserList purely so that the |
+ is_Sequence() function will identify an object of this class as |
+ a list during variable expansion. We're not really using any |
+ UserList.UserList methods in practice. |
+ """ |
+ def __init__(self, func): |
+ self.func = func |
+ def __getattr__(self, attr): |
+ nl = self.func() |
+ return getattr(nl, attr) |
+ def __getitem__(self, i): |
+ nl = self.func() |
+ return nl[i] |
+ def __getslice__(self, i, j): |
+ nl = self.func() |
+ i = max(i, 0); j = max(j, 0) |
+ return nl[i:j] |
+ def __str__(self): |
+ nl = self.func() |
+ return str(nl) |
+ def __repr__(self): |
+ nl = self.func() |
+ return repr(nl) |
+ |
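# --- Editorial example (not part of the patch) ---
# A minimal standalone sketch of the lazy-expansion idea behind TSList:
# wrap a callable and only materialize the real list when an item or the
# string form is actually requested.  (The real class also subclasses
# UserList.UserList, purely so is_Sequence() treats it as a list.)
# LazyList and make_list are hypothetical names used for illustration.
class LazyList:
    def __init__(self, func):
        self.func = func                  # callable returning the real list
    def __getattr__(self, attr):
        return getattr(self.func(), attr)
    def __getitem__(self, i):
        return self.func()[i]
    def __str__(self):
        return str(self.func())

calls = []
def make_list():
    calls.append(1)                       # record each materialization
    return ['foo.o', 'bar.o']

lazy = LazyList(make_list)
assert calls == []                        # nothing has been built yet
assert lazy[0] == 'foo.o'                 # first access calls make_list()
assert len(calls) == 1
# --- end example ---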
+class TSObject: |
+ """A class that implements $TARGET or $SOURCE expansions by wrapping |
+ an Executor method. |
+ """ |
+ def __init__(self, func): |
+ self.func = func |
+ def __getattr__(self, attr): |
+ n = self.func() |
+ return getattr(n, attr) |
+ def __str__(self): |
+ n = self.func() |
+ if n: |
+ return str(n) |
+ return '' |
+ def __repr__(self): |
+ n = self.func() |
+ if n: |
+ return repr(n) |
+ return '' |
+ |
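# --- Editorial example (not part of the patch) ---
# Sketch of the TSObject behaviour for single-node expansions: when the
# wrapped callable returns nothing, the wrapper stringifies to '', so an
# unset $TARGET or $SOURCE expands to an empty string instead of raising.
# LazyNode is a hypothetical stand-in name.
class LazyNode:
    def __init__(self, func):
        self.func = func
    def __str__(self):
        n = self.func()
        if n:
            return str(n)
        return ''

present = LazyNode(lambda: 'hello.o')
missing = LazyNode(lambda: None)
assert str(present) == 'hello.o'
assert str(missing) == ''
# --- end example ---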
+def rfile(node): |
+ """ |
+ A function to return the result of a Node's rfile() method,
+ if it exists, and the Node itself otherwise (e.g. if it's a
+ Value Node).
+ """ |
+ try: |
+ rfile = node.rfile |
+ except AttributeError: |
+ return node |
+ else: |
+ return rfile() |
+ |
+ |
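# --- Editorial example (not part of the patch) ---
# Sketch of the rfile() helper's fallback: nodes without an rfile()
# method (Value-like nodes) are returned unchanged.  FileLike and
# ValueLike are hypothetical stand-ins for SCons File and Value nodes.
class FileLike:
    def rfile(self):
        return 'repository copy of the file'

class ValueLike:
    pass

def rfile(node):
    try:
        r = node.rfile
    except AttributeError:
        return node
    else:
        return r()

assert rfile(FileLike()) == 'repository copy of the file'
v = ValueLike()
assert rfile(v) is v                      # no rfile() method, returned as-is
# --- end example ---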
class Executor: |
"""A class for controlling instances of executing an action. |
@@ -58,12 +131,96 @@ |
self.post_actions = [] |
self.env = env |
self.overridelist = overridelist |
- self.targets = targets |
- self.sources = sources[:] |
- self.sources_need_sorting = False |
+ if targets or sources: |
+ self.batches = [Batch(targets[:], sources[:])] |
+ else: |
+ self.batches = [] |
self.builder_kw = builder_kw |
self._memo = {} |
+ def get_lvars(self): |
+ try: |
+ return self.lvars |
+ except AttributeError: |
+ self.lvars = { |
+ 'CHANGED_SOURCES' : TSList(self._get_changed_sources), |
+ 'CHANGED_TARGETS' : TSList(self._get_changed_targets), |
+ 'SOURCE' : TSObject(self._get_source), |
+ 'SOURCES' : TSList(self._get_sources), |
+ 'TARGET' : TSObject(self._get_target), |
+ 'TARGETS' : TSList(self._get_targets), |
+ 'UNCHANGED_SOURCES' : TSList(self._get_unchanged_sources), |
+ 'UNCHANGED_TARGETS' : TSList(self._get_unchanged_targets), |
+ } |
+ return self.lvars |
+ |
+ def _get_changes(self): |
+ cs = [] |
+ ct = [] |
+ us = [] |
+ ut = [] |
+ for b in self.batches: |
+ if b.targets[0].changed(): |
+ cs.extend(map(rfile, b.sources)) |
+ ct.extend(b.targets) |
+ else: |
+ us.extend(map(rfile, b.sources)) |
+ ut.extend(b.targets) |
+ self._changed_sources_list = SCons.Util.NodeList(cs) |
+ self._changed_targets_list = SCons.Util.NodeList(ct) |
+ self._unchanged_sources_list = SCons.Util.NodeList(us) |
+ self._unchanged_targets_list = SCons.Util.NodeList(ut) |
+ |
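# --- Editorial example (not part of the patch) ---
# Sketch of the partition performed by _get_changes(): each batch is
# classified by whether its first target reports changed(), and the
# batch's sources land in either the CHANGED_SOURCES or the
# UNCHANGED_SOURCES pool.  FakeNode and FakeBatch are simplified
# stand-ins for the real Node and Batch classes.
class FakeNode:
    def __init__(self, name, changed):
        self.name = name
        self._changed = changed
    def changed(self):
        return self._changed

class FakeBatch:
    def __init__(self, targets, sources):
        self.targets = targets
        self.sources = sources

batches = [
    FakeBatch([FakeNode('a.o', True)],  [FakeNode('a.c', False)]),
    FakeBatch([FakeNode('b.o', False)], [FakeNode('b.c', False)]),
]

changed_sources, unchanged_sources = [], []
for b in batches:
    if b.targets[0].changed():
        changed_sources.extend(b.sources)
    else:
        unchanged_sources.extend(b.sources)

assert [s.name for s in changed_sources] == ['a.c']
assert [s.name for s in unchanged_sources] == ['b.c']
# --- end example ---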
+ def _get_changed_sources(self, *args, **kw): |
+ try: |
+ return self._changed_sources_list |
+ except AttributeError: |
+ self._get_changes() |
+ return self._changed_sources_list |
+ |
+ def _get_changed_targets(self, *args, **kw): |
+ try: |
+ return self._changed_targets_list |
+ except AttributeError: |
+ self._get_changes() |
+ return self._changed_targets_list |
+ |
+ def _get_source(self, *args, **kw): |
+ #return SCons.Util.NodeList([rfile(self.batches[0].sources[0]).get_subst_proxy()]) |
+ return rfile(self.batches[0].sources[0]).get_subst_proxy() |
+ |
+ def _get_sources(self, *args, **kw): |
+ return SCons.Util.NodeList(map(lambda n: rfile(n).get_subst_proxy(), self.get_all_sources())) |
+ |
+ def _get_target(self, *args, **kw): |
+ #return SCons.Util.NodeList([self.batches[0].targets[0].get_subst_proxy()]) |
+ return self.batches[0].targets[0].get_subst_proxy() |
+ |
+ def _get_targets(self, *args, **kw): |
+ return SCons.Util.NodeList(map(lambda n: n.get_subst_proxy(), self.get_all_targets())) |
+ |
+ def _get_unchanged_sources(self, *args, **kw): |
+ try: |
+ return self._unchanged_sources_list |
+ except AttributeError: |
+ self._get_changes() |
+ return self._unchanged_sources_list |
+ |
+ def _get_unchanged_targets(self, *args, **kw): |
+ try: |
+ return self._unchanged_targets_list |
+ except AttributeError: |
+ self._get_changes() |
+ return self._unchanged_targets_list |
+ |
+ def get_action_targets(self): |
+ if not self.action_list: |
+ return [] |
+ targets_string = self.action_list[0].get_targets(self.env, self) |
+ if targets_string[0] == '$': |
+ targets_string = targets_string[1:] |
+ return self.get_lvars()[targets_string] |
+ |
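# --- Editorial example (not part of the patch) ---
# Sketch of the lookup in get_action_targets(): an Action can name the
# variable holding its targets (e.g. '$TARGETS' or '$CHANGED_TARGETS');
# the leading '$' is stripped and the name is used to index the lvars
# dictionary.  The lvars dict below is a plain stand-in.
lvars = {
    'TARGETS': ['a.o', 'b.o'],
    'CHANGED_TARGETS': ['a.o'],
}

def lookup_action_targets(targets_string):
    if targets_string[0] == '$':
        targets_string = targets_string[1:]
    return lvars[targets_string]

assert lookup_action_targets('$CHANGED_TARGETS') == ['a.o']
assert lookup_action_targets('TARGETS') == ['a.o', 'b.o']
# --- end example ---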
def set_action_list(self, action): |
import SCons.Util |
if not SCons.Util.is_List(action): |
@@ -76,6 +233,58 @@ |
def get_action_list(self): |
return self.pre_actions + self.action_list + self.post_actions |
+ def get_all_targets(self): |
+ """Returns all targets for all batches of this Executor.""" |
+ result = [] |
+ for batch in self.batches: |
+ # TODO(1.5): remove the list() cast |
+ result.extend(list(batch.targets)) |
+ return result |
+ |
+ def get_all_sources(self): |
+ """Returns all sources for all batches of this Executor.""" |
+ result = [] |
+ for batch in self.batches: |
+ # TODO(1.5): remove the list() cast |
+ result.extend(list(batch.sources)) |
+ return result |
+ |
+ def get_all_children(self): |
+ """Returns all unique children (dependencies) for all batches |
+ of this Executor. |
+ |
+ The Taskmaster can recognize when it's already evaluated a |
+ Node, so we don't have to make this list unique for its intended |
+ canonical use case, but we expect there to be a lot of redundancy |
+ (long lists of batched .cc files #including the same .h files |
+ over and over), so removing the duplicates once up front should |
+ save the Taskmaster a lot of work. |
+ """ |
+ result = SCons.Util.UniqueList([]) |
+ for target in self.get_all_targets(): |
+ result.extend(target.children()) |
+ return result |
+ |
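# --- Editorial example (not part of the patch) ---
# Sketch of why get_all_children() deduplicates up front: batched targets
# typically share many header dependencies, so removing duplicates once
# keeps the Taskmaster's work list short.  unique_extend() is a
# hypothetical helper standing in for SCons.Util.UniqueList.
def unique_extend(result, items):
    for item in items:
        if item not in result:
            result.append(item)
    return result

children_by_target = {
    'a.o': ['a.c', 'common.h'],
    'b.o': ['b.c', 'common.h'],
}

result = []
for target in ('a.o', 'b.o'):
    unique_extend(result, children_by_target[target])

assert result == ['a.c', 'common.h', 'b.c']   # 'common.h' kept only once
# --- end example ---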
+ def get_all_prerequisites(self): |
+ """Returns all unique (order-only) prerequisites for all batches |
+ of this Executor. |
+ """ |
+ result = SCons.Util.UniqueList([]) |
+ for target in self.get_all_targets(): |
+ # TODO(1.5): remove the list() cast |
+ result.extend(list(target.prerequisites)) |
+ return result |
+ |
+ def get_action_side_effects(self): |
+ |
+ """Returns all side effects for all batches of this |
+ Executor used by the underlying Action. |
+ """ |
+ result = SCons.Util.UniqueList([]) |
+ for target in self.get_action_targets(): |
+ result.extend(target.side_effects) |
+ return result |
+ |
memoizer_counters.append(SCons.Memoize.CountValue('get_build_env')) |
def get_build_env(self): |
@@ -109,14 +318,17 @@ |
""" |
env = self.get_build_env() |
try: |
- cwd = self.targets[0].cwd |
+ cwd = self.batches[0].targets[0].cwd |
except (IndexError, AttributeError): |
cwd = None |
- return scanner.path(env, cwd, self.targets, self.get_sources()) |
+ return scanner.path(env, cwd, |
+ self.get_all_targets(), |
+ self.get_all_sources()) |
def get_kw(self, kw={}): |
result = self.builder_kw.copy() |
result.update(kw) |
+ result['executor'] = self |
return result |
def do_nothing(self, target, kw): |
@@ -128,7 +340,9 @@ |
kw = self.get_kw(kw) |
status = 0 |
for act in self.get_action_list(): |
- status = apply(act, (self.targets, self.get_sources(), env), kw) |
+ #args = (self.get_all_targets(), self.get_all_sources(), env) |
+ args = ([], [], env) |
+ status = apply(act, args, kw) |
if isinstance(status, SCons.Errors.BuildError): |
status.executor = self |
raise status |
@@ -136,7 +350,7 @@ |
msg = "Error %s" % status |
raise SCons.Errors.BuildError( |
errstr=msg, |
- node=self.targets, |
+ node=self.batches[0].targets, |
executor=self, |
action=act) |
return status |
@@ -155,24 +369,32 @@ |
"""Add source files to this Executor's list. This is necessary |
for "multi" Builders that can be called repeatedly to build up |
a source file list for a given target.""" |
- self.sources.extend(sources) |
- self.sources_need_sorting = True |
+ # TODO(batch): extend to multiple batches |
+ assert (len(self.batches) == 1) |
+ # TODO(batch): remove duplicates? |
+ sources = filter(lambda x, s=self.batches[0].sources: x not in s, sources) |
+ self.batches[0].sources.extend(sources) |
def get_sources(self): |
- if self.sources_need_sorting: |
- self.sources = SCons.Util.uniquer_hashables(self.sources) |
- self.sources_need_sorting = False |
- return self.sources |
+ return self.batches[0].sources |
+ def add_batch(self, targets, sources): |
+ """Add pair of associated target and source to this Executor's list. |
+ This is necessary for "batch" Builders that can be called repeatedly |
+ to build up a list of matching target and source files that will be |
+ used in order to update multiple target files at once from multiple |
+ corresponding source files, for tools like MSVC that support it.""" |
+ self.batches.append(Batch(targets, sources)) |
+ |
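# --- Editorial example (not part of the patch) ---
# Sketch of how a batching Builder is expected to use add_batch(): one
# target/source pair per call, with the flattened views recovering the
# full lists.  MiniBatch and MiniExecutor are hypothetical cut-down
# stand-ins for the Batch and Executor classes in this file.
class MiniBatch:
    def __init__(self, targets, sources):
        self.targets = targets
        self.sources = sources

class MiniExecutor:
    def __init__(self):
        self.batches = []
    def add_batch(self, targets, sources):
        self.batches.append(MiniBatch(targets, sources))
    def get_all_targets(self):
        result = []
        for b in self.batches:
            result.extend(b.targets)
        return result
    def get_all_sources(self):
        result = []
        for b in self.batches:
            result.extend(b.sources)
        return result

x = MiniExecutor()
x.add_batch(['a.obj'], ['a.cc'])          # e.g. one compiland per call
x.add_batch(['b.obj'], ['b.cc'])
assert x.get_all_targets() == ['a.obj', 'b.obj']
assert x.get_all_sources() == ['a.cc', 'b.cc']
# --- end example ---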
def prepare(self): |
""" |
Preparatory checks for whether this Executor can go ahead |
and (try to) build its targets. |
""" |
- for s in self.get_sources(): |
+ for s in self.get_all_sources(): |
if s.missing(): |
msg = "Source `%s' not found, needed by target `%s'." |
- raise SCons.Errors.StopError, msg % (s, self.targets[0]) |
+ raise SCons.Errors.StopError, msg % (s, self.batches[0].targets[0]) |
def add_pre_action(self, action): |
self.pre_actions.append(action) |
@@ -184,7 +406,7 @@ |
def my_str(self): |
env = self.get_build_env() |
- get = lambda action, t=self.targets, s=self.get_sources(), e=env: \ |
+ get = lambda action, t=self.get_all_targets(), s=self.get_all_sources(), e=env: \ |
action.genstring(t, s, e) |
return string.join(map(get, self.get_action_list()), "\n") |
@@ -209,7 +431,7 @@ |
except KeyError: |
pass |
env = self.get_build_env() |
- get = lambda action, t=self.targets, s=self.get_sources(), e=env: \ |
+ get = lambda action, t=self.get_all_targets(), s=self.get_all_sources(), e=env: \ |
action.get_contents(t, s, e) |
result = string.join(map(get, self.get_action_list()), "") |
self._memo['get_contents'] = result |
@@ -223,11 +445,13 @@ |
return 0 |
def scan_targets(self, scanner): |
- self.scan(scanner, self.targets) |
+ # TODO(batch): scan by batches |
+ self.scan(scanner, self.get_all_targets()) |
def scan_sources(self, scanner): |
- if self.sources: |
- self.scan(scanner, self.get_sources()) |
+ # TODO(batch): scan by batches |
+ if self.batches[0].sources: |
+ self.scan(scanner, self.get_all_sources()) |
def scan(self, scanner, node_list): |
"""Scan a list of this Executor's files (targets or sources) for |
@@ -237,6 +461,7 @@ |
""" |
env = self.get_build_env() |
+ # TODO(batch): scan by batches
deps = [] |
if scanner: |
for node in node_list: |
@@ -261,16 +486,16 @@ |
deps.extend(self.get_implicit_deps()) |
- for tgt in self.targets: |
+ for tgt in self.get_all_targets(): |
tgt.add_to_implicit(deps) |
- def _get_unignored_sources_key(self, ignore=()): |
- return tuple(ignore) |
+ def _get_unignored_sources_key(self, node, ignore=()): |
+ return (node,) + tuple(ignore) |
memoizer_counters.append(SCons.Memoize.CountDict('get_unignored_sources', _get_unignored_sources_key)) |
- def get_unignored_sources(self, ignore=()): |
- ignore = tuple(ignore) |
+ def get_unignored_sources(self, node, ignore=()): |
+ key = (node,) + tuple(ignore) |
try: |
memo_dict = self._memo['get_unignored_sources'] |
except KeyError: |
@@ -278,56 +503,56 @@ |
self._memo['get_unignored_sources'] = memo_dict |
else: |
try: |
- return memo_dict[ignore] |
+ return memo_dict[key] |
except KeyError: |
pass |
- sourcelist = self.get_sources() |
+ if node: |
+ # TODO: better way to do this (it's a linear search, |
+ # but it may not be on the critical path)?
+ sourcelist = [] |
+ for b in self.batches: |
+ if node in b.targets: |
+ sourcelist = b.sources |
+ break |
+ else: |
+ sourcelist = self.get_all_sources() |
if ignore: |
idict = {} |
for i in ignore: |
idict[i] = 1 |
sourcelist = filter(lambda s, i=idict: not i.has_key(s), sourcelist) |
- memo_dict[ignore] = sourcelist |
+ memo_dict[key] = sourcelist |
return sourcelist |
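# --- Editorial example (not part of the patch) ---
# Sketch of the per-node lookup get_unignored_sources() now performs:
# given a target node, find the batch containing it and return that
# batch's sources minus any ignored nodes.  Plain strings stand in for
# Node objects here.
batches = [
    (['a.obj'], ['a.cc', 'a.h']),
    (['b.obj'], ['b.cc', 'b.h']),
]

def unignored_sources(node, ignore=()):
    sourcelist = []
    for targets, sources in batches:
        if node in targets:
            sourcelist = sources
            break
    return [s for s in sourcelist if s not in ignore]

assert unignored_sources('b.obj') == ['b.cc', 'b.h']
assert unignored_sources('a.obj', ignore=('a.h',)) == ['a.cc']
# --- end example ---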
- def _process_sources_key(self, func, ignore=()): |
- return (func, tuple(ignore)) |
- |
- memoizer_counters.append(SCons.Memoize.CountDict('process_sources', _process_sources_key)) |
- |
- def process_sources(self, func, ignore=()): |
- memo_key = (func, tuple(ignore)) |
- try: |
- memo_dict = self._memo['process_sources'] |
- except KeyError: |
- memo_dict = {} |
- self._memo['process_sources'] = memo_dict |
- else: |
- try: |
- return memo_dict[memo_key] |
- except KeyError: |
- pass |
- |
- result = map(func, self.get_unignored_sources(ignore)) |
- |
- memo_dict[memo_key] = result |
- |
- return result |
- |
def get_implicit_deps(self): |
"""Return the executor's implicit dependencies, i.e. the nodes of |
the commands to be executed.""" |
result = [] |
build_env = self.get_build_env() |
for act in self.get_action_list(): |
- result.extend(act.get_implicit_deps(self.targets, self.get_sources(), build_env)) |
+ deps = act.get_implicit_deps(self.get_all_targets(), |
+ self.get_all_sources(), |
+ build_env) |
+ result.extend(deps) |
return result |
+ |
+ |
+_batch_executors = {} |
+ |
+def GetBatchExecutor(key): |
+ return _batch_executors[key] |
+ |
+def AddBatchExecutor(key, executor): |
+ assert not _batch_executors.has_key(key) |
+ _batch_executors[key] = executor |
+ |
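# --- Editorial example (not part of the patch) ---
# Sketch of how the module-level batch-executor registry is meant to be
# used: the first target/source pair registers the shared executor under
# a key, and later pairs built by the same batch look it up by that key.
# The key shape, the lowercase helper names, and the registered object
# are placeholders here, not the actual API above.
_registry = {}

def add_batch_executor(key, executor):
    assert key not in _registry
    _registry[key] = executor

def get_batch_executor(key):
    return _registry[key]

shared = object()                          # stands in for a real Executor
add_batch_executor(('env-id', 'batch-key'), shared)
assert get_batch_executor(('env-id', 'batch-key')) is shared
# --- end example ---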
nullenv = None |
+ |
def get_NullEnvironment(): |
"""Use singleton pattern for Null Environments.""" |
global nullenv |
@@ -354,7 +579,7 @@ |
""" |
def __init__(self, *args, **kw): |
if __debug__: logInstanceCreation(self, 'Executor.Null') |
- self.targets = kw['targets'] |
+ self.batches = [Batch(kw['targets'][:], [])] |
def get_build_env(self): |
return get_NullEnvironment() |
def get_build_scanner_path(self): |
@@ -365,17 +590,30 @@ |
pass |
def get_unignored_sources(self, *args, **kw): |
return tuple(()) |
+ def get_action_targets(self): |
+ return [] |
def get_action_list(self): |
return [] |
+ def get_all_targets(self): |
+ return self.batches[0].targets |
+ def get_all_sources(self): |
+ return self.batches[0].targets[0].sources |
+ def get_all_children(self): |
+ return self.get_all_sources() |
+ def get_all_prerequisites(self): |
+ return [] |
+ def get_action_side_effects(self): |
+ return [] |
def __call__(self, *args, **kw): |
return 0 |
def get_contents(self): |
return '' |
- |
def _morph(self): |
"""Morph this Null executor to a real Executor object.""" |
+ batches = self.batches |
self.__class__ = Executor |
- self.__init__([], targets=self.targets) |
+ self.__init__([]) |
+ self.batches = batches |
# The following methods require morphing this Null Executor to a |
# real Executor object. |