OLD | NEW |
1 """scons.Node.FS | 1 """scons.Node.FS |
2 | 2 |
3 File system nodes. | 3 File system nodes. |
4 | 4 |
5 These Nodes represent the canonical external objects that people think | 5 These Nodes represent the canonical external objects that people think |
6 of when they think of building software: files and directories. | 6 of when they think of building software: files and directories. |
7 | 7 |
8 This holds a "default_fs" variable that should be initialized with an FS | 8 This holds a "default_fs" variable that should be initialized with an FS |
9 that can be used by scripts or modules looking for the canonical default. | 9 that can be used by scripts or modules looking for the canonical default. |
10 | 10 |
(...skipping 15 matching lines...)
26 # | 26 # |
27 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY | 27 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY |
28 # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE | 28 # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE |
29 # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND | 29 # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND |
30 # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE | 30 # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE |
31 # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION | 31 # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION |
32 # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION | 32 # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION |
33 # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. | 33 # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. |
34 # | 34 # |
35 | 35 |
36 __revision__ = "src/engine/SCons/Node/FS.py 3603 2008/10/10 05:46:45 scons" | 36 __revision__ = "src/engine/SCons/Node/FS.py 3842 2008/12/20 22:59:52 scons" |
37 | 37 |
38 import fnmatch | 38 import fnmatch |
39 from itertools import izip | 39 from itertools import izip |
40 import os | 40 import os |
41 import os.path | 41 import os.path |
42 import re | 42 import re |
43 import shutil | 43 import shutil |
44 import stat | 44 import stat |
45 import string | 45 import string |
46 import sys | 46 import sys |
47 import time | 47 import time |
48 import cStringIO | 48 import cStringIO |
49 | 49 |
50 import SCons.Action | 50 import SCons.Action |
51 from SCons.Debug import logInstanceCreation | 51 from SCons.Debug import logInstanceCreation |
52 import SCons.Errors | 52 import SCons.Errors |
53 import SCons.Memoize | 53 import SCons.Memoize |
54 import SCons.Node | 54 import SCons.Node |
55 import SCons.Node.Alias | 55 import SCons.Node.Alias |
56 import SCons.Subst | 56 import SCons.Subst |
57 import SCons.Util | 57 import SCons.Util |
58 import SCons.Warnings | 58 import SCons.Warnings |
59 | 59 |
60 from SCons.Debug import Trace | 60 from SCons.Debug import Trace |
61 | 61 |
62 do_store_info = True | 62 do_store_info = True |
63 | 63 |
| 64 |
| 65 class EntryProxyAttributeError(AttributeError): |
| 66 """ |
| 67 An AttributeError subclass for recording and displaying the name |
| 68 of the underlying Entry involved in an AttributeError exception. |
| 69 """ |
| 70 def __init__(self, entry_proxy, attribute): |
| 71 AttributeError.__init__(self) |
| 72 self.entry_proxy = entry_proxy |
| 73 self.attribute = attribute |
| 74 def __str__(self): |
| 75 entry = self.entry_proxy.get() |
| 76 fmt = "%s instance %s has no attribute %s" |
| 77 return fmt % (entry.__class__.__name__, |
| 78 repr(entry.name), |
| 79 repr(self.attribute)) |
| 80 |
64 # The max_drift value: by default, use a cached signature value for | 81 # The max_drift value: by default, use a cached signature value for |
65 # any file that's been untouched for more than two days. | 82 # any file that's been untouched for more than two days. |
66 default_max_drift = 2*24*60*60 | 83 default_max_drift = 2*24*60*60 |
67 | 84 |
68 # | 85 # |
69 # We stringify these file system Nodes a lot. Turning a file system Node | 86 # We stringify these file system Nodes a lot. Turning a file system Node |
70 # into a string is non-trivial, because the final string representation | 87 # into a string is non-trivial, because the final string representation |
71 # can depend on a lot of factors: whether it's a derived target or not, | 88 # can depend on a lot of factors: whether it's a derived target or not, |
72 # whether it's linked to a repository or source directory, and whether | 89 # whether it's linked to a repository or source directory, and whether |
73 # there's duplication going on. The normal technique for optimizing | 90 # there's duplication going on. The normal technique for optimizing |
(...skipping 144 matching lines...)
218 except (IOError, OSError): | 235 except (IOError, OSError): |
219 # An OSError indicates something happened like a permissions | 236 # An OSError indicates something happened like a permissions |
220 # problem or an attempt to symlink across file-system | 237 # problem or an attempt to symlink across file-system |
221 # boundaries. An IOError indicates something like the file | 238 # boundaries. An IOError indicates something like the file |
222 # not existing. In either case, keeping trying additional | 239 # not existing. In either case, keeping trying additional |
223 # functions in the list and only raise an error if the last | 240 # functions in the list and only raise an error if the last |
224 # one failed. | 241 # one failed. |
225 if func == Link_Funcs[-1]: | 242 if func == Link_Funcs[-1]: |
226 # exception of the last link method (copy) are fatal | 243 # exception of the last link method (copy) are fatal |
227 raise | 244 raise |
228 else: | |
229 pass | |
230 return 0 | 245 return 0 |
231 | 246 |
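Note: LinkFunc above walks a list of link/copy strategies and swallows IOError/OSError from every attempt except the last, so only the final fallback (plain copy) can actually fail the build; the removed else/pass branch was dead weight. A rough sketch of that shape, with illustrative function names only:

    import os
    import shutil

    def try_link_funcs(funcs, src, dst):
        # Attempt each strategy in order; only the last one's failure escapes.
        for func in funcs:
            try:
                func(src, dst)
                return 0
            except (IOError, OSError):
                if func is funcs[-1]:
                    raise
        return 0

    # Example ordering similar to hard link / soft link / copy:
    # try_link_funcs([os.link, os.symlink, shutil.copy2], 'src.txt', 'dst.txt')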
232 Link = SCons.Action.Action(LinkFunc, None) | 247 Link = SCons.Action.Action(LinkFunc, None) |
233 def LocalString(target, source, env): | 248 def LocalString(target, source, env): |
234 return 'Local copy of %s from %s' % (target[0], source[0]) | 249 return 'Local copy of %s from %s' % (target[0], source[0]) |
235 | 250 |
236 LocalCopy = SCons.Action.Action(LinkFunc, LocalString) | 251 LocalCopy = SCons.Action.Action(LinkFunc, LocalString) |
237 | 252 |
238 def UnlinkFunc(target, source, env): | 253 def UnlinkFunc(target, source, env): |
239 t = target[0] | 254 t = target[0] |
(...skipping 238 matching lines...)
478 } | 493 } |
479 | 494 |
480 def __getattr__(self, name): | 495 def __getattr__(self, name): |
481 # This is how we implement the "special" attributes | 496 # This is how we implement the "special" attributes |
482 # such as base, posix, srcdir, etc. | 497 # such as base, posix, srcdir, etc. |
483 try: | 498 try: |
484 attr_function = self.dictSpecialAttrs[name] | 499 attr_function = self.dictSpecialAttrs[name] |
485 except KeyError: | 500 except KeyError: |
486 try: | 501 try: |
487 attr = SCons.Util.Proxy.__getattr__(self, name) | 502 attr = SCons.Util.Proxy.__getattr__(self, name) |
488 except AttributeError: | 503 except AttributeError, e: |
489 entry = self.get() | 504 # Raise our own AttributeError subclass with an |
490 classname = string.split(str(entry.__class__), '.')[-1] | 505 # overridden __str__() method that identifies the |
491 if classname[-2:] == "'>": | 506 # name of the entry that caused the exception. |
492 # new-style classes report their name as: | 507 raise EntryProxyAttributeError(self, name) |
493 # "<class 'something'>" | |
494 # instead of the classic classes: | |
495 # "something" | |
496 classname = classname[:-2] | |
497 raise AttributeError, "%s instance '%s' has no attribute '%s'" % (classname, entry.name, name) | |
498 return attr | 508 return attr |
499 else: | 509 else: |
500 return attr_function(self) | 510 return attr_function(self) |
501 | 511 |
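Note: the rewritten __getattr__ keeps the same three-step lookup (special-attribute table, then the proxied Entry, then an error), but the error text is now built lazily by EntryProxyAttributeError.__str__ instead of eagerly at raise time. A minimal self-contained sketch of the same pattern; LazySubjectError and DemoProxy are hypothetical stand-ins for the SCons classes:

    class LazySubjectError(AttributeError):
        # Message is formatted only if the exception is actually displayed.
        def __init__(self, subject, attribute):
            AttributeError.__init__(self)
            self.subject = subject
            self.attribute = attribute
        def __str__(self):
            return "%s instance %r has no attribute %r" % (
                self.subject.__class__.__name__,
                getattr(self.subject, 'name', '?'),
                self.attribute)

    class DemoProxy(object):
        # Stand-in for SCons.Util.Proxy plus the special-attribute table.
        dictSpecialAttrs = {'upper_name': lambda self: self._subject.name.upper()}
        def __init__(self, subject):
            self._subject = subject
        def __getattr__(self, name):
            try:
                attr_function = self.dictSpecialAttrs[name]
            except KeyError:
                try:
                    return getattr(self._subject, name)
                except AttributeError:
                    raise LazySubjectError(self._subject, name)
            return attr_function(self)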
502 class Base(SCons.Node.Node): | 512 class Base(SCons.Node.Node): |
503 """A generic class for file system entries. This class is for | 513 """A generic class for file system entries. This class is for |
504 when we don't know yet whether the entry being looked up is a file | 514 when we don't know yet whether the entry being looked up is a file |
505 or a directory. Instances of this class can morph into either | 515 or a directory. Instances of this class can morph into either |
506 Dir or File objects by a later, more precise lookup. | 516 Dir or File objects by a later, more precise lookup. |
507 | 517 |
(...skipping 77 matching lines...)
585 pass | 595 pass |
586 result = self._get_str() | 596 result = self._get_str() |
587 self._memo['_save_str'] = result | 597 self._memo['_save_str'] = result |
588 return result | 598 return result |
589 | 599 |
590 def _get_str(self): | 600 def _get_str(self): |
591 global Save_Strings | 601 global Save_Strings |
592 if self.duplicate or self.is_derived(): | 602 if self.duplicate or self.is_derived(): |
593 return self.get_path() | 603 return self.get_path() |
594 srcnode = self.srcnode() | 604 srcnode = self.srcnode() |
595 if srcnode.stat() is None and not self.stat() is None: | 605 if srcnode.stat() is None and self.stat() is not None: |
596 result = self.get_path() | 606 result = self.get_path() |
597 else: | 607 else: |
598 result = srcnode.get_path() | 608 result = srcnode.get_path() |
599 if not Save_Strings: | 609 if not Save_Strings: |
600 # We're not at the point where we're saving the string string | 610 # We're not at the point where we're saving the string string |
601 # representations of FS Nodes (because we haven't finished | 611 # representations of FS Nodes (because we haven't finished |
602 # reading the SConscript files and need to have str() return | 612 # reading the SConscript files and need to have str() return |
603 # things relative to them). That also means we can't yet | 613 # things relative to them). That also means we can't yet |
604 # cache values returned (or not returned) by stat(), since | 614 # cache values returned (or not returned) by stat(), since |
605 # Python code in the SConscript files might still create | 615 # Python code in the SConscript files might still create |
606 # or otherwise affect the on-disk file. So get rid of the | 616 # or otherwise affect the on-disk file. So get rid of the |
607 # values that the underlying stat() method saved. | 617 # values that the underlying stat() method saved. |
608 try: del self._memo['stat'] | 618 try: del self._memo['stat'] |
609 except KeyError: pass | 619 except KeyError: pass |
610 if not self is srcnode: | 620 if self is not srcnode: |
611 try: del srcnode._memo['stat'] | 621 try: del srcnode._memo['stat'] |
612 except KeyError: pass | 622 except KeyError: pass |
613 return result | 623 return result |
614 | 624 |
615 rstr = __str__ | 625 rstr = __str__ |
616 | 626 |
617 memoizer_counters.append(SCons.Memoize.CountValue('stat')) | 627 memoizer_counters.append(SCons.Memoize.CountValue('stat')) |
618 | 628 |
619 def stat(self): | 629 def stat(self): |
620 try: return self._memo['stat'] | 630 try: return self._memo['stat'] |
621 except KeyError: pass | 631 except KeyError: pass |
622 try: result = self.fs.stat(self.abspath) | 632 try: result = self.fs.stat(self.abspath) |
623 except os.error: result = None | 633 except os.error: result = None |
624 self._memo['stat'] = result | 634 self._memo['stat'] = result |
625 return result | 635 return result |
626 | 636 |
627 def exists(self): | 637 def exists(self): |
628 return not self.stat() is None | 638 return self.stat() is not None |
629 | 639 |
630 def rexists(self): | 640 def rexists(self): |
631 return self.rfile().exists() | 641 return self.rfile().exists() |
632 | 642 |
633 def getmtime(self): | 643 def getmtime(self): |
634 st = self.stat() | 644 st = self.stat() |
635 if st: return st[stat.ST_MTIME] | 645 if st: return st[stat.ST_MTIME] |
636 else: return None | 646 else: return None |
637 | 647 |
638 def getsize(self): | 648 def getsize(self): |
639 st = self.stat() | 649 st = self.stat() |
640 if st: return st[stat.ST_SIZE] | 650 if st: return st[stat.ST_SIZE] |
641 else: return None | 651 else: return None |
642 | 652 |
643 def isdir(self): | 653 def isdir(self): |
644 st = self.stat() | 654 st = self.stat() |
645 return not st is None and stat.S_ISDIR(st[stat.ST_MODE]) | 655 return st is not None and stat.S_ISDIR(st[stat.ST_MODE]) |
646 | 656 |
647 def isfile(self): | 657 def isfile(self): |
648 st = self.stat() | 658 st = self.stat() |
649 return not st is None and stat.S_ISREG(st[stat.ST_MODE]) | 659 return st is not None and stat.S_ISREG(st[stat.ST_MODE]) |
650 | 660 |
651 if hasattr(os, 'symlink'): | 661 if hasattr(os, 'symlink'): |
652 def islink(self): | 662 def islink(self): |
653 try: st = self.fs.lstat(self.abspath) | 663 try: st = self.fs.lstat(self.abspath) |
654 except os.error: return 0 | 664 except os.error: return 0 |
655 return stat.S_ISLNK(st[stat.ST_MODE]) | 665 return stat.S_ISLNK(st[stat.ST_MODE]) |
656 else: | 666 else: |
657 def islink(self): | 667 def islink(self): |
658 return 0 # no symlinks | 668 return 0 # no symlinks |
659 | 669 |
(...skipping 219 matching lines...)
879 # string so calls to get_contents() in emitters and the | 889 # string so calls to get_contents() in emitters and the |
880 # like (e.g. in qt.py) don't have to disambiguate by hand | 890 # like (e.g. in qt.py) don't have to disambiguate by hand |
881 # or catch the exception. | 891 # or catch the exception. |
882 return '' | 892 return '' |
883 else: | 893 else: |
884 return self.get_contents() | 894 return self.get_contents() |
885 | 895 |
886 def must_be_same(self, klass): | 896 def must_be_same(self, klass): |
887 """Called to make sure a Node is a Dir. Since we're an | 897 """Called to make sure a Node is a Dir. Since we're an |
888 Entry, we can morph into one.""" | 898 Entry, we can morph into one.""" |
889 if not self.__class__ is klass: | 899 if self.__class__ is not klass: |
890 self.__class__ = klass | 900 self.__class__ = klass |
891 self._morph() | 901 self._morph() |
892 self.clear() | 902 self.clear() |
893 | 903 |
894 # The following methods can get called before the Taskmaster has | 904 # The following methods can get called before the Taskmaster has |
895 # had a chance to call disambiguate() directly to see if this Entry | 905 # had a chance to call disambiguate() directly to see if this Entry |
896 # should really be a Dir or a File. We therefore use these to call | 906 # should really be a Dir or a File. We therefore use these to call |
897 # disambiguate() transparently (from our caller's point of view). | 907 # disambiguate() transparently (from our caller's point of view). |
898 # | 908 # |
899 # Right now, this minimal set of methods has been derived by just | 909 # Right now, this minimal set of methods has been derived by just |
900 # looking at some of the methods that will obviously be called early | 910 # looking at some of the methods that will obviously be called early |
901 # in any of the various Taskmasters' calling sequences, and then | 911 # in any of the various Taskmasters' calling sequences, and then |
902 # empirically figuring out which additional methods are necessary | 912 # empirically figuring out which additional methods are necessary |
903 # to make various tests pass. | 913 # to make various tests pass. |
904 | 914 |
905 def exists(self): | 915 def exists(self): |
906 """Return if the Entry exists. Check the file system to see | 916 """Return if the Entry exists. Check the file system to see |
907 what we should turn into first. Assume a file if there's no | 917 what we should turn into first. Assume a file if there's no |
908 directory.""" | 918 directory.""" |
909 return self.disambiguate().exists() | 919 return self.disambiguate().exists() |
910 | 920 |
911 def rel_path(self, other): | 921 def rel_path(self, other): |
912 d = self.disambiguate() | 922 d = self.disambiguate() |
913 if d.__class__ == Entry: | 923 if d.__class__ is Entry: |
914 raise "rel_path() could not disambiguate File/Dir" | 924 raise "rel_path() could not disambiguate File/Dir" |
915 return d.rel_path(other) | 925 return d.rel_path(other) |
916 | 926 |
917 def new_ninfo(self): | 927 def new_ninfo(self): |
918 return self.disambiguate().new_ninfo() | 928 return self.disambiguate().new_ninfo() |
919 | 929 |
920 def changed_since_last_build(self, target, prev_ni): | 930 def changed_since_last_build(self, target, prev_ni): |
921 return self.disambiguate().changed_since_last_build(target, prev_ni) | 931 return self.disambiguate().changed_since_last_build(target, prev_ni) |
922 | 932 |
923 def _glob1(self, pattern, ondisk=True, source=False, strings=False): | 933 def _glob1(self, pattern, ondisk=True, source=False, strings=False): |
(...skipping 134 matching lines...)
1058 def getcwd(self): | 1068 def getcwd(self): |
1059 return self._cwd | 1069 return self._cwd |
1060 | 1070 |
1061 def chdir(self, dir, change_os_dir=0): | 1071 def chdir(self, dir, change_os_dir=0): |
1062 """Change the current working directory for lookups. | 1072 """Change the current working directory for lookups. |
1063 If change_os_dir is true, we will also change the "real" cwd | 1073 If change_os_dir is true, we will also change the "real" cwd |
1064 to match. | 1074 to match. |
1065 """ | 1075 """ |
1066 curr=self._cwd | 1076 curr=self._cwd |
1067 try: | 1077 try: |
1068 if not dir is None: | 1078 if dir is not None: |
1069 self._cwd = dir | 1079 self._cwd = dir |
1070 if change_os_dir: | 1080 if change_os_dir: |
1071 os.chdir(dir.abspath) | 1081 os.chdir(dir.abspath) |
1072 except OSError: | 1082 except OSError: |
1073 self._cwd = curr | 1083 self._cwd = curr |
1074 raise | 1084 raise |
1075 | 1085 |
1076 def get_root(self, drive): | 1086 def get_root(self, drive): |
1077 """ | 1087 """ |
1078 Returns the root directory for the specified drive, creating | 1088 Returns the root directory for the specified drive, creating |
(...skipping 85 matching lines...)
1164 else: | 1174 else: |
1165 if not directory: | 1175 if not directory: |
1166 directory = self._cwd | 1176 directory = self._cwd |
1167 root = directory.root | 1177 root = directory.root |
1168 | 1178 |
1169 if os.sep != '/': | 1179 if os.sep != '/': |
1170 p = string.replace(p, os.sep, '/') | 1180 p = string.replace(p, os.sep, '/') |
1171 return root._lookup_abs(p, fsclass, create) | 1181 return root._lookup_abs(p, fsclass, create) |
1172 | 1182 |
1173 def Entry(self, name, directory = None, create = 1): | 1183 def Entry(self, name, directory = None, create = 1): |
1174 """Lookup or create a generic Entry node with the specified name. | 1184 """Look up or create a generic Entry node with the specified name. |
1175 If the name is a relative path (begins with ./, ../, or a file | 1185 If the name is a relative path (begins with ./, ../, or a file |
1176 name), then it is looked up relative to the supplied directory | 1186 name), then it is looked up relative to the supplied directory |
1177 node, or to the top level directory of the FS (supplied at | 1187 node, or to the top level directory of the FS (supplied at |
1178 construction time) if no directory is supplied. | 1188 construction time) if no directory is supplied. |
1179 """ | 1189 """ |
1180 return self._lookup(name, directory, Entry, create) | 1190 return self._lookup(name, directory, Entry, create) |
1181 | 1191 |
1182 def File(self, name, directory = None, create = 1): | 1192 def File(self, name, directory = None, create = 1): |
1183 """Lookup or create a File node with the specified name. If | 1193 """Look up or create a File node with the specified name. If |
1184 the name is a relative path (begins with ./, ../, or a file name), | 1194 the name is a relative path (begins with ./, ../, or a file name), |
1185 then it is looked up relative to the supplied directory node, | 1195 then it is looked up relative to the supplied directory node, |
1186 or to the top level directory of the FS (supplied at construction | 1196 or to the top level directory of the FS (supplied at construction |
1187 time) if no directory is supplied. | 1197 time) if no directory is supplied. |
1188 | 1198 |
1189 This method will raise TypeError if a directory is found at the | 1199 This method will raise TypeError if a directory is found at the |
1190 specified path. | 1200 specified path. |
1191 """ | 1201 """ |
1192 return self._lookup(name, directory, File, create) | 1202 return self._lookup(name, directory, File, create) |
1193 | 1203 |
1194 def Dir(self, name, directory = None, create = True): | 1204 def Dir(self, name, directory = None, create = True): |
1195 """Lookup or create a Dir node with the specified name. If | 1205 """Look up or create a Dir node with the specified name. If |
1196 the name is a relative path (begins with ./, ../, or a file name), | 1206 the name is a relative path (begins with ./, ../, or a file name), |
1197 then it is looked up relative to the supplied directory node, | 1207 then it is looked up relative to the supplied directory node, |
1198 or to the top level directory of the FS (supplied at construction | 1208 or to the top level directory of the FS (supplied at construction |
1199 time) if no directory is supplied. | 1209 time) if no directory is supplied. |
1200 | 1210 |
1201 This method will raise TypeError if a normal file is found at the | 1211 This method will raise TypeError if a normal file is found at the |
1202 specified path. | 1212 specified path. |
1203 """ | 1213 """ |
1204 return self._lookup(name, directory, Dir, create) | 1214 return self._lookup(name, directory, Dir, create) |
1205 | 1215 |
(...skipping 135 matching lines...)
1341 for node in self.entries.values(): | 1351 for node in self.entries.values(): |
1342 if node != self.dir: | 1352 if node != self.dir: |
1343 if node != self and isinstance(node, Dir): | 1353 if node != self and isinstance(node, Dir): |
1344 node.__clearRepositoryCache(duplicate) | 1354 node.__clearRepositoryCache(duplicate) |
1345 else: | 1355 else: |
1346 node.clear() | 1356 node.clear() |
1347 try: | 1357 try: |
1348 del node._srcreps | 1358 del node._srcreps |
1349 except AttributeError: | 1359 except AttributeError: |
1350 pass | 1360 pass |
1351 if duplicate != None: | 1361 if duplicate is not None: |
1352 node.duplicate=duplicate | 1362 node.duplicate=duplicate |
1353 | 1363 |
1354 def __resetDuplicate(self, node): | 1364 def __resetDuplicate(self, node): |
1355 if node != self: | 1365 if node != self: |
1356 node.duplicate = node.get_dir().duplicate | 1366 node.duplicate = node.get_dir().duplicate |
1357 | 1367 |
1358 def Entry(self, name): | 1368 def Entry(self, name): |
1359 """ | 1369 """ |
1360 Looks up or creates an entry node named 'name' relative to | 1370 Looks up or creates an entry node named 'name' relative to |
1361 this directory. | 1371 this directory. |
1362 """ | 1372 """ |
1363 return self.fs.Entry(name, self) | 1373 return self.fs.Entry(name, self) |
1364 | 1374 |
1365 def Dir(self, name, create=True): | 1375 def Dir(self, name, create=True): |
1366 """ | 1376 """ |
1367 Looks up or creates a directory node named 'name' relative to | 1377 Looks up or creates a directory node named 'name' relative to |
1368 this directory. | 1378 this directory. |
1369 """ | 1379 """ |
1370 dir = self.fs.Dir(name, self, create) | 1380 return self.fs.Dir(name, self, create) |
1371 return dir | |
1372 | 1381 |
1373 def File(self, name): | 1382 def File(self, name): |
1374 """ | 1383 """ |
1375 Looks up or creates a file node named 'name' relative to | 1384 Looks up or creates a file node named 'name' relative to |
1376 this directory. | 1385 this directory. |
1377 """ | 1386 """ |
1378 return self.fs.File(name, self) | 1387 return self.fs.File(name, self) |
1379 | 1388 |
1380 def _lookup_rel(self, name, klass, create=1): | 1389 def _lookup_rel(self, name, klass, create=1): |
1381 """ | 1390 """ |
(...skipping 62 matching lines...)
1444 | 1453 |
1445 def _rel_path_key(self, other): | 1454 def _rel_path_key(self, other): |
1446 return str(other) | 1455 return str(other) |
1447 | 1456 |
1448 memoizer_counters.append(SCons.Memoize.CountDict('rel_path', _rel_path_key)) | 1457 memoizer_counters.append(SCons.Memoize.CountDict('rel_path', _rel_path_key)) |
1449 | 1458 |
1450 def rel_path(self, other): | 1459 def rel_path(self, other): |
1451 """Return a path to "other" relative to this directory. | 1460 """Return a path to "other" relative to this directory. |
1452 """ | 1461 """ |
1453 | 1462 |
1454 » # This complicated and expensive method, which constructs relative | 1463 # This complicated and expensive method, which constructs relative |
1455 » # paths between arbitrary Node.FS objects, is no longer used | 1464 # paths between arbitrary Node.FS objects, is no longer used |
1456 » # by SCons itself. It was introduced to store dependency paths | 1465 # by SCons itself. It was introduced to store dependency paths |
1457 » # in .sconsign files relative to the target, but that ended up | 1466 # in .sconsign files relative to the target, but that ended up |
1458 » # being significantly inefficient. | 1467 # being significantly inefficient. |
1459 # | 1468 # |
1460 » # We're continuing to support the method because some SConstruct | 1469 # We're continuing to support the method because some SConstruct |
1461 » # files out there started using it when it was available, and | 1470 # files out there started using it when it was available, and |
1462 » # we're all about backwards compatibility.. | 1471 # we're all about backwards compatibility.. |
1463 | 1472 |
1464 try: | 1473 try: |
1465 memo_dict = self._memo['rel_path'] | 1474 memo_dict = self._memo['rel_path'] |
1466 except KeyError: | 1475 except KeyError: |
1467 memo_dict = {} | 1476 memo_dict = {} |
1468 self._memo['rel_path'] = memo_dict | 1477 self._memo['rel_path'] = memo_dict |
1469 else: | 1478 else: |
1470 try: | 1479 try: |
1471 return memo_dict[other] | 1480 return memo_dict[other] |
1472 except KeyError: | 1481 except KeyError: |
1473 pass | 1482 pass |
1474 | 1483 |
1475 if self is other: | 1484 if self is other: |
1476 | |
1477 result = '.' | 1485 result = '.' |
1478 | 1486 |
1479 elif not other in self.path_elements: | 1487 elif not other in self.path_elements: |
1480 | |
1481 try: | 1488 try: |
1482 other_dir = other.get_dir() | 1489 other_dir = other.get_dir() |
1483 except AttributeError: | 1490 except AttributeError: |
1484 result = str(other) | 1491 result = str(other) |
1485 else: | 1492 else: |
1486 if other_dir is None: | 1493 if other_dir is None: |
1487 result = other.name | 1494 result = other.name |
1488 else: | 1495 else: |
1489 dir_rel_path = self.rel_path(other_dir) | 1496 dir_rel_path = self.rel_path(other_dir) |
1490 if dir_rel_path == '.': | 1497 if dir_rel_path == '.': |
1491 result = other.name | 1498 result = other.name |
1492 else: | 1499 else: |
1493 result = dir_rel_path + os.sep + other.name | 1500 result = dir_rel_path + os.sep + other.name |
1494 | |
1495 else: | 1501 else: |
1496 | |
1497 i = self.path_elements.index(other) + 1 | 1502 i = self.path_elements.index(other) + 1 |
1498 | 1503 |
1499 path_elems = ['..'] * (len(self.path_elements) - i) \ | 1504 path_elems = ['..'] * (len(self.path_elements) - i) \ |
1500 + map(lambda n: n.name, other.path_elements[i:]) | 1505 + map(lambda n: n.name, other.path_elements[i:]) |
1501 | 1506 |
1502 result = string.join(path_elems, os.sep) | 1507 result = string.join(path_elems, os.sep) |
1503 | 1508 |
1504 memo_dict[other] = result | 1509 memo_dict[other] = result |
1505 | 1510 |
1506 return result | 1511 return result |
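Note: rel_path() memoizes per-'other' results and assembles the path from shared path_elements. The core computation, reduced to plain component lists (illustrative only, not the SCons API):

    import os

    def rel_path(from_elems, to_elems):
        # Count the shared prefix, then climb out with '..' and descend into
        # the remainder of the destination.
        common = 0
        for a, b in zip(from_elems, to_elems):
            if a != b:
                break
            common += 1
        parts = ['..'] * (len(from_elems) - common) + list(to_elems[common:])
        return os.sep.join(parts) or '.'

    # rel_path(['src', 'build'], ['src', 'include'])  ->  '../include' (POSIX)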
(...skipping 30 matching lines...)
1537 # | 1542 # |
1538 # Taskmaster interface subsystem | 1543 # Taskmaster interface subsystem |
1539 # | 1544 # |
1540 | 1545 |
1541 def prepare(self): | 1546 def prepare(self): |
1542 pass | 1547 pass |
1543 | 1548 |
1544 def build(self, **kw): | 1549 def build(self, **kw): |
1545 """A null "builder" for directories.""" | 1550 """A null "builder" for directories.""" |
1546 global MkdirBuilder | 1551 global MkdirBuilder |
1547 if not self.builder is MkdirBuilder: | 1552 if self.builder is not MkdirBuilder: |
1548 apply(SCons.Node.Node.build, [self,], kw) | 1553 apply(SCons.Node.Node.build, [self,], kw) |
1549 | 1554 |
1550 # | 1555 # |
1551 # | 1556 # |
1552 # | 1557 # |
1553 | 1558 |
1554 def _create(self): | 1559 def _create(self): |
1555 """Create this directory, silently and without worrying about | 1560 """Create this directory, silently and without worrying about |
1556 whether the builder is the default or not.""" | 1561 whether the builder is the default or not.""" |
1557 listDirs = [] | 1562 listDirs = [] |
1558 parent = self | 1563 parent = self |
1559 while parent: | 1564 while parent: |
1560 if parent.exists(): | 1565 if parent.exists(): |
1561 break | 1566 break |
1562 listDirs.append(parent) | 1567 listDirs.append(parent) |
1563 p = parent.up() | 1568 parent = parent.up() |
1564 if p is None: | 1569 else: |
1565 raise SCons.Errors.StopError, parent.path | 1570 raise SCons.Errors.StopError, parent.path |
1566 parent = p | |
1567 listDirs.reverse() | 1571 listDirs.reverse() |
1568 for dirnode in listDirs: | 1572 for dirnode in listDirs: |
1569 try: | 1573 try: |
1570 # Don't call dirnode.build(), call the base Node method | 1574 # Don't call dirnode.build(), call the base Node method |
1571 # directly because we definitely *must* create this | 1575 # directly because we definitely *must* create this |
1572 # directory. The dirnode.build() method will suppress | 1576 # directory. The dirnode.build() method will suppress |
1573 # the build if it's the default builder. | 1577 # the build if it's the default builder. |
1574 SCons.Node.Node.build(dirnode) | 1578 SCons.Node.Node.build(dirnode) |
1575 dirnode.get_executor().nullify() | 1579 dirnode.get_executor().nullify() |
1576 # The build() action may or may not have actually | 1580 # The build() action may or may not have actually |
1577 # created the directory, depending on whether the -n | 1581 # created the directory, depending on whether the -n |
1578 # option was used or not. Delete the _exists and | 1582 # option was used or not. Delete the _exists and |
1579 # _rexists attributes so they can be reevaluated. | 1583 # _rexists attributes so they can be reevaluated. |
1580 dirnode.clear() | 1584 dirnode.clear() |
1581 except OSError: | 1585 except OSError: |
1582 pass | 1586 pass |
1583 | 1587 |
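Note: the rewritten _create() loop leans on while/else: the else clause runs only when the walk falls off the root (parent.up() returned None) without finding an existing directory, which is exactly the old "p is None" error case. A sketch of the control flow, assuming a node-like object with exists(), up() and mkdir():

    def create_missing(node):
        missing = []
        parent = node
        while parent:
            if parent.exists():
                break
            missing.append(parent)
            parent = parent.up()      # up() is assumed to return None at the root
        else:
            raise RuntimeError("no existing ancestor for %s" % node)
        missing.reverse()
        for d in missing:
            d.mkdir()                 # create closest-to-root first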
1584 def multiple_side_effect_has_builder(self): | 1588 def multiple_side_effect_has_builder(self): |
1585 global MkdirBuilder | 1589 global MkdirBuilder |
1586 return not self.builder is MkdirBuilder and self.has_builder() | 1590 return self.builder is not MkdirBuilder and self.has_builder() |
1587 | 1591 |
1588 def alter_targets(self): | 1592 def alter_targets(self): |
1589 """Return any corresponding targets in a variant directory. | 1593 """Return any corresponding targets in a variant directory. |
1590 """ | 1594 """ |
1591 return self.fs.variant_dir_target_climb(self, self, []) | 1595 return self.fs.variant_dir_target_climb(self, self, []) |
1592 | 1596 |
1593 def scanner_key(self): | 1597 def scanner_key(self): |
1594 """A directory does not get scanned.""" | 1598 """A directory does not get scanned.""" |
1595 return None | 1599 return None |
1596 | 1600 |
(...skipping 18 matching lines...)
1615 return SCons.Util.MD5signature(contents) | 1619 return SCons.Util.MD5signature(contents) |
1616 | 1620 |
1617 def do_duplicate(self, src): | 1621 def do_duplicate(self, src): |
1618 pass | 1622 pass |
1619 | 1623 |
1620 changed_since_last_build = SCons.Node.Node.state_has_changed | 1624 changed_since_last_build = SCons.Node.Node.state_has_changed |
1621 | 1625 |
1622 def is_up_to_date(self): | 1626 def is_up_to_date(self): |
1623 """If any child is not up-to-date, then this directory isn't, | 1627 """If any child is not up-to-date, then this directory isn't, |
1624 either.""" | 1628 either.""" |
1625 if not self.builder is MkdirBuilder and not self.exists(): | 1629 if self.builder is not MkdirBuilder and not self.exists(): |
1626 return 0 | 1630 return 0 |
1627 up_to_date = SCons.Node.up_to_date | 1631 up_to_date = SCons.Node.up_to_date |
1628 for kid in self.children(): | 1632 for kid in self.children(): |
1629 if kid.get_state() > up_to_date: | 1633 if kid.get_state() > up_to_date: |
1630 return 0 | 1634 return 0 |
1631 return 1 | 1635 return 1 |
1632 | 1636 |
1633 def rdir(self): | 1637 def rdir(self): |
1634 if not self.exists(): | 1638 if not self.exists(): |
1635 norm_name = _my_normcase(self.name) | 1639 norm_name = _my_normcase(self.name) |
(...skipping 152 matching lines...)
1788 return node | 1792 return node |
1789 | 1793 |
1790 def file_on_disk(self, name): | 1794 def file_on_disk(self, name): |
1791 if self.entry_exists_on_disk(name) or \ | 1795 if self.entry_exists_on_disk(name) or \ |
1792 diskcheck_rcs(self, name) or \ | 1796 diskcheck_rcs(self, name) or \ |
1793 diskcheck_sccs(self, name): | 1797 diskcheck_sccs(self, name): |
1794 try: return self.File(name) | 1798 try: return self.File(name) |
1795 except TypeError: pass | 1799 except TypeError: pass |
1796 node = self.srcdir_duplicate(name) | 1800 node = self.srcdir_duplicate(name) |
1797 if isinstance(node, Dir): | 1801 if isinstance(node, Dir): |
1798 node = None | 1802 return None |
1799 return node | 1803 return node |
1800 | 1804 |
1801 def walk(self, func, arg): | 1805 def walk(self, func, arg): |
1802 """ | 1806 """ |
1803 Walk this directory tree by calling the specified function | 1807 Walk this directory tree by calling the specified function |
1804 for each directory in the tree. | 1808 for each directory in the tree. |
1805 | 1809 |
1806 This behaves like the os.path.walk() function, but for in-memory | 1810 This behaves like the os.path.walk() function, but for in-memory |
1807 Node.FS.Dir objects. The function takes the same arguments as | 1811 Node.FS.Dir objects. The function takes the same arguments as |
1808 the functions passed to os.path.walk(): | 1812 the functions passed to os.path.walk(): |
(...skipping 72 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1881 This searches any repositories and source directories for | 1885 This searches any repositories and source directories for |
1882 corresponding entries and returns a Node (or string) relative | 1886 corresponding entries and returns a Node (or string) relative |
1883 to the current directory if an entry is found anywhere. | 1887 to the current directory if an entry is found anywhere. |
1884 | 1888 |
1885 TODO: handle pattern with no wildcard | 1889 TODO: handle pattern with no wildcard |
1886 """ | 1890 """ |
1887 search_dir_list = self.get_all_rdirs() | 1891 search_dir_list = self.get_all_rdirs() |
1888 for srcdir in self.srcdir_list(): | 1892 for srcdir in self.srcdir_list(): |
1889 search_dir_list.extend(srcdir.get_all_rdirs()) | 1893 search_dir_list.extend(srcdir.get_all_rdirs()) |
1890 | 1894 |
| 1895 selfEntry = self.Entry |
1891 names = [] | 1896 names = [] |
1892 for dir in search_dir_list: | 1897 for dir in search_dir_list: |
1893 # We use the .name attribute from the Node because the keys of | 1898 # We use the .name attribute from the Node because the keys of |
1894 # the dir.entries dictionary are normalized (that is, all upper | 1899 # the dir.entries dictionary are normalized (that is, all upper |
1895 # case) on case-insensitive systems like Windows. | 1900 # case) on case-insensitive systems like Windows. |
1896 #node_names = [ v.name for k, v in dir.entries.items() if k not in ('.', '..') ] | 1901 #node_names = [ v.name for k, v in dir.entries.items() if k not in ('.', '..') ] |
1897 entry_names = filter(lambda n: n not in ('.', '..'), dir.entries.keys()) | 1902 entry_names = filter(lambda n: n not in ('.', '..'), dir.entries.keys()) |
1898 node_names = map(lambda n, e=dir.entries: e[n].name, entry_names) | 1903 node_names = map(lambda n, e=dir.entries: e[n].name, entry_names) |
1899 names.extend(node_names) | 1904 names.extend(node_names) |
1900 if not strings: | 1905 if not strings: |
1901 # Make sure the working directory (self) actually has | 1906 # Make sure the working directory (self) actually has |
1902 # entries for all Nodes in repositories or variant dirs. | 1907 # entries for all Nodes in repositories or variant dirs. |
1903 map(self.Entry, node_names) | 1908 map(selfEntry, node_names) |
1904 if ondisk: | 1909 if ondisk: |
1905 try: | 1910 try: |
1906 disk_names = os.listdir(dir.abspath) | 1911 disk_names = os.listdir(dir.abspath) |
1907 except os.error: | 1912 except os.error: |
1908 pass | 1913 continue |
1909 else: | 1914 names.extend(disk_names) |
1910 names.extend(disk_names) | 1915 if not strings: |
1911 if not strings: | 1916 # We're going to return corresponding Nodes in |
1912 # We're going to return corresponding Nodes in | 1917 # the local directory, so we need to make sure |
1913 # the local directory, so we need to make sure | 1918 # those Nodes exist. We only want to create |
1914 # those Nodes exist. We only want to create | 1919 # Nodes for the entries that will match the |
1915 # Nodes for the entries that will match the | 1920 # specified pattern, though, which means we |
1916 # specified pattern, though, which means we | 1921 # need to filter the list here, even though |
1917 # need to filter the list here, even though | 1922 # the overall list will also be filtered later, |
1918 # the overall list will also be filtered later, | 1923 # after we exit this loop. |
1919 # after we exit this loop. | 1924 if pattern[0] != '.': |
1920 if pattern[0] != '.': | 1925 #disk_names = [ d for d in disk_names if d[0] != '.' ] |
1921 #disk_names = [ d for d in disk_names if d[0] != '.' ] | 1926 disk_names = filter(lambda x: x[0] != '.', disk_names) |
1922 disk_names = filter(lambda x: x[0] != '.', disk_names) | 1927 disk_names = fnmatch.filter(disk_names, pattern) |
1923 disk_names = fnmatch.filter(disk_names, pattern) | 1928 dirEntry = dir.Entry |
1924 rep_nodes = map(dir.Entry, disk_names) | 1929 for name in disk_names: |
1925 #rep_nodes = [ n.disambiguate() for n in rep_nodes ] | 1930 # Add './' before disk filename so that '#' at |
1926 rep_nodes = map(lambda n: n.disambiguate(), rep_nodes) | 1931 # beginning of filename isn't interpreted. |
1927 for node, name in izip(rep_nodes, disk_names): | 1932 name = './' + name |
1928 n = self.Entry(name) | 1933 node = dirEntry(name).disambiguate() |
1929 if n.__class__ != node.__class__: | 1934 n = selfEntry(name) |
1930 n.__class__ = node.__class__ | 1935 if n.__class__ != node.__class__: |
1931 n._morph() | 1936 n.__class__ = node.__class__ |
| 1937 n._morph() |
1932 | 1938 |
1933 names = set(names) | 1939 names = set(names) |
1934 if pattern[0] != '.': | 1940 if pattern[0] != '.': |
1935 #names = [ n for n in names if n[0] != '.' ] | 1941 #names = [ n for n in names if n[0] != '.' ] |
1936 names = filter(lambda x: x[0] != '.', names) | 1942 names = filter(lambda x: x[0] != '.', names) |
1937 names = fnmatch.filter(names, pattern) | 1943 names = fnmatch.filter(names, pattern) |
1938 | 1944 |
1939 if strings: | 1945 if strings: |
1940 return names | 1946 return names |
1941 | 1947 |
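Note: both the on-disk and the final filtering steps here hide dot-files unless the pattern itself starts with '.', then apply the shell-style pattern. Reduced to a stand-alone helper (illustrative only):

    import fnmatch

    def glob_filter(names, pattern):
        names = sorted(set(names))
        if not pattern.startswith('.'):
            names = filter(lambda n: not n.startswith('.'), names)
        return fnmatch.filter(names, pattern)

    # glob_filter(['a.c', 'b.c', '.hidden.c', 'a.c'], '*.c')  ->  ['a.c', 'b.c']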
(...skipping 188 matching lines...)
2130 attrs = [ | 2136 attrs = [ |
2131 ('bsources', 'bsourcesigs'), | 2137 ('bsources', 'bsourcesigs'), |
2132 ('bdepends', 'bdependsigs'), | 2138 ('bdepends', 'bdependsigs'), |
2133 ('bimplicit', 'bimplicitsigs'), | 2139 ('bimplicit', 'bimplicitsigs'), |
2134 ] | 2140 ] |
2135 for (nattr, sattr) in attrs: | 2141 for (nattr, sattr) in attrs: |
2136 try: | 2142 try: |
2137 strings = getattr(self, nattr) | 2143 strings = getattr(self, nattr) |
2138 nodeinfos = getattr(self, sattr) | 2144 nodeinfos = getattr(self, sattr) |
2139 except AttributeError: | 2145 except AttributeError: |
2140 pass | 2146 continue |
2141 else: | 2147 nodes = [] |
2142 nodes = [] | 2148 for s, ni in izip(strings, nodeinfos): |
2143 for s, ni in izip(strings, nodeinfos): | 2149 if not isinstance(s, SCons.Node.Node): |
2144 if not isinstance(s, SCons.Node.Node): | 2150 s = ni.str_to_node(s) |
2145 s = ni.str_to_node(s) | 2151 nodes.append(s) |
2146 nodes.append(s) | 2152 setattr(self, nattr, nodes) |
2147 setattr(self, nattr, nodes) | |
2148 def format(self, names=0): | 2153 def format(self, names=0): |
2149 result = [] | 2154 result = [] |
2150 bkids = self.bsources + self.bdepends + self.bimplicit | 2155 bkids = self.bsources + self.bdepends + self.bimplicit |
2151 bkidsigs = self.bsourcesigs + self.bdependsigs + self.bimplicitsigs | 2156 bkidsigs = self.bsourcesigs + self.bdependsigs + self.bimplicitsigs |
2152 for bkid, bkidsig in izip(bkids, bkidsigs): | 2157 for bkid, bkidsig in izip(bkids, bkidsigs): |
2153 result.append(str(bkid) + ': ' + | 2158 result.append(str(bkid) + ': ' + |
2154 string.join(bkidsig.format(names=names), ' ')) | 2159 string.join(bkidsig.format(names=names), ' ')) |
2155 result.append('%s [%s]' % (self.bactsig, self.bact)) | 2160 result.append('%s [%s]' % (self.bactsig, self.bact)) |
2156 return string.join(result, '\n') | 2161 return string.join(result, '\n') |
2157 | 2162 |
(...skipping 12 matching lines...) Expand all Loading... |
2170 diskcheck_match(self, self.isdir, | 2175 diskcheck_match(self, self.isdir, |
2171 "Directory %s found where file expected.") | 2176 "Directory %s found where file expected.") |
2172 | 2177 |
2173 def __init__(self, name, directory, fs): | 2178 def __init__(self, name, directory, fs): |
2174 if __debug__: logInstanceCreation(self, 'Node.FS.File') | 2179 if __debug__: logInstanceCreation(self, 'Node.FS.File') |
2175 Base.__init__(self, name, directory, fs) | 2180 Base.__init__(self, name, directory, fs) |
2176 self._morph() | 2181 self._morph() |
2177 | 2182 |
2178 def Entry(self, name): | 2183 def Entry(self, name): |
2179 """Create an entry node named 'name' relative to | 2184 """Create an entry node named 'name' relative to |
2180 the SConscript directory of this file.""" | 2185 the directory of this file.""" |
2181 cwd = self.cwd or self.fs._cwd | 2186 return self.dir.Entry(name) |
2182 return cwd.Entry(name) | |
2183 | 2187 |
2184 def Dir(self, name, create=True): | 2188 def Dir(self, name, create=True): |
2185 """Create a directory node named 'name' relative to | 2189 """Create a directory node named 'name' relative to |
2186 the SConscript directory of this file.""" | 2190 the directory of this file.""" |
2187 cwd = self.cwd or self.fs._cwd | 2191 return self.dir.Dir(name, create=create) |
2188 return cwd.Dir(name, create) | |
2189 | 2192 |
2190 def Dirs(self, pathlist): | 2193 def Dirs(self, pathlist): |
2191 """Create a list of directories relative to the SConscript | 2194 """Create a list of directories relative to the SConscript |
2192 directory of this file.""" | 2195 directory of this file.""" |
| 2196 # TODO(1.5) |
| 2197 # return [self.Dir(p) for p in pathlist] |
2193 return map(lambda p, s=self: s.Dir(p), pathlist) | 2198 return map(lambda p, s=self: s.Dir(p), pathlist) |
2194 | 2199 |
2195 def File(self, name): | 2200 def File(self, name): |
2196 """Create a file node named 'name' relative to | 2201 """Create a file node named 'name' relative to |
2197 the SConscript directory of this file.""" | 2202 the directory of this file.""" |
2198 cwd = self.cwd or self.fs._cwd | 2203 return self.dir.File(name) |
2199 return cwd.File(name) | |
2200 | 2204 |
2201 #def generate_build_dict(self): | 2205 #def generate_build_dict(self): |
2202 # """Return an appropriate dictionary of values for building | 2206 # """Return an appropriate dictionary of values for building |
2203 # this File.""" | 2207 # this File.""" |
2204 # return {'Dir' : self.Dir, | 2208 # return {'Dir' : self.Dir, |
2205 # 'File' : self.File, | 2209 # 'File' : self.File, |
2206 # 'RDirs' : self.RDirs} | 2210 # 'RDirs' : self.RDirs} |
2207 | 2211 |
2208 def _morph(self): | 2212 def _morph(self): |
2209 """Turn a file system node into a File object.""" | 2213 """Turn a file system node into a File object.""" |
(...skipping 173 matching lines...)
2383 # rebuild anyway when it doesn't match the new content signature, | 2387 # rebuild anyway when it doesn't match the new content signature, |
2384 # but that's probably the best we can do. | 2388 # but that's probably the best we can do. |
2385 import SCons.SConsign | 2389 import SCons.SConsign |
2386 new_entry = SCons.SConsign.SConsignEntry() | 2390 new_entry = SCons.SConsign.SConsignEntry() |
2387 new_entry.binfo = self.new_binfo() | 2391 new_entry.binfo = self.new_binfo() |
2388 binfo = new_entry.binfo | 2392 binfo = new_entry.binfo |
2389 for attr in self.convert_copy_attrs: | 2393 for attr in self.convert_copy_attrs: |
2390 try: | 2394 try: |
2391 value = getattr(old_entry, attr) | 2395 value = getattr(old_entry, attr) |
2392 except AttributeError: | 2396 except AttributeError: |
2393 pass | 2397 continue |
2394 else: | 2398 setattr(binfo, attr, value) |
2395 setattr(binfo, attr, value) | 2399 delattr(old_entry, attr) |
2396 delattr(old_entry, attr) | |
2397 for attr in self.convert_sig_attrs: | 2400 for attr in self.convert_sig_attrs: |
2398 try: | 2401 try: |
2399 sig_list = getattr(old_entry, attr) | 2402 sig_list = getattr(old_entry, attr) |
2400 except AttributeError: | 2403 except AttributeError: |
2401 pass | 2404 continue |
2402 else: | 2405 value = [] |
2403 value = [] | 2406 for sig in sig_list: |
2404 for sig in sig_list: | 2407 ninfo = self.new_ninfo() |
2405 ninfo = self.new_ninfo() | 2408 if len(sig) == 32: |
2406 if len(sig) == 32: | 2409 ninfo.csig = sig |
2407 ninfo.csig = sig | 2410 else: |
2408 else: | 2411 ninfo.timestamp = sig |
2409 ninfo.timestamp = sig | 2412 value.append(ninfo) |
2410 value.append(ninfo) | 2413 setattr(binfo, attr, value) |
2411 setattr(binfo, attr, value) | 2414 delattr(old_entry, attr) |
2412 delattr(old_entry, attr) | |
2413 return new_entry | 2415 return new_entry |
2414 | 2416 |
2415 memoizer_counters.append(SCons.Memoize.CountValue('get_stored_info')) | 2417 memoizer_counters.append(SCons.Memoize.CountValue('get_stored_info')) |
2416 | 2418 |
2417 def get_stored_info(self): | 2419 def get_stored_info(self): |
2418 try: | 2420 try: |
2419 return self._memo['get_stored_info'] | 2421 return self._memo['get_stored_info'] |
2420 except KeyError: | 2422 except KeyError: |
2421 pass | 2423 pass |
2422 | 2424 |
(...skipping 43 matching lines...)
2466 except KeyError: | 2468 except KeyError: |
2467 memo_dict = {} | 2469 memo_dict = {} |
2468 self._memo['get_found_includes'] = memo_dict | 2470 self._memo['get_found_includes'] = memo_dict |
2469 else: | 2471 else: |
2470 try: | 2472 try: |
2471 return memo_dict[memo_key] | 2473 return memo_dict[memo_key] |
2472 except KeyError: | 2474 except KeyError: |
2473 pass | 2475 pass |
2474 | 2476 |
2475 if scanner: | 2477 if scanner: |
| 2478 # result = [n.disambiguate() for n in scanner(self, env, path)] |
2476 result = scanner(self, env, path) | 2479 result = scanner(self, env, path) |
2477 result = map(lambda N: N.disambiguate(), result) | 2480 result = map(lambda N: N.disambiguate(), result) |
2478 else: | 2481 else: |
2479 result = [] | 2482 result = [] |
2480 | 2483 |
2481 memo_dict[memo_key] = result | 2484 memo_dict[memo_key] = result |
2482 | 2485 |
2483 return result | 2486 return result |
2484 | 2487 |
2485 def _createDir(self): | 2488 def _createDir(self): |
(...skipping 83 matching lines...)
2569 source code builder for it. | 2572 source code builder for it. |
2570 | 2573 |
2571 Note that if we found a source builder, we also set the | 2574 Note that if we found a source builder, we also set the |
2572 self.builder attribute, so that all of the methods that actually | 2575 self.builder attribute, so that all of the methods that actually |
2573 *build* this file don't have to do anything different. | 2576 *build* this file don't have to do anything different. |
2574 """ | 2577 """ |
2575 try: | 2578 try: |
2576 scb = self.sbuilder | 2579 scb = self.sbuilder |
2577 except AttributeError: | 2580 except AttributeError: |
2578 scb = self.sbuilder = self.find_src_builder() | 2581 scb = self.sbuilder = self.find_src_builder() |
2579 return not scb is None | 2582 return scb is not None |
2580 | 2583 |
2581 def alter_targets(self): | 2584 def alter_targets(self): |
2582 """Return any corresponding targets in a variant directory. | 2585 """Return any corresponding targets in a variant directory. |
2583 """ | 2586 """ |
2584 if self.is_derived(): | 2587 if self.is_derived(): |
2585 return [], None | 2588 return [], None |
2586 return self.fs.variant_dir_target_climb(self, self.dir, [self.name]) | 2589 return self.fs.variant_dir_target_climb(self, self.dir, [self.name]) |
2587 | 2590 |
2588 def _rmv_existing(self): | 2591 def _rmv_existing(self): |
2589 self.clear_memoized_values() | 2592 self.clear_memoized_values() |
(...skipping 52 matching lines...)
2642 memoizer_counters.append(SCons.Memoize.CountValue('exists')) | 2645 memoizer_counters.append(SCons.Memoize.CountValue('exists')) |
2643 | 2646 |
2644 def exists(self): | 2647 def exists(self): |
2645 try: | 2648 try: |
2646 return self._memo['exists'] | 2649 return self._memo['exists'] |
2647 except KeyError: | 2650 except KeyError: |
2648 pass | 2651 pass |
2649 # Duplicate from source path if we are set up to do this. | 2652 # Duplicate from source path if we are set up to do this. |
2650 if self.duplicate and not self.is_derived() and not self.linked: | 2653 if self.duplicate and not self.is_derived() and not self.linked: |
2651 src = self.srcnode() | 2654 src = self.srcnode() |
2652 if not src is self: | 2655 if src is not self: |
2653 # At this point, src is meant to be copied in a variant director
y. | 2656 # At this point, src is meant to be copied in a variant director
y. |
2654 src = src.rfile() | 2657 src = src.rfile() |
2655 if src.abspath != self.abspath: | 2658 if src.abspath != self.abspath: |
2656 if src.exists(): | 2659 if src.exists(): |
2657 self.do_duplicate(src) | 2660 self.do_duplicate(src) |
2658 # Can't return 1 here because the duplication might | 2661 # Can't return 1 here because the duplication might |
2659 # not actually occur if the -n option is being used. | 2662 # not actually occur if the -n option is being used. |
2660 else: | 2663 else: |
2661 # The source file does not exist. Make sure no old | 2664 # The source file does not exist. Make sure no old |
2662 # copy remains in the variant directory. | 2665 # copy remains in the variant directory. |
(...skipping 13 matching lines...)
2676 | 2679 |
2677 def get_max_drift_csig(self): | 2680 def get_max_drift_csig(self): |
2678 """ | 2681 """ |
2679 Returns the content signature currently stored for this node | 2682 Returns the content signature currently stored for this node |
2680 if it's been unmodified longer than the max_drift value, or the | 2683 if it's been unmodified longer than the max_drift value, or the |
2681 max_drift value is 0. Returns None otherwise. | 2684 max_drift value is 0. Returns None otherwise. |
2682 """ | 2685 """ |
2683 old = self.get_stored_info() | 2686 old = self.get_stored_info() |
2684 mtime = self.get_timestamp() | 2687 mtime = self.get_timestamp() |
2685 | 2688 |
2686 csig = None | |
2687 max_drift = self.fs.max_drift | 2689 max_drift = self.fs.max_drift |
2688 if max_drift > 0: | 2690 if max_drift > 0: |
2689 if (time.time() - mtime) > max_drift: | 2691 if (time.time() - mtime) > max_drift: |
2690 try: | 2692 try: |
2691 n = old.ninfo | 2693 n = old.ninfo |
2692 if n.timestamp and n.csig and n.timestamp == mtime: | 2694 if n.timestamp and n.csig and n.timestamp == mtime: |
2693 csig = n.csig | 2695 return n.csig |
2694 except AttributeError: | 2696 except AttributeError: |
2695 pass | 2697 pass |
2696 elif max_drift == 0: | 2698 elif max_drift == 0: |
2697 try: | 2699 try: |
2698 csig = old.ninfo.csig | 2700 return old.ninfo.csig |
2699 except AttributeError: | 2701 except AttributeError: |
2700 pass | 2702 pass |
2701 | 2703 |
2702 return csig | 2704 return None |
2703 | 2705 |
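Note: the simplified get_max_drift_csig() returns the previously stored content signature only when it can be trusted: either max_drift is 0 (always trust the store) or the file has sat untouched longer than max_drift and its mtime still matches the stored timestamp. A compact sketch, assuming a stored record with .timestamp and .csig attributes like the ninfo above:

    import time

    def max_drift_csig(stored_ninfo, mtime, max_drift):
        if max_drift > 0:
            if (time.time() - mtime) > max_drift:
                if getattr(stored_ninfo, 'timestamp', None) == mtime and \
                   getattr(stored_ninfo, 'csig', None):
                    return stored_ninfo.csig
        elif max_drift == 0:
            return getattr(stored_ninfo, 'csig', None)
        return None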
2704 def get_csig(self): | 2706 def get_csig(self): |
2705 """ | 2707 """ |
2706 Generate a node's content signature, the digested signature | 2708 Generate a node's content signature, the digested signature |
2707 of its content. | 2709 of its content. |
2708 | 2710 |
2709 node - the node | 2711 node - the node |
2710 cache - alternate node to use for the signature cache | 2712 cache - alternate node to use for the signature cache |
2711 returns - the content signature | 2713 returns - the content signature |
2712 """ | 2714 """ |
(...skipping 34 matching lines...)
2747 self.changed_since_last_build = self.decide_target | 2749 self.changed_since_last_build = self.decide_target |
2748 | 2750 |
2749 def changed_content(self, target, prev_ni): | 2751 def changed_content(self, target, prev_ni): |
2750 cur_csig = self.get_csig() | 2752 cur_csig = self.get_csig() |
2751 try: | 2753 try: |
2752 return cur_csig != prev_ni.csig | 2754 return cur_csig != prev_ni.csig |
2753 except AttributeError: | 2755 except AttributeError: |
2754 return 1 | 2756 return 1 |
2755 | 2757 |
2756 def changed_state(self, target, prev_ni): | 2758 def changed_state(self, target, prev_ni): |
2757 return (self.state != SCons.Node.up_to_date) | 2759 return self.state != SCons.Node.up_to_date |
2758 | 2760 |
2759 def changed_timestamp_then_content(self, target, prev_ni): | 2761 def changed_timestamp_then_content(self, target, prev_ni): |
2760 if not self.changed_timestamp_match(target, prev_ni): | 2762 if not self.changed_timestamp_match(target, prev_ni): |
2761 try: | 2763 try: |
2762 self.get_ninfo().csig = prev_ni.csig | 2764 self.get_ninfo().csig = prev_ni.csig |
2763 except AttributeError: | 2765 except AttributeError: |
2764 pass | 2766 pass |
2765 return False | 2767 return False |
2766 return self.changed_content(target, prev_ni) | 2768 return self.changed_content(target, prev_ni) |
2767 | 2769 |
(...skipping 100 matching lines...)
2868 | 2870 |
2869 def get_cachedir_bsig(self): | 2871 def get_cachedir_bsig(self): |
2870 try: | 2872 try: |
2871 return self.cachesig | 2873 return self.cachesig |
2872 except AttributeError: | 2874 except AttributeError: |
2873 pass | 2875 pass |
2874 | 2876 |
2875 # Add the path to the cache signature, because multiple | 2877 # Add the path to the cache signature, because multiple |
2876 # targets built by the same action will all have the same | 2878 # targets built by the same action will all have the same |
2877 # build signature, and we have to differentiate them somehow. | 2879 # build signature, and we have to differentiate them somehow. |
2878 children = self.children() | 2880 children = self.children() |
| 2881 executor = self.get_executor() |
| 2882 # sigs = [n.get_cachedir_csig() for n in children] |
2879 sigs = map(lambda n: n.get_cachedir_csig(), children) | 2883 sigs = map(lambda n: n.get_cachedir_csig(), children) |
2880 executor = self.get_executor() | |
2881 sigs.append(SCons.Util.MD5signature(executor.get_contents())) | 2884 sigs.append(SCons.Util.MD5signature(executor.get_contents())) |
2882 sigs.append(self.path) | 2885 sigs.append(self.path) |
2883 self.cachesig = SCons.Util.MD5collect(sigs) | 2886 result = self.cachesig = SCons.Util.MD5collect(sigs) |
2884 return self.cachesig | 2887 return result |
2885 | 2888 |
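Note: get_cachedir_bsig() mixes the children's cache signatures, the action contents and the target's own path into one digest, so several targets produced by a single action land in distinct CacheDir entries. A rough equivalent using hashlib directly (SCons.Util.MD5collect/MD5signature are approximated here, not reproduced):

    import hashlib

    def cachedir_bsig(child_csigs, action_contents, target_path):
        md5 = lambda s: hashlib.md5(s.encode('utf-8')).hexdigest()
        sigs = list(child_csigs)
        sigs.append(md5(action_contents))
        sigs.append(target_path)
        return md5(', '.join(sigs))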
2886 | 2889 |
2887 default_fs = None | 2890 default_fs = None |
2888 | 2891 |
2889 def get_default_fs(): | 2892 def get_default_fs(): |
2890 global default_fs | 2893 global default_fs |
2891 if not default_fs: | 2894 if not default_fs: |
2892 default_fs = FS() | 2895 default_fs = FS() |
2893 return default_fs | 2896 return default_fs |
2894 | 2897 |
(...skipping 69 matching lines...)
2964 memo_dict = self._memo['find_file'] | 2967 memo_dict = self._memo['find_file'] |
2965 except KeyError: | 2968 except KeyError: |
2966 memo_dict = {} | 2969 memo_dict = {} |
2967 self._memo['find_file'] = memo_dict | 2970 self._memo['find_file'] = memo_dict |
2968 else: | 2971 else: |
2969 try: | 2972 try: |
2970 return memo_dict[memo_key] | 2973 return memo_dict[memo_key] |
2971 except KeyError: | 2974 except KeyError: |
2972 pass | 2975 pass |
2973 | 2976 |
2974 if verbose: | 2977 if verbose and not callable(verbose): |
2975 if not SCons.Util.is_String(verbose): | 2978 if not SCons.Util.is_String(verbose): |
2976 verbose = "find_file" | 2979 verbose = "find_file" |
2977 if not callable(verbose): | 2980 verbose = ' %s: ' % verbose |
2978 verbose = ' %s: ' % verbose | 2981 verbose = lambda s, v=verbose: sys.stdout.write(v + s) |
2979 verbose = lambda s, v=verbose: sys.stdout.write(v + s) | |
2980 else: | |
2981 verbose = lambda x: x | |
2982 | 2982 |
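Note: the reworked find_file() keeps 'verbose' as either a false value or a callable, instead of always wrapping it in an identity lambda; the call sites below then guard with "if verbose:". A sketch of that normalization (SCons.Util.is_String is replaced here by a plain isinstance check):

    import sys

    def normalize_verbose(verbose, default_label='find_file'):
        if verbose and not callable(verbose):
            if not isinstance(verbose, str):
                verbose = default_label
            prefix = ' %s: ' % verbose
            verbose = lambda s, p=prefix: sys.stdout.write(p + s)
        return verbose

    # v = normalize_verbose(1)
    # if v: v("looking for 'foo.h' in '.' ...\n")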
2983 filedir, filename = os.path.split(filename) | 2983 filedir, filename = os.path.split(filename) |
2984 if filedir: | 2984 if filedir: |
2985 # More compact code that we can't use until we drop | 2985 # More compact code that we can't use until we drop |
2986 # support for Python 1.5.2: | 2986 # support for Python 1.5.2: |
2987 # | 2987 # |
2988 #def filedir_lookup(p, fd=filedir): | 2988 #def filedir_lookup(p, fd=filedir): |
2989 # """ | 2989 # """ |
2990 # A helper function that looks up a directory for a file | 2990 # A helper function that looks up a directory for a file |
2991 # we're trying to find. This only creates the Dir Node | 2991 # we're trying to find. This only creates the Dir Node |
(...skipping 18 matching lines...)
3010 # if isinstance(node, Dir) or isinstance(node, Entry): | 3010 # if isinstance(node, Dir) or isinstance(node, Entry): |
3011 # return node | 3011 # return node |
3012 # return None | 3012 # return None |
3013 #paths = filter(None, map(filedir_lookup, paths)) | 3013 #paths = filter(None, map(filedir_lookup, paths)) |
3014 | 3014 |
3015 self.default_filedir = filedir | 3015 self.default_filedir = filedir |
3016 paths = filter(None, map(self.filedir_lookup, paths)) | 3016 paths = filter(None, map(self.filedir_lookup, paths)) |
3017 | 3017 |
3018 result = None | 3018 result = None |
3019 for dir in paths: | 3019 for dir in paths: |
3020 verbose("looking for '%s' in '%s' ...\n" % (filename, dir)) | 3020 if verbose: |
| 3021 verbose("looking for '%s' in '%s' ...\n" % (filename, dir)) |
3021 node, d = dir.srcdir_find_file(filename) | 3022 node, d = dir.srcdir_find_file(filename) |
3022 if node: | 3023 if node: |
3023 verbose("... FOUND '%s' in '%s'\n" % (filename, d)) | 3024 if verbose: |
| 3025 verbose("... FOUND '%s' in '%s'\n" % (filename, d)) |
3024 result = node | 3026 result = node |
3025 break | 3027 break |
3026 | 3028 |
3027 memo_dict[memo_key] = result | 3029 memo_dict[memo_key] = result |
3028 | 3030 |
3029 return result | 3031 return result |
3030 | 3032 |
3031 find_file = FileFinder().find_file | 3033 find_file = FileFinder().find_file |
3032 | 3034 |
3033 | 3035 |
3034 def invalidate_node_memos(targets): | 3036 def invalidate_node_memos(targets): |
3035 """ | 3037 """ |
3036 Invalidate the memoized values of all Nodes (files or directories) | 3038 Invalidate the memoized values of all Nodes (files or directories) |
3037 that are associated with the given entries. Has been added to | 3039 that are associated with the given entries. Has been added to |
3038 clear the cache of nodes affected by a direct execution of an | 3040 clear the cache of nodes affected by a direct execution of an |
3039 action (e.g. Delete/Copy/Chmod). Existing Node caches become | 3041 action (e.g. Delete/Copy/Chmod). Existing Node caches become |
3040 inconsistent if the action is run through Execute(). The argument | 3042 inconsistent if the action is run through Execute(). The argument |
3041 `targets` can be a single Node object or filename, or a sequence | 3043 `targets` can be a single Node object or filename, or a sequence |
3042 of Nodes/filenames. | 3044 of Nodes/filenames. |
3043 """ | 3045 """ |
3044 from traceback import extract_stack | 3046 from traceback import extract_stack |
3045 | 3047 |
3046 # First check if the cache really needs to be flushed. Only | 3048 # First check if the cache really needs to be flushed. Only |
3047 # actions run in the SConscript with Execute() seem to be | 3049 # actions run in the SConscript with Execute() seem to be |
3048 # affected. XXX The way to check if Execute() is in the stacktrace | 3050 # affected. XXX The way to check if Execute() is in the stacktrace |
3049 # is a very dirty hack and should be replaced by a more sensible | 3051 # is a very dirty hack and should be replaced by a more sensible |
3050 # solution. | 3052 # solution. |
3051 must_invalidate = 0 | 3053 for f in extract_stack(): |
3052 tb = extract_stack() | |
3053 for f in tb: | |
3054 if f[2] == 'Execute' and f[0][-14:] == 'Environment.py': | 3054 if f[2] == 'Execute' and f[0][-14:] == 'Environment.py': |
3055 must_invalidate = 1 | 3055 break |
3056 if not must_invalidate: | 3056 else: |
| 3057 # Dont have to invalidate, so return |
3057 return | 3058 return |
3058 | 3059 |
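Note: the must_invalidate flag is replaced by a for/else: break as soon as an Execute() frame from Environment.py shows up on the stack, and the else branch (reached only when the loop never breaks) returns early. A self-contained sketch of the same test:

    from traceback import extract_stack

    def execute_frame_on_stack():
        for frame in extract_stack():
            # frame is indexable as (filename, lineno, function, text)
            if frame[2] == 'Execute' and frame[0][-14:] == 'Environment.py':
                break
        else:
            return False
        return True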
3059 if not SCons.Util.is_List(targets): | 3060 if not SCons.Util.is_List(targets): |
3060 targets = [targets] | 3061 targets = [targets] |
3061 | 3062 |
3062 for entry in targets: | 3063 for entry in targets: |
3063 # If the target is a Node object, clear the cache. If it is a | 3064 # If the target is a Node object, clear the cache. If it is a |
3064 # filename, look up potentially existing Node object first. | 3065 # filename, look up potentially existing Node object first. |
3065 try: | 3066 try: |
3066 entry.clear_memoized_values() | 3067 entry.clear_memoized_values() |
3067 except AttributeError: | 3068 except AttributeError: |
3068 # Not a Node object, try to look up Node by filename. XXX | 3069 # Not a Node object, try to look up Node by filename. XXX |
3069 # This creates Node objects even for those filenames which | 3070 # This creates Node objects even for those filenames which |
3070 # do not correspond to an existing Node object. | 3071 # do not correspond to an existing Node object. |
3071 node = get_default_fs().Entry(entry) | 3072 node = get_default_fs().Entry(entry) |
3072 if node: | 3073 if node: |
3073 node.clear_memoized_values() | 3074 node.clear_memoized_values() |
3074 | 3075 |