OLD | NEW |
1 """SCons.Tool.tex | 1 """SCons.Tool.tex |
2 | 2 |
3 Tool-specific initialization for TeX. | 3 Tool-specific initialization for TeX. |
4 | 4 |
5 There normally shouldn't be any need to import this module directly. | 5 There normally shouldn't be any need to import this module directly. |
6 It will usually be imported through the generic SCons.Tool.Tool() | 6 It will usually be imported through the generic SCons.Tool.Tool() |
7 selection method. | 7 selection method. |
8 | 8 |
9 """ | 9 """ |
10 | 10 |
(...skipping 13 matching lines...) |
24 # | 24 # |
25 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY | 25 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY |
26 # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE | 26 # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE |
27 # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND | 27 # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND |
28 # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE | 28 # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE |
29 # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION | 29 # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION |
30 # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION | 30 # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION |
31 # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. | 31 # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. |
32 # | 32 # |
33 | 33 |
34 __revision__ = "src/engine/SCons/Tool/tex.py 3603 2008/10/10 05:46:45 scons" | 34 __revision__ = "src/engine/SCons/Tool/tex.py 3842 2008/12/20 22:59:52 scons" |
35 | 35 |
36 import os.path | 36 import os.path |
37 import re | 37 import re |
38 import string | 38 import string |
39 import shutil | 39 import shutil |
40 | 40 |
41 import SCons.Action | 41 import SCons.Action |
42 import SCons.Node | 42 import SCons.Node |
43 import SCons.Node.FS | 43 import SCons.Node.FS |
44 import SCons.Util | 44 import SCons.Util |
| 45 import SCons.Scanner.LaTeX |
45 | 46 |
46 Verbose = False | 47 Verbose = False |
47 | 48 |
48 must_rerun_latex = True | 49 must_rerun_latex = True |
49 | 50 |
50 # these are files that just need to be checked for changes and then rerun latex | 51 # these are files that just need to be checked for changes and then rerun latex |
51 check_suffixes = ['.toc', '.lof', '.lot', '.out', '.nav', '.snm'] | 52 check_suffixes = ['.toc', '.lof', '.lot', '.out', '.nav', '.snm'] |
52 | 53 |
53 # these are files that require bibtex or makeindex to be run when they change | 54 # these are files that require bibtex or makeindex to be run when they change |
54 all_suffixes = check_suffixes + ['.bbl', '.idx', '.nlo', '.glo'] | 55 all_suffixes = check_suffixes + ['.bbl', '.idx', '.nlo', '.glo'] |
(...skipping 16 matching lines...) |
71 # search to find citation rerun warnings | 72 # search to find citation rerun warnings |
72 rerun_citations_str = "^LaTeX Warning:.*\n.*Rerun to get citations correct" | 73 rerun_citations_str = "^LaTeX Warning:.*\n.*Rerun to get citations correct" |
73 rerun_citations_re = re.compile(rerun_citations_str, re.MULTILINE) | 74 rerun_citations_re = re.compile(rerun_citations_str, re.MULTILINE) |
74 | 75 |
75 # search to find undefined references or citations warnings | 76 # search to find undefined references or citations warnings |
76 undefined_references_str = '(^LaTeX Warning:.*undefined references)|(^Package \w+ Warning:.*undefined citations)' | 77 undefined_references_str = '(^LaTeX Warning:.*undefined references)|(^Package \w+ Warning:.*undefined citations)' |
77 undefined_references_re = re.compile(undefined_references_str, re.MULTILINE) | 78 undefined_references_re = re.compile(undefined_references_str, re.MULTILINE) |
78 | 79 |
79 # used by the emitter | 80 # used by the emitter |
80 auxfile_re = re.compile(r".", re.MULTILINE) | 81 auxfile_re = re.compile(r".", re.MULTILINE) |
81 tableofcontents_re = re.compile(r"^[^%]*\\tableofcontents", re.MULTILINE) | 82 tableofcontents_re = re.compile(r"^[^%\n]*\\tableofcontents", re.MULTILINE) |
82 makeindex_re = re.compile(r"^[^%]*\\makeindex", re.MULTILINE) | 83 makeindex_re = re.compile(r"^[^%\n]*\\makeindex", re.MULTILINE) |
83 bibliography_re = re.compile(r"^[^%]*\\bibliography", re.MULTILINE) | 84 bibliography_re = re.compile(r"^[^%\n]*\\bibliography", re.MULTILINE) |
84 listoffigures_re = re.compile(r"^[^%]*\\listoffigures", re.MULTILINE) | 85 listoffigures_re = re.compile(r"^[^%\n]*\\listoffigures", re.MULTILINE) |
85 listoftables_re = re.compile(r"^[^%]*\\listoftables", re.MULTILINE) | 86 listoftables_re = re.compile(r"^[^%\n]*\\listoftables", re.MULTILINE) |
86 hyperref_re = re.compile(r"^[^%]*\\usepackage.*\{hyperref\}", re.MULTILINE) | 87 hyperref_re = re.compile(r"^[^%\n]*\\usepackage.*\{hyperref\}", re.MULTILINE) |
87 makenomenclature_re = re.compile(r"^[^%]*\\makenomenclature", re.MULTILINE) | 88 makenomenclature_re = re.compile(r"^[^%\n]*\\makenomenclature", re.MULTILINE) |
88 makeglossary_re = re.compile(r"^[^%]*\\makeglossary", re.MULTILINE) | 89 makeglossary_re = re.compile(r"^[^%\n]*\\makeglossary", re.MULTILINE) |
89 beamer_re = re.compile(r"^[^%]*\\documentclass\{beamer\}", re.MULTILINE) | 90 beamer_re = re.compile(r"^[^%\n]*\\documentclass\{beamer\}", re.MULTILINE) |
| 91 |
| 92 # search to find all files included by Latex |
| 93 include_re = re.compile(r'^[^%\n]*\\(?:include|input){([^}]*)}', re.MULTILINE) |
| 94 |
| 95 # search to find all graphics files included by Latex |
| 96 includegraphics_re = re.compile(r'^[^%\n]*\\(?:includegraphics(?:\[[^\]]+\])?){([^}]*)}', re.MULTILINE) |
90 | 97 |
91 # search to find all files opened by Latex (recorded in .log file) | 98 # search to find all files opened by Latex (recorded in .log file) |
92 openout_re = re.compile(r"\\openout.*`(.*)'") | 99 openout_re = re.compile(r"\\openout.*`(.*)'") |
93 | 100 |
| 101 # list of graphics file extensions for TeX and LaTeX |
| 102 TexGraphics = SCons.Scanner.LaTeX.TexGraphics |
| 103 LatexGraphics = SCons.Scanner.LaTeX.LatexGraphics |
| 104 |
94 # An Action sufficient to build any generic tex file. | 105 # An Action sufficient to build any generic tex file. |
95 TeXAction = None | 106 TeXAction = None |
96 | 107 |
97 # An action to build a latex file. This action might be needed more | 108 # An action to build a latex file. This action might be needed more |
98 # than once if we are dealing with labels and bibtex. | 109 # than once if we are dealing with labels and bibtex. |
99 LaTeXAction = None | 110 LaTeXAction = None |
100 | 111 |
101 # An action to run BibTeX on a file. | 112 # An action to run BibTeX on a file. |
102 BibTeXAction = None | 113 BibTeXAction = None |
103 | 114 |
104 # An action to run MakeIndex on a file. | 115 # An action to run MakeIndex on a file. |
105 MakeIndexAction = None | 116 MakeIndexAction = None |
106 | 117 |
107 # An action to run MakeIndex (for nomencl) on a file. | 118 # An action to run MakeIndex (for nomencl) on a file. |
108 MakeNclAction = None | 119 MakeNclAction = None |
109 | 120 |
110 # An action to run MakeIndex (for glossary) on a file. | 121 # An action to run MakeIndex (for glossary) on a file. |
111 MakeGlossaryAction = None | 122 MakeGlossaryAction = None |
112 | 123 |
113 # Used as a return value of modify_env_var if the variable is not set. | 124 # Used as a return value of modify_env_var if the variable is not set. |
114 class _Null: | 125 _null = SCons.Scanner.LaTeX._null |
115 pass | |
116 _null = _Null | |
117 | 126 |
118 # The user specifies the paths in env[variable], similar to other builders. | 127 modify_env_var = SCons.Scanner.LaTeX.modify_env_var |
119 # They may be relative and must be converted to absolute, as expected | 128 |
120 # by LaTeX and Co. The environment may already have some paths in | 129 def FindFile(name,suffixes,paths,env,requireExt=False): |
121 # env['ENV'][var]. These paths are honored, but the env[var] paths have | 130 if requireExt: |
122 # higher precedence. All changes are un-done on exit. | 131 name = SCons.Util.splitext(name)[0] |
123 def modify_env_var(env, var, abspath): | 132 if Verbose: |
124 try: | 133 print " searching for '%s' with extensions: " % name,suffixes |
125 save = env['ENV'][var] | 134 |
126 except KeyError: | 135 for path in paths: |
127 save = _null | 136 testName = os.path.join(path,name) |
128 env.PrependENVPath(var, abspath) | 137 if Verbose: |
129 try: | 138 print " look for '%s'" % testName |
130 if SCons.Util.is_List(env[var]): | 139 if os.path.exists(testName): |
131 #TODO(1.5) env.PrependENVPath(var, [os.path.abspath(str(p)) for p in env[var]]) | 140 if Verbose: |
132 env.PrependENVPath(var, map(lambda p: os.path.abspath(str(p)), env[var])) | 141 print " found '%s'" % testName |
| 142 return env.fs.File(testName) |
133 else: | 143 else: |
134 # Split at os.pathsep to convert into absolute path | 144 name_ext = SCons.Util.splitext(testName)[1] |
135 #TODO(1.5) env.PrependENVPath(var, [os.path.abspath(p) for p in str(env[var]).split(os.pathsep)]) | 145 if name_ext: |
136 env.PrependENVPath(var, map(lambda p: os.path.abspath(p), str(env[var]).split(os.pathsep))) | 146 continue |
137 except KeyError: | 147 |
138 pass | 148 # if no suffix try adding those passed in |
139 # Convert into a string explicitly to append ":" (without which it won't search system | 149 for suffix in suffixes: |
140 # paths as well). The problem is that env.AppendENVPath(var, ":") | 150 testNameExt = testName + suffix |
141 # does not work, refuses to append ":" (os.pathsep). | 151 if Verbose: |
142 if SCons.Util.is_List(env['ENV'][var]): | 152 print " look for '%s'" % testNameExt |
143 env['ENV'][var] = os.pathsep.join(env['ENV'][var]) | 153 |
144 # Append the trailing os.pathsep character here to catch the case with no env[var] | 154 if os.path.exists(testNameExt): |
145 env['ENV'][var] = env['ENV'][var] + os.pathsep | 155 if Verbose: |
146 return save | 156 print " found '%s'" % testNameExt |
| 157 return env.fs.File(testNameExt) |
| 158 if Verbose: |
| 159 print " did not find '%s'" % name |
| 160 return None |
147 | 161 |
148 def InternalLaTeXAuxAction(XXXLaTeXAction, target = None, source= None, env=None): | 162 def InternalLaTeXAuxAction(XXXLaTeXAction, target = None, source= None, env=None): |
149 """A builder for LaTeX files that checks the output in the aux file | 163 """A builder for LaTeX files that checks the output in the aux file |
150 and decides how many times to use LaTeXAction, and BibTeXAction.""" | 164 and decides how many times to use LaTeXAction, and BibTeXAction.""" |
151 | 165 |
152 global must_rerun_latex | 166 global must_rerun_latex |
153 | 167 |
154 # This routine is called with two actions. In this file for DVI builds | 168 # This routine is called with two actions. In this file for DVI builds |
155 # with LaTeXAction and from the pdflatex.py with PDFLaTeXAction | 169 # with LaTeXAction and from the pdflatex.py with PDFLaTeXAction |
156 # set this up now for the case where the user requests a different extension | 170 # set this up now for the case where the user requests a different extension |
157 # for the target filename | 171 # for the target filename |
158 if (XXXLaTeXAction == LaTeXAction): | 172 if (XXXLaTeXAction == LaTeXAction): |
159 callerSuffix = ".dvi" | 173 callerSuffix = ".dvi" |
160 else: | 174 else: |
161 callerSuffix = env['PDFSUFFIX'] | 175 callerSuffix = env['PDFSUFFIX'] |
162 | 176 |
163 basename = SCons.Util.splitext(str(source[0]))[0] | 177 basename = SCons.Util.splitext(str(source[0]))[0] |
164 basedir = os.path.split(str(source[0]))[0] | 178 basedir = os.path.split(str(source[0]))[0] |
165 basefile = os.path.split(str(basename))[1] | 179 basefile = os.path.split(str(basename))[1] |
166 abspath = os.path.abspath(basedir) | 180 abspath = os.path.abspath(basedir) |
167 targetext = os.path.splitext(str(target[0]))[1] | 181 targetext = os.path.splitext(str(target[0]))[1] |
168 targetdir = os.path.split(str(target[0]))[0] | 182 targetdir = os.path.split(str(target[0]))[0] |
169 | 183 |
170 saved_env = {} | 184 saved_env = {} |
171 for var in SCons.Scanner.LaTeX.LaTeX.env_variables: | 185 for var in SCons.Scanner.LaTeX.LaTeX.env_variables: |
172 saved_env[var] = modify_env_var(env, var, abspath) | 186 saved_env[var] = modify_env_var(env, var, abspath) |
173 | 187 |
174 # Create a base file names with the target directory since the auxiliary files | 188 # Create base file names with the target directory since the auxiliary files |
175 # will be made there. That's because the *COM variables have the cd | 189 # will be made there. That's because the *COM variables have the cd |
176 # command in the prolog. We check | 190 # command in the prolog. We check |
177 # for the existence of files before opening them--even ones like the | 191 # for the existence of files before opening them--even ones like the |
178 # aux file that TeX always creates--to make it possible to write tests | 192 # aux file that TeX always creates--to make it possible to write tests |
179 # with stubs that don't necessarily generate all of the same files. | 193 # with stubs that don't necessarily generate all of the same files. |
180 | 194 |
181 targetbase = os.path.join(targetdir, basefile) | 195 targetbase = os.path.join(targetdir, basefile) |
182 | 196 |
183 # if there is a \makeindex there will be a .idx and thus | 197 # if there is a \makeindex there will be a .idx and thus |
184 # we have to run makeindex at least once to keep the build | 198 # we have to run makeindex at least once to keep the build |
(...skipping 13 matching lines...) |
198 saved_hashes[suffix] = theNode.get_csig() | 212 saved_hashes[suffix] = theNode.get_csig() |
199 | 213 |
200 if Verbose: | 214 if Verbose: |
201 print "hashes: ",saved_hashes | 215 print "hashes: ",saved_hashes |
202 | 216 |
203 must_rerun_latex = True | 217 must_rerun_latex = True |
204 | 218 |
205 # | 219 # |
206 # routine to update MD5 hash and compare | 220 # routine to update MD5 hash and compare |
207 # | 221 # |
208 def check_MD5(filenode, suffix, saved_hashes=saved_hashes): | 222 # TODO(1.5): nested scopes |
| 223 def check_MD5(filenode, suffix, saved_hashes=saved_hashes, targetbase=targetbase): |
209 global must_rerun_latex | 224 global must_rerun_latex |
210 # two calls to clear old csig | 225 # two calls to clear old csig |
211 filenode.clear_memoized_values() | 226 filenode.clear_memoized_values() |
212 filenode.ninfo = filenode.new_ninfo() | 227 filenode.ninfo = filenode.new_ninfo() |
213 new_md5 = filenode.get_csig() | 228 new_md5 = filenode.get_csig() |
214 | 229 |
215 if saved_hashes[suffix] == new_md5: | 230 if saved_hashes[suffix] == new_md5: |
216 if Verbose: | 231 if Verbose: |
217 print "file %s not changed" % (targetbase+suffix) | 232 print "file %s not changed" % (targetbase+suffix) |
218 return False # unchanged | 233 return False # unchanged |
(...skipping 160 matching lines...) |
379 command string.""" | 394 command string.""" |
380 if env.GetOption("no_exec"): | 395 if env.GetOption("no_exec"): |
381 if is_LaTeX(source): | 396 if is_LaTeX(source): |
382 result = env.subst('$LATEXCOM',0,target,source)+" ..." | 397 result = env.subst('$LATEXCOM',0,target,source)+" ..." |
383 else: | 398 else: |
384 result = env.subst("$TEXCOM",0,target,source)+" ..." | 399 result = env.subst("$TEXCOM",0,target,source)+" ..." |
385 else: | 400 else: |
386 result = '' | 401 result = '' |
387 return result | 402 return result |
388 | 403 |
389 def tex_emitter(target, source, env): | 404 def tex_eps_emitter(target, source, env): |
| 405 """An emitter for TeX and LaTeX sources when |
| 406 executing tex or latex. It will accept .ps and .eps |
| 407 graphics files |
| 408 """ |
| 409 (target, source) = tex_emitter_core(target, source, env, TexGraphics) |
| 410 |
| 411 return (target, source) |
| 412 |
| 413 def tex_pdf_emitter(target, source, env): |
| 414 """An emitter for TeX and LaTeX sources when |
| 415 executing pdftex or pdflatex. It will accept graphics |
| 416 files of types .pdf, .jpg, .png, .gif, and .tif |
| 417 """ |
| 418 (target, source) = tex_emitter_core(target, source, env, LatexGraphics) |
| 419 |
| 420 return (target, source) |
| 421 |
| 422 def ScanFiles(theFile, target, paths, file_tests, file_tests_search, env, graphics_extensions, targetdir): |
| 423 # for theFile (a Node) update any file_tests and search for graphics files |
| 424 # then find all included files and call ScanFiles for each of them |
| 425 content = theFile.get_contents() |
| 426 if Verbose: |
| 427 print " scanning ",str(theFile) |
| 428 |
| 429 for i in range(len(file_tests_search)): |
| 430 if file_tests[i][0] == None: |
| 431 file_tests[i][0] = file_tests_search[i].search(content) |
| 432 |
| 433 # For each file see if any graphics files are included |
| 434 # and set up target to create .pdf graphic |
| 435 # if this is in the pdflatex toolchain |
| 436 graphic_files = includegraphics_re.findall(content) |
| 437 if Verbose: |
| 438 print "graphics files in '%s': "%str(theFile),graphic_files |
| 439 for graphFile in graphic_files: |
| 440 graphicNode = FindFile(graphFile,graphics_extensions,paths,env,requireExt=True) |
| 441 # if building with pdflatex see if we need to build the .pdf version of the graphic file |
| 442 # I should probably come up with a better way to tell which builder we are using. |
| 443 if graphics_extensions == LatexGraphics: |
| 444 # see if we can build this graphics file by epstopdf |
| 445 graphicSrc = FindFile(graphFile,TexGraphics,paths,env,requireExt=Tru
e) |
| 446 # it seems that FindFile checks with no extension added |
| 447 # so if the extension is included in the name then both searches fin
d it |
| 448 # we don't want to try to build a .pdf from a .pdf so make sure src!
=file wanted |
| 449 if (graphicSrc != None) and (graphicSrc != graphicNode): |
| 450 if Verbose: |
| 451 if graphicNode == None: |
| 452 print "need to build '%s' by epstopdf %s -o %s" % (graph
File,graphicSrc,graphFile) |
| 453 else: |
| 454 print "no need to build '%s', but source file %s exists"
% (graphicNode,graphicSrc) |
| 455 graphicNode = env.PDF(graphicSrc) |
| 456 env.Depends(target[0],graphicNode) |
| 457 |
| 458 # recursively call this on each of the included files |
| 459 inc_files = [ ] |
| 460 inc_files.extend( include_re.findall(content) ) |
| 461 if Verbose: |
| 462 print "files included by '%s': "%str(theFile),inc_files |
| 463 # inc_files is list of file names as given. need to find them |
| 464 # using TEXINPUTS paths. |
| 465 |
| 466 for src in inc_files: |
| 467 srcNode = FindFile(src,['.tex','.ltx','.latex'],paths,env,requireExt=False) |
| 468 if srcNode != None: |
| 469 file_test = ScanFiles(srcNode, target, paths, file_tests, file_tests_search, env, graphics_extensions, targetdir) |
| 470 if Verbose: |
| 471 print " done scanning ",str(theFile) |
| 472 return file_tests |
| 473 |
| 474 def tex_emitter_core(target, source, env, graphics_extensions): |
390 """An emitter for TeX and LaTeX sources. | 475 """An emitter for TeX and LaTeX sources. |
391 For LaTeX sources we try and find the common created files that | 476 For LaTeX sources we try and find the common created files that |
392 are needed on subsequent runs of latex to finish tables of contents, | 477 are needed on subsequent runs of latex to finish tables of contents, |
393 bibliographies, indices, lists of figures, and hyperlink references. | 478 bibliographies, indices, lists of figures, and hyperlink references. |
394 """ | 479 """ |
395 targetbase = SCons.Util.splitext(str(target[0]))[0] | 480 targetbase = SCons.Util.splitext(str(target[0]))[0] |
396 basename = SCons.Util.splitext(str(source[0]))[0] | 481 basename = SCons.Util.splitext(str(source[0]))[0] |
397 basefile = os.path.split(str(basename))[1] | 482 basefile = os.path.split(str(basename))[1] |
398 | 483 |
399 basedir = os.path.split(str(source[0]))[0] | 484 basedir = os.path.split(str(source[0]))[0] |
| 485 targetdir = os.path.split(str(target[0]))[0] |
400 abspath = os.path.abspath(basedir) | 486 abspath = os.path.abspath(basedir) |
401 target[0].attributes.path = abspath | 487 target[0].attributes.path = abspath |
402 | 488 |
403 # | 489 # |
404 # file names we will make use of in searching the sources and log file | 490 # file names we will make use of in searching the sources and log file |
405 # | 491 # |
406 emit_suffixes = ['.aux', '.log', '.ilg', '.blg', '.nls', '.nlg', '.gls', '.glg'] + all_suffixes | 492 emit_suffixes = ['.aux', '.log', '.ilg', '.blg', '.nls', '.nlg', '.gls', '.glg'] + all_suffixes |
407 auxfilename = targetbase + '.aux' | 493 auxfilename = targetbase + '.aux' |
408 logfilename = targetbase + '.log' | 494 logfilename = targetbase + '.log' |
409 | 495 |
410 env.SideEffect(auxfilename,target[0]) | 496 env.SideEffect(auxfilename,target[0]) |
411 env.SideEffect(logfilename,target[0]) | 497 env.SideEffect(logfilename,target[0]) |
412 env.Clean(target[0],auxfilename) | 498 env.Clean(target[0],auxfilename) |
413 env.Clean(target[0],logfilename) | 499 env.Clean(target[0],logfilename) |
414 | 500 |
415 content = source[0].get_contents() | 501 content = source[0].get_contents() |
| 502 |
416 idx_exists = os.path.exists(targetbase + '.idx') | 503 idx_exists = os.path.exists(targetbase + '.idx') |
417 nlo_exists = os.path.exists(targetbase + '.nlo') | 504 nlo_exists = os.path.exists(targetbase + '.nlo') |
418 glo_exists = os.path.exists(targetbase + '.glo') | 505 glo_exists = os.path.exists(targetbase + '.glo') |
419 | 506 |
420 file_tests = [(auxfile_re.search(content),['.aux']), | 507 # set up list with the regular expressions |
421 (makeindex_re.search(content) or idx_exists,['.idx', '.ind', '.ilg']), | 508 # we use to find features used |
422 (bibliography_re.search(content),['.bbl', '.blg']), | 509 file_tests_search = [auxfile_re, |
423 (tableofcontents_re.search(content),['.toc']), | 510 makeindex_re, |
424 (listoffigures_re.search(content),['.lof']), | 511 bibliography_re, |
425 (listoftables_re.search(content),['.lot']), | 512 tableofcontents_re, |
426 (hyperref_re.search(content),['.out']), | 513 listoffigures_re, |
427 (makenomenclature_re.search(content) or nlo_exists,['.nlo', '.nls', '.nlg']), | 514 listoftables_re, |
428 (makeglossary_re.search(content) or glo_exists,['.glo', '.gls', '.glg']), | 515 hyperref_re, |
429 (beamer_re.search(content),['.nav', '.snm', '.out', '.toc']) ] | 516 makenomenclature_re, |
430 # Note we add the various makeindex files if the file produced by latex exists (.idx, .glo, .nlo) | 517 makeglossary_re, |
431 # This covers the case where the \makeindex, \makenomenclature, or \makeglossary | 518 beamer_re ] |
432 # is not in the main file but we want to clean the files and those made by makeindex | 519 # set up list with the file suffixes that need emitting |
| 520 # when a feature is found |
| 521 file_tests_suff = [['.aux'], |
| 522 ['.idx', '.ind', '.ilg'], |
| 523 ['.bbl', '.blg'], |
| 524 ['.toc'], |
| 525 ['.lof'], |
| 526 ['.lot'], |
| 527 ['.out'], |
| 528 ['.nlo', '.nls', '.nlg'], |
| 529 ['.glo', '.gls', '.glg'], |
| 530 ['.nav', '.snm', '.out', '.toc'] ] |
| 531 # build the list of lists |
| 532 file_tests = [] |
| 533 for i in range(len(file_tests_search)): |
| 534 file_tests.append( [None, file_tests_suff[i]] ) |
433 | 535 |
434 # TO-DO: need to add a way for the user to extend this list for whatever | 536 # TO-DO: need to add a way for the user to extend this list for whatever |
435 # auxiliary files they create in other (or their own) packages | 537 # auxiliary files they create in other (or their own) packages |
436 | 538 |
| 539 # get path list from both env['TEXINPUTS'] and env['ENV']['TEXINPUTS'] |
| 540 savedpath = modify_env_var(env, 'TEXINPUTS', abspath) |
| 541 paths = env['ENV']['TEXINPUTS'] |
| 542 if SCons.Util.is_List(paths): |
| 543 pass |
| 544 else: |
| 545 # Split at os.pathsep to convert into absolute path |
| 546 # TODO(1.5) |
| 547 #paths = paths.split(os.pathsep) |
| 548 paths = string.split(paths, os.pathsep) |
| 549 |
| 550 # now that we have the path list restore the env |
| 551 if savedpath is _null: |
| 552 try: |
| 553 del env['ENV']['TEXINPUTS'] |
| 554 except KeyError: |
| 555 pass # was never set |
| 556 else: |
| 557 env['ENV']['TEXINPUTS'] = savedpath |
| 558 if Verbose: |
| 559 print "search path ",paths |
| 560 |
| 561 file_tests = ScanFiles(source[0], target, paths, file_tests, file_tests_search, env, graphics_extensions, targetdir) |
| 562 |
437 for (theSearch,suffix_list) in file_tests: | 563 for (theSearch,suffix_list) in file_tests: |
438 if theSearch: | 564 if theSearch: |
439 for suffix in suffix_list: | 565 for suffix in suffix_list: |
440 env.SideEffect(targetbase + suffix,target[0]) | 566 env.SideEffect(targetbase + suffix,target[0]) |
441 env.Clean(target[0],targetbase + suffix) | 567 env.Clean(target[0],targetbase + suffix) |
442 | 568 |
443 # read log file to get all other files that latex creates and will read on the next pass | 569 # read log file to get all other files that latex creates and will read on the next pass |
444 if os.path.exists(logfilename): | 570 if os.path.exists(logfilename): |
445 content = open(logfilename, "rb").read() | 571 content = open(logfilename, "rb").read() |
446 out_files = openout_re.findall(content) | 572 out_files = openout_re.findall(content) |
(...skipping 42 matching lines...) |
489 global TeXLaTeXAction | 615 global TeXLaTeXAction |
490 if TeXLaTeXAction is None: | 616 if TeXLaTeXAction is None: |
491 TeXLaTeXAction = SCons.Action.Action(TeXLaTeXFunction, | 617 TeXLaTeXAction = SCons.Action.Action(TeXLaTeXFunction, |
492 strfunction=TeXLaTeXStrFunction) | 618 strfunction=TeXLaTeXStrFunction) |
493 | 619 |
494 import dvi | 620 import dvi |
495 dvi.generate(env) | 621 dvi.generate(env) |
496 | 622 |
497 bld = env['BUILDERS']['DVI'] | 623 bld = env['BUILDERS']['DVI'] |
498 bld.add_action('.tex', TeXLaTeXAction) | 624 bld.add_action('.tex', TeXLaTeXAction) |
499 bld.add_emitter('.tex', tex_emitter) | 625 bld.add_emitter('.tex', tex_eps_emitter) |
500 | 626 |
501 env['TEX'] = 'tex' | 627 env['TEX'] = 'tex' |
502 env['TEXFLAGS'] = SCons.Util.CLVar('-interaction=nonstopmode') | 628 env['TEXFLAGS'] = SCons.Util.CLVar('-interaction=nonstopmode') |
503 env['TEXCOM'] = 'cd ${TARGET.dir} && $TEX $TEXFLAGS ${SOURCE.file}' | 629 env['TEXCOM'] = 'cd ${TARGET.dir} && $TEX $TEXFLAGS ${SOURCE.file}' |
504 | 630 |
505 # Duplicate from latex.py. If latex.py goes away, then this is still OK. | 631 # Duplicate from latex.py. If latex.py goes away, then this is still OK. |
506 env['LATEX'] = 'latex' | 632 env['LATEX'] = 'latex' |
507 env['LATEXFLAGS'] = SCons.Util.CLVar('-interaction=nonstopmode') | 633 env['LATEXFLAGS'] = SCons.Util.CLVar('-interaction=nonstopmode') |
508 env['LATEXCOM'] = 'cd ${TARGET.dir} && $LATEX $LATEXFLAGS ${SOURCE.file}' | 634 env['LATEXCOM'] = 'cd ${TARGET.dir} && $LATEX $LATEXFLAGS ${SOURCE.file}' |
509 env['LATEXRETRIES'] = 3 | 635 env['LATEXRETRIES'] = 3 |
(...skipping 16 matching lines...) |
526 env['MAKENCLFLAGS'] = '-s ${MAKENCLSTYLE} -t ${SOURCE.filebase}.nlg' | 652 env['MAKENCLFLAGS'] = '-s ${MAKENCLSTYLE} -t ${SOURCE.filebase}.nlg' |
527 env['MAKENCLCOM'] = 'cd ${TARGET.dir} && $MAKENCL ${SOURCE.filebase}.nlo $MAKENCLFLAGS -o ${SOURCE.filebase}.nls' | 653 env['MAKENCLCOM'] = 'cd ${TARGET.dir} && $MAKENCL ${SOURCE.filebase}.nlo $MAKENCLFLAGS -o ${SOURCE.filebase}.nls' |
528 | 654 |
529 # Duplicate from pdflatex.py. If latex.py goes away, then this is still OK. | 655 # Duplicate from pdflatex.py. If latex.py goes away, then this is still OK. |
530 env['PDFLATEX'] = 'pdflatex' | 656 env['PDFLATEX'] = 'pdflatex' |
531 env['PDFLATEXFLAGS'] = SCons.Util.CLVar('-interaction=nonstopmode') | 657 env['PDFLATEXFLAGS'] = SCons.Util.CLVar('-interaction=nonstopmode') |
532 env['PDFLATEXCOM'] = 'cd ${TARGET.dir} && $PDFLATEX $PDFLATEXFLAGS ${SOURCE.file}' | 658 env['PDFLATEXCOM'] = 'cd ${TARGET.dir} && $PDFLATEX $PDFLATEXFLAGS ${SOURCE.file}' |
533 | 659 |
534 def exists(env): | 660 def exists(env): |
535 return env.Detect('tex') | 661 return env.Detect('tex') |
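
For reviewers who want to poke at the new scanning behaviour outside of SCons: the sketch below is not part of the patch. It exercises copies of the two new regular expressions (include_re and includegraphics_re) against a made-up .tex snippet to show what the emitter will treat as included files and graphics; the file names are invented for illustration, and the code is Python 2 to match the codebase.

    import re

    # Copies of the regexes added by this patch; the .tex content is hypothetical.
    include_re = re.compile(r'^[^%\n]*\\(?:include|input){([^}]*)}', re.MULTILINE)
    includegraphics_re = re.compile(r'^[^%\n]*\\(?:includegraphics(?:\[[^\]]+\])?){([^}]*)}', re.MULTILINE)

    content = r"""
    \input{chapters/intro}
    % \include{commented-out}
    \include{appendix}
    \includegraphics[width=0.5\textwidth]{figures/plot}
    \includegraphics{logo}
    """

    # Commented lines are skipped because [^%\n]* cannot cross a '%' on the same line.
    print include_re.findall(content)          # ['chapters/intro', 'appendix']
    print includegraphics_re.findall(content)  # ['figures/plot', 'logo']

In the patch itself these captured names are then resolved against the TEXINPUTS search path by FindFile(), and for the pdflatex chain an .eps/.ps source found by that search gets an env.PDF() conversion plus an env.Depends() on the main target.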