OLD | NEW |
1 #!/usr/bin/python | 1 #!/usr/bin/python |
2 | 2 |
from compiler.ast import Const
from compiler.ast import Dict
from compiler.ast import Discard
from compiler.ast import List
from compiler.ast import Module
from compiler.ast import Node
from compiler.ast import Stmt
import ast
import compiler
import copy
import gyp.common
import optparse
import os.path
import re
import shlex
import subprocess
import sys
11 | 19 |
12 | 20 |
(...skipping 87 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
100 if build_file_path in included: | 108 if build_file_path in included: |
101 return included | 109 return included |
102 | 110 |
103 included.append(build_file_path) | 111 included.append(build_file_path) |
104 | 112 |
105 for included_build_file in aux_data[build_file_path].get('included', []): | 113 for included_build_file in aux_data[build_file_path].get('included', []): |
106 GetIncludedBuildFiles(included_build_file, aux_data, included) | 114 GetIncludedBuildFiles(included_build_file, aux_data, included) |
107 | 115 |
108 return included | 116 return included |
109 | 117 |
def CheckedEval(file_contents):
  """Return the eval of a gyp file.

  The gyp file is restricted to dictionaries and lists only, and
  repeated keys are not allowed.

  Note that this is slower than eval() is.

  Uses the stdlib 'ast' module (available since Python 2.6) instead of
  the 'compiler' module, which is deprecated since 2.6 and was removed
  in Python 3.
  """

  syntax_tree = ast.parse(file_contents)
  assert isinstance(syntax_tree, ast.Module)
  statements = syntax_tree.body
  # A gyp file is a single expression (normally one dict literal).
  assert len(statements) == 1
  assert isinstance(statements[0], ast.Expr)
  return CheckNode(statements[0].value, 0)


def _ConstNodeValue(node):
  """Return the Python value held by a constant AST node.

  Handles ast.Constant (Python 3.8+) as well as the older ast.Str and
  ast.Num node types.  Raises TypeError for any non-constant node,
  matching CheckNode's contract for disallowed nodes.
  """
  if hasattr(ast, 'Constant') and isinstance(node, ast.Constant):
    return node.value
  if hasattr(ast, 'Str') and isinstance(node, ast.Str):
    return node.s
  if hasattr(ast, 'Num') and isinstance(node, ast.Num):
    return node.n
  raise TypeError("Unknown AST node " + repr(node))


def CheckNode(node, level):
  """Recursively convert a restricted AST node to plain Python data.

  Only dict literals, list literals, and constants are permitted.

  Raises:
    KeyError: a dict literal repeats a key; the message reports the
        nesting |level| at which the duplicate was found.
    TypeError: any other (disallowed) node type was encountered.
  """
  if isinstance(node, ast.Dict):
    result = {}
    for key_node, value_node in zip(node.keys, node.values):
      key = _ConstNodeValue(key_node)
      if key in result:
        raise KeyError("Key '" + key + "' repeated at level " +
                       repr(level))
      result[key] = CheckNode(value_node, level + 1)
    return result
  elif isinstance(node, ast.List):
    return [CheckNode(child, level + 1) for child in node.elts]
  else:
    return _ConstNodeValue(node)
110 | 160 |
111 def LoadOneBuildFile(build_file_path, data, aux_data, variables, includes, | 161 def LoadOneBuildFile(build_file_path, data, aux_data, variables, includes, |
112 is_target): | 162 is_target, check): |
113 if build_file_path in data: | 163 if build_file_path in data: |
114 return data[build_file_path] | 164 return data[build_file_path] |
115 | 165 |
116 if os.path.exists(build_file_path): | 166 if os.path.exists(build_file_path): |
117 build_file_contents = open(build_file_path).read() | 167 build_file_contents = open(build_file_path).read() |
118 else: | 168 else: |
119 raise Exception("%s not found (cwd: %s)" % (build_file_path, os.getcwd())) | 169 raise Exception("%s not found (cwd: %s)" % (build_file_path, os.getcwd())) |
120 | 170 |
121 build_file_data = None | 171 build_file_data = None |
122 try: | 172 try: |
123 build_file_data = eval(build_file_contents, {'__builtins__': None}, None) | 173 if check: |
| 174 build_file_data = CheckedEval(build_file_contents) |
| 175 else: |
| 176 build_file_data = eval(build_file_contents, {'__builtins__': None}, |
| 177 None) |
124 except SyntaxError, e: | 178 except SyntaxError, e: |
125 e.filename = build_file_path | 179 e.filename = build_file_path |
126 raise | 180 raise |
127 except Exception, e: | 181 except Exception, e: |
128 gyp.common.ExceptionAppend(e, 'while reading ' + build_file_path) | 182 gyp.common.ExceptionAppend(e, 'while reading ' + build_file_path) |
129 raise | 183 raise |
130 | 184 |
131 data[build_file_path] = build_file_data | 185 data[build_file_path] = build_file_data |
132 aux_data[build_file_path] = {} | 186 aux_data[build_file_path] = {} |
133 | 187 |
134 # Scan for includes and merge them in. | 188 # Scan for includes and merge them in. |
135 try: | 189 try: |
136 if is_target: | 190 if is_target: |
137 LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data, | 191 LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data, |
138 aux_data, variables, includes) | 192 aux_data, variables, includes, check) |
139 else: | 193 else: |
140 LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data, | 194 LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data, |
141 aux_data, variables, None) | 195 aux_data, variables, None, check) |
142 except Exception, e: | 196 except Exception, e: |
143 gyp.common.ExceptionAppend(e, | 197 gyp.common.ExceptionAppend(e, |
144 'while reading includes of ' + build_file_path) | 198 'while reading includes of ' + build_file_path) |
145 raise | 199 raise |
146 | 200 |
147 return build_file_data | 201 return build_file_data |
148 | 202 |
149 | 203 |
def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data,
                                  variables, includes, check):
  """Expand and merge the 'includes' of subdict, recursing into children.

  Any explicit |includes| supplied by the caller are merged first,
  followed by the files named in subdict's own 'includes' key (which is
  consumed here).  Nested dicts and lists are then walked so includes at
  any depth receive the same treatment.
  """
  includes_list = []
  if includes != None:
    includes_list.extend(includes)
  if 'includes' in subdict:
    subdict_dir = os.path.dirname(subdict_path)
    # Each "include" is specified relative to the file that names it, so
    # rebase it onto subdict_path's directory to get a usable path.
    for include in subdict['includes']:
      includes_list.append(
          os.path.normpath(os.path.join(subdict_dir, include)))
    # Unhook the includes list, it's no longer needed.
    del subdict['includes']

  # Merge in the included files, remembering each one in aux_data.
  for include in includes_list:
    aux_data[subdict_path].setdefault('included', []).append(include)
    MergeDicts(subdict,
               LoadOneBuildFile(include, data, aux_data, variables, None,
                                False, check),
               subdict_path, include)

  # Recurse into subdictionaries and lists.
  for key, value in subdict.iteritems():
    if value.__class__ == dict:
      LoadBuildFileIncludesIntoDict(value, subdict_path, data, aux_data,
                                    variables, None, check)
    elif value.__class__ == list:
      LoadBuildFileIncludesIntoList(value, subdict_path, data, aux_data,
                                    variables, check)
184 | 239 |
# This recurses into lists so that it can look for dicts.
def LoadBuildFileIncludesIntoList(sublist, sublist_path, data, aux_data,
                                  variables, check):
  """Walk sublist, expanding 'includes' in any dicts found at any depth.

  Lists carry no 'includes' key themselves; this helper exists purely to
  reach dicts nested inside lists (and lists nested inside lists).
  """
  for element in sublist:
    element_class = element.__class__
    if element_class == dict:
      LoadBuildFileIncludesIntoDict(element, sublist_path, data, aux_data,
                                    variables, None, check)
    elif element_class == list:
      LoadBuildFileIncludesIntoList(element, sublist_path, data, aux_data,
                                    variables, check)
196 | 251 |
197 # TODO(mark): I don't love this name. It just means that it's going to load | 252 # TODO(mark): I don't love this name. It just means that it's going to load |
198 # a build file that contains targets and is expected to provide a targets dict | 253 # a build file that contains targets and is expected to provide a targets dict |
199 # that contains the targets... | 254 # that contains the targets... |
200 def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes, | 255 def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes, |
201 depth): | 256 depth, check): |
202 global absolute_build_file_paths | 257 global absolute_build_file_paths |
203 | 258 |
204 # If depth is set, predefine the DEPTH variable to be a relative path from | 259 # If depth is set, predefine the DEPTH variable to be a relative path from |
205 # this build file's directory to the directory identified by depth. | 260 # this build file's directory to the directory identified by depth. |
206 if depth: | 261 if depth: |
207 d = gyp.common.RelativePath(depth, os.path.dirname(build_file_path)) | 262 d = gyp.common.RelativePath(depth, os.path.dirname(build_file_path)) |
208 if d == '': | 263 if d == '': |
209 variables['DEPTH'] = '.' | 264 variables['DEPTH'] = '.' |
210 else: | 265 else: |
211 variables['DEPTH'] = d | 266 variables['DEPTH'] = d |
212 | 267 |
213 # If the generator needs absolue paths, then do so. | 268 # If the generator needs absolue paths, then do so. |
214 if absolute_build_file_paths: | 269 if absolute_build_file_paths: |
215 build_file_path = os.path.abspath(build_file_path) | 270 build_file_path = os.path.abspath(build_file_path) |
216 | 271 |
217 if build_file_path in data: | 272 if build_file_path in data: |
218 # Already loaded. | 273 # Already loaded. |
219 return | 274 return |
220 | 275 |
221 build_file_data = LoadOneBuildFile(build_file_path, data, aux_data, variables, | 276 build_file_data = LoadOneBuildFile(build_file_path, data, aux_data, variables, |
222 includes, True) | 277 includes, True, check) |
223 | 278 |
224 # Store DEPTH for later use in generators. | 279 # Store DEPTH for later use in generators. |
225 build_file_data['_DEPTH'] = depth | 280 build_file_data['_DEPTH'] = depth |
226 | 281 |
227 # Set up the included_files key indicating which .gyp files contributed to | 282 # Set up the included_files key indicating which .gyp files contributed to |
228 # this target dict. | 283 # this target dict. |
229 if 'included_files' in build_file_data: | 284 if 'included_files' in build_file_data: |
230 raise KeyError, build_file_path + ' must not contain included_files key' | 285 raise KeyError, build_file_path + ' must not contain included_files key' |
231 | 286 |
232 included = GetIncludedBuildFiles(build_file_path, aux_data) | 287 included = GetIncludedBuildFiles(build_file_path, aux_data) |
(...skipping 43 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
276 | 331 |
277 if 'targets' in build_file_data: | 332 if 'targets' in build_file_data: |
278 for target_dict in build_file_data['targets']: | 333 for target_dict in build_file_data['targets']: |
279 if 'dependencies' not in target_dict: | 334 if 'dependencies' not in target_dict: |
280 continue | 335 continue |
281 for dependency in target_dict['dependencies']: | 336 for dependency in target_dict['dependencies']: |
282 other_build_file = \ | 337 other_build_file = \ |
283 gyp.common.BuildFileAndTarget(build_file_path, dependency)[0] | 338 gyp.common.BuildFileAndTarget(build_file_path, dependency)[0] |
284 try: | 339 try: |
285 LoadTargetBuildFile(other_build_file, data, aux_data, variables, | 340 LoadTargetBuildFile(other_build_file, data, aux_data, variables, |
286 includes, depth) | 341 includes, depth, check) |
287 except Exception, e: | 342 except Exception, e: |
288 gyp.common.ExceptionAppend( | 343 gyp.common.ExceptionAppend( |
289 e, 'while loading dependencies of %s' % build_file_path) | 344 e, 'while loading dependencies of %s' % build_file_path) |
290 raise | 345 raise |
291 | 346 |
292 return data | 347 return data |
293 | 348 |
294 # Look for the bracket that matches the first bracket seen in a | 349 # Look for the bracket that matches the first bracket seen in a |
295 # string, and return the start and end as a tuple. For example, if | 350 # string, and return the start and end as a tuple. For example, if |
296 # the input is something like "<(foo <(bar)) blah", then it would | 351 # the input is something like "<(foo <(bar)) blah", then it would |
(...skipping 1380 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1677 if working_directory and not isinstance(working_directory, str): | 1732 if working_directory and not isinstance(working_directory, str): |
1678 raise Exception("The 'working_directory' for 'run_as' in target %s " | 1733 raise Exception("The 'working_directory' for 'run_as' in target %s " |
1679 "in file %s should be a string." % | 1734 "in file %s should be a string." % |
1680 (target_name, build_file)) | 1735 (target_name, build_file)) |
1681 environment = run_as.get('environment') | 1736 environment = run_as.get('environment') |
1682 if environment and not isinstance(environment, dict): | 1737 if environment and not isinstance(environment, dict): |
1683 raise Exception("The 'environment' for 'run_as' in target %s " | 1738 raise Exception("The 'environment' for 'run_as' in target %s " |
1684 "in file %s should be a dictionary." % | 1739 "in file %s should be a dictionary." % |
1685 (target_name, build_file)) | 1740 (target_name, build_file)) |
1686 | 1741 |
1687 def Load(build_files, variables, includes, depth, generator_input_info): | 1742 def Load(build_files, variables, includes, depth, generator_input_info, check): |
1688 # Set up path_sections and non_configuration_keys with the default data plus | 1743 # Set up path_sections and non_configuration_keys with the default data plus |
1689 # the generator-specifc data. | 1744 # the generator-specifc data. |
1690 global path_sections | 1745 global path_sections |
1691 path_sections = base_path_sections[:] | 1746 path_sections = base_path_sections[:] |
1692 path_sections.extend(generator_input_info['path_sections']) | 1747 path_sections.extend(generator_input_info['path_sections']) |
1693 | 1748 |
1694 global non_configuration_keys | 1749 global non_configuration_keys |
1695 non_configuration_keys = base_non_configuration_keys[:] | 1750 non_configuration_keys = base_non_configuration_keys[:] |
1696 non_configuration_keys.extend(generator_input_info['non_configuration_keys']) | 1751 non_configuration_keys.extend(generator_input_info['non_configuration_keys']) |
1697 | 1752 |
(...skipping 13 matching lines...) Expand all Loading... |
1711 # the |data| dictionary such that the keys to |data| are build file names, | 1766 # the |data| dictionary such that the keys to |data| are build file names, |
1712 # and the values are the entire build file contents after "early" or "pre" | 1767 # and the values are the entire build file contents after "early" or "pre" |
1713 # processing has been done and includes have been resolved. | 1768 # processing has been done and includes have been resolved. |
1714 data = {} | 1769 data = {} |
1715 aux_data = {} | 1770 aux_data = {} |
1716 for build_file in build_files: | 1771 for build_file in build_files: |
1717 # Normalize paths everywhere. This is important because paths will be | 1772 # Normalize paths everywhere. This is important because paths will be |
1718 # used as keys to the data dict and for references between input files. | 1773 # used as keys to the data dict and for references between input files. |
1719 build_file = os.path.normpath(build_file) | 1774 build_file = os.path.normpath(build_file) |
1720 try: | 1775 try: |
1721 LoadTargetBuildFile(build_file, data, aux_data, variables, includes, depth
) | 1776 LoadTargetBuildFile(build_file, data, aux_data, variables, includes, |
| 1777 depth, check) |
1722 except Exception, e: | 1778 except Exception, e: |
1723 gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file) | 1779 gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file) |
1724 raise | 1780 raise |
1725 | 1781 |
1726 # Build a dict to access each target's subdict by qualified name. | 1782 # Build a dict to access each target's subdict by qualified name. |
1727 targets = BuildTargetsDict(data) | 1783 targets = BuildTargetsDict(data) |
1728 | 1784 |
1729 # Fully qualify all dependency links. | 1785 # Fully qualify all dependency links. |
1730 QualifyDependencies(targets) | 1786 QualifyDependencies(targets) |
1731 | 1787 |
(...skipping 45 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1777 | 1833 |
1778 # Validate run_as sections in targets. | 1834 # Validate run_as sections in targets. |
1779 for target in flat_list: | 1835 for target in flat_list: |
1780 build_file = gyp.common.BuildFileAndTarget('', target)[0] | 1836 build_file = gyp.common.BuildFileAndTarget('', target)[0] |
1781 ValidateRunAsInTarget(target, targets[target], build_file) | 1837 ValidateRunAsInTarget(target, targets[target], build_file) |
1782 | 1838 |
1783 # TODO(mark): Return |data| for now because the generator needs a list of | 1839 # TODO(mark): Return |data| for now because the generator needs a list of |
1784 # build files that came in. In the future, maybe it should just accept | 1840 # build files that came in. In the future, maybe it should just accept |
1785 # a list, and not the whole data dict. | 1841 # a list, and not the whole data dict. |
1786 return [flat_list, targets, data] | 1842 return [flat_list, targets, data] |
OLD | NEW |