Chromium Code Reviews| OLD | NEW |
|---|---|
| (Empty) | |
| 1 # Copyright (c) 2017 The Chromium Authors. All rights reserved. | |
| 2 # Use of this source code is governed by a BSD-style license that can be | |
| 3 # found in the LICENSE file. | |
| 4 | |
| 5 """Generic generator for configuration files in JSON5 format. | |
| 6 | |
| 7 The configuration file is expected to contain either a data array or a data map, | |
| 8 an optional parameters validation map, and an optional metadata map. Examples: | |
| 9 { | |
| 10 data: [ | |
| 11 "simple_item", | |
| 12 "simple_item2", | |
| 13 {name:"complex_item", param:"Hello World"}, | |
| 14 ], | |
| 15 } | |
| 16 | |
| 17 { | |
| 18 metadata: { | |
| 19 namespace: "css", | |
| 20 }, | |
| 21 parameters: { | |
| 22 param1: {default: 1, valid_values:[1,2,3]}, | |
| 23 param2: {valid_type: "str"}, | |
| 24 }, | |
| 25 data: { | |
| 26 "simple_item": {}, | |
| 27 "item": {param1:1, param2: "Hello World"}, | |
| 28 "bad_item_fails_validation": { | |
| 29 name: "bad_item_fails_validation", | |
| 30 param1: "bad_value_fails_valid_values_check", | |
| 31 param2: 1.9, | |
| 32 unknown_param_fails_validation: true, | |
| 33 }, | |
| 34 }, | |
| 35 } | |
| 36 | |
| 37 The entries in data array/map are stored in the array self.name_dictionaries. | |
| 38 Each entry in name_dictionaries is always stored as a dictionary. | |
| 39 A simple non-map item is converted to a dictionary containing one entry with | |
| 40 key of "name" and its value the simple item. | |
| 41 | |
| 42 The order of entries in name_dictionaries is the same as that specified in the | |
| 43 data array. While for the data map case, by default the entries are sorted | |
| 44 alphabetically by name. | |
| 45 | |
| 46 The optional map "parameters" specifies the default values and the valid values | |
| 47 or valid types contained in the data entries. If parameters is specified, then | |
| 48 data entries may not contain keys not present in parameters. | |
| 49 | |
| 50 The optional map "metadata" overrides the values specified in default_metadata | |
| 51 if present, and stored as self.metadata. Keys in "metadata" must be present in | |
| 52 default_metadata or an exception is raised. | |
| 53 """ | |
| 54 | |
import argparse
import ast
import copy
import optparse
import os
import os.path
import re
| 61 | |
| 62 | |
| 63 def _json5_load(lines): | |
| 64 # Use json5.loads when json5 is available. Currently we use simple | |
| 65 # regexs to convert well-formed JSON5 to PYL format. | |
| 66 # Strip away comments and quote unquoted keys. | |
| 67 re_comment = re.compile(r"^\s*//.*$|//+ .*$", re.MULTILINE) | |
| 68 re_map_keys = re.compile(r"^\s+([$A-Za-z_][\w]*)\s*:", re.MULTILINE) | |
| 69 pyl = re.sub(re_map_keys, r"'\1':", re.sub(re_comment, "", lines)) | |
| 70 # Convert map values of true/false to Python version True/False. | |
| 71 re_true = re.compile(r":\s*true\b") | |
| 72 re_false = re.compile(r":\s*false\b") | |
| 73 pyl = re.sub(re_true, ":True", re.sub(re_false, ":False", pyl)) | |
| 74 return ast.literal_eval(pyl) | |
| 75 | |
| 76 | |
| 77 def _merge_doc(doc, doc2): | |
| 78 def _merge_dict(key): | |
| 79 if key in doc or key in doc2: | |
| 80 merged = doc.get(key, {}) | |
| 81 merged.update(doc2.get(key, {})) | |
| 82 doc[key] = merged | |
| 83 | |
| 84 _merge_dict("metadata") | |
| 85 _merge_dict("parameters") | |
| 86 if type(doc["data"]) is list: | |
| 87 doc["data"].extend(doc2["data"]) | |
| 88 else: | |
| 89 _merge_dict("data") | |
| 90 | |
| 91 | |
class Json5File(object):
    """Parsed representation of one or more merged JSON5 config files.

    Attributes:
      name_dictionaries: list of dicts, one per data entry.  Every entry has
          at least a "name" key; parameter keys get defaulted and validated
          values.  List-style data keeps file order; map-style data is
          sorted alphabetically by name.
      metadata: default_metadata overridden by the document's metadata map.
    """

    def __init__(self, doc, default_metadata=None):
        self.name_dictionaries = []
        # Deep-copy so mutating self.metadata never leaks into the caller's
        # default_metadata map.
        self.metadata = copy.deepcopy(default_metadata if default_metadata else {})
        self._defaults = {}
        self._process(doc)

    @classmethod
    def load_from_files(cls, file_paths, default_metadata):
        """Load, parse and merge the given .json5 files into one instance."""
        merged_doc = dict()
        for path in file_paths:
            assert path.endswith(".json5")
            with open(os.path.abspath(path)) as json5_file:
                doc = _json5_load(json5_file.read())
            if not merged_doc:
                merged_doc = doc
            else:
                _merge_doc(merged_doc, doc)
        # Use cls() rather than a hard-coded class name so subclasses work.
        return cls(merged_doc, default_metadata)

    def _process(self, doc):
        """Populate metadata, defaults and name_dictionaries from doc."""
        # Process optional metadata map entries.
        for key, value in doc.get("metadata", {}).items():
            self._process_metadata(key, value)
        # Get the optional parameters map and derive the default value map.
        parameters = doc.get("parameters", {})
        if parameters:
            self._get_defaults(parameters)
        # Process normal entries.
        items = doc["data"]
        if isinstance(items, list):
            # List-style data keeps the order given in the file.
            for item in items:
                self.name_dictionaries.append(self._get_entry(item, parameters))
        else:
            # Map-style data: inject the map key as the entry's name without
            # mutating the caller's document, then sort alphabetically.
            for name, value in items.items():
                item = dict(value)
                item["name"] = name
                self.name_dictionaries.append(self._get_entry(item, parameters))
            self.name_dictionaries.sort(key=lambda entry: entry["name"])

    def _process_metadata(self, key, value):
        """Override one default metadata value; unknown keys are an error."""
        if key not in self.metadata:
            raise Exception("Unknown metadata: '%s'\nKnown metadata: %s" %
                            (key, self.metadata.keys()))
        self.metadata[key] = value

    def _get_defaults(self, parameters):
        """Build the per-parameter default map (None when unspecified)."""
        for key, value in parameters.items():
            if value and "default" in value:
                self._defaults[key] = value["default"]
            else:
                self._defaults[key] = None

    def _get_entry(self, item, parameters):
        """Return one entry dict for item, validated against parameters.

        A non-dict item becomes {"name": item} plus all parameter defaults.
        Raises Exception for a dict item with no name, an unknown parameter
        key, or a value failing validation.
        """
        entry = copy.deepcopy(self._defaults)
        if not isinstance(item, dict):
            entry["name"] = item
            return entry
        # Work on a copy so the caller's dict is never mutated by pop().
        item = dict(item)
        if "name" not in item:
            raise Exception("Missing name in item: %s" % item)
        entry["name"] = item.pop("name")
        for key, value in item.items():
            if key not in parameters:
                raise Exception(
                    "Unknown parameter: '%s'\nKnown params: %s" %
                    (key, parameters.keys()))
            if parameters[key]:
                self._validate_parameter(parameters[key], value)
            entry[key] = value
        return entry

    def _validate_parameter(self, parameter, value):
        """Check value against valid_values / valid_type, if specified."""
        valid_values = parameter.get("valid_values")
        if valid_values and value not in valid_values:
            raise Exception("Unknown value: '%s'\nKnown values: %s" %
                            (value, valid_values))
        valid_type = parameter.get("valid_type")
        if valid_type and type(value).__name__ != valid_type:
            raise Exception("Incorrect type: '%s'\nExpected type: %s" %
                            (type(value).__name__, valid_type))
| 173 | |
| 174 | |
class Writer(object):
    """Base class for generators that consume Json5File inputs.

    Subclasses override class_name/default_metadata and register generator
    callables in self._outputs (file_name -> zero-argument callable that
    returns the file's contents).
    """

    # Subclasses should override.
    class_name = None
    default_metadata = None

    def __init__(self, json5_files):
        self._outputs = {}  # file_name -> generator
        self.gperf_path = None
        # Accept a single path as well as a list of paths.  `basestring`
        # exists only on Python 2; fall back to `str` on Python 3 instead
        # of raising NameError.
        try:
            string_types = basestring
        except NameError:
            string_types = str
        if isinstance(json5_files, string_types):
            json5_files = [json5_files]
        if json5_files:
            self.json5_file = Json5File.load_from_files(json5_files,
                                                        self.default_metadata)

    def _write_file_if_changed(self, output_dir, contents, file_name):
        """Write contents to output_dir/file_name unless already identical.

        Leaving an unchanged file untouched lets ninja skip rebuilding
        targets which depend on the output.
        """
        path = os.path.join(output_dir, file_name)

        # The build system should ensure our output directory exists, but
        # just in case.
        directory = os.path.dirname(path)
        if not os.path.exists(directory):
            os.makedirs(directory)

        # Compare first; only reopen for writing when contents differ.
        # (The old "a+"/truncate dance relied on append-mode positioning.)
        if os.path.exists(path):
            with open(path) as output_file:
                if output_file.read() == contents:
                    return
        with open(path, "w") as output_file:
            output_file.write(contents)

    def write_files(self, output_dir):
        """Run every registered generator and write its output file."""
        for file_name, generator in self._outputs.items():
            self._write_file_if_changed(output_dir, generator(), file_name)

    def set_gperf_path(self, gperf_path):
        self.gperf_path = gperf_path
| 212 | |
| 213 | |
class Maker(object):
    """Command-line driver that runs a Writer subclass over input files."""

    def __init__(self, writer_class):
        self._writer_class = writer_class

    def main(self, argv):
        """Parse argv, build the writer, and emit the generated files.

        Uses argparse (optparse is deprecated since Python 2.7, per review
        feedback).  Note: the old code called parser.parse_args() with no
        arguments, implicitly re-parsing sys.argv and ignoring the `argv`
        parameter; this version parses the argv it was given.
        """
        parser = argparse.ArgumentParser(prog=os.path.basename(argv[0]))
        parser.add_argument("--gperf", default="gperf")
        parser.add_argument("--developer_dir",
                            help="Path to Xcode.")
        parser.add_argument("--output_dir", default=os.getcwd())
        # nargs="+" enforces the old "USAGE: ... INPUT_FILE" requirement of
        # at least one input file (argparse exits non-zero when missing).
        parser.add_argument("files", nargs="+",
                            help="Input .json5 file(s).")
        options = parser.parse_args(argv[1:])

        if options.developer_dir:
            os.environ["DEVELOPER_DIR"] = options.developer_dir

        writer = self._writer_class(options.files)
        writer.set_gperf_path(options.gperf)
        writer.write_files(options.output_dir)
| OLD | NEW |