| OLD | NEW |
| 1 # Copyright (c) 2017 The Chromium Authors. All rights reserved. | 1 # Copyright (c) 2017 The Chromium Authors. All rights reserved. |
| 2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
| 3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
| 4 | 4 |
| 5 """Generic generator for configuration files in JSON5 format. | 5 """Generic generator for configuration files in JSON5 format. |
| 6 | 6 |
| 7 The configuration file is expected to contain either a data array or a data map, | 7 The configuration file is expected to contain either a data array or a data map, |
| 8 an optional parameters validation map, and an optional metadata map. Examples: | 8 an optional parameters validation map, and an optional metadata map. Examples: |
| 9 { | 9 { |
| 10 data: [ | 10 data: [ |
| (...skipping 47 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 58 import os | 58 import os |
| 59 import os.path | 59 import os.path |
| 60 import re | 60 import re |
| 61 | 61 |
| 62 | 62 |
| 63 def _json5_load(lines): | 63 def _json5_load(lines): |
| 64 # Use json5.loads when json5 is available. Currently we use simple | 64 # Use json5.loads when json5 is available. Currently we use simple |
| 65 # regexs to convert well-formed JSON5 to PYL format. | 65 # regexs to convert well-formed JSON5 to PYL format. |
| 66 # Strip away comments and quote unquoted keys. | 66 # Strip away comments and quote unquoted keys. |
| 67 re_comment = re.compile(r"^\s*//.*$|//+ .*$", re.MULTILINE) | 67 re_comment = re.compile(r"^\s*//.*$|//+ .*$", re.MULTILINE) |
| 68 re_map_keys = re.compile(r"^\s+([$A-Za-z_][\w]*)\s*:", re.MULTILINE) | 68 re_map_keys = re.compile(r"^\s*([$A-Za-z_][\w]*)\s*:", re.MULTILINE) |
| 69 pyl = re.sub(re_map_keys, r"'\1':", re.sub(re_comment, "", lines)) | 69 pyl = re.sub(re_map_keys, r"'\1':", re.sub(re_comment, "", lines)) |
| 70 # Convert map values of true/false to Python version True/False. | 70 # Convert map values of true/false to Python version True/False. |
| 71 re_true = re.compile(r":\s*true\b") | 71 re_true = re.compile(r":\s*true\b") |
| 72 re_false = re.compile(r":\s*false\b") | 72 re_false = re.compile(r":\s*false\b") |
| 73 pyl = re.sub(re_true, ":True", re.sub(re_false, ":False", pyl)) | 73 pyl = re.sub(re_true, ":True", re.sub(re_false, ":False", pyl)) |
| 74 return ast.literal_eval(pyl) | 74 return ast.literal_eval(pyl) |
| 75 | 75 |
| 76 | 76 |
| 77 def _merge_doc(doc, doc2): | 77 def _merge_doc(doc, doc2): |
| 78 def _merge_dict(key): | 78 def _merge_dict(key): |
| 79 if key in doc or key in doc2: | 79 if key in doc or key in doc2: |
| 80 merged = doc.get(key, {}) | 80 merged = doc.get(key, {}) |
| 81 merged.update(doc2.get(key, {})) | 81 merged.update(doc2.get(key, {})) |
| 82 doc[key] = merged | 82 doc[key] = merged |
| 83 | 83 |
| 84 _merge_dict("metadata") | 84 _merge_dict("metadata") |
| 85 _merge_dict("parameters") | 85 _merge_dict("parameters") |
| 86 if type(doc["data"]) is list: | 86 if type(doc["data"]) is list: |
| 87 doc["data"].extend(doc2["data"]) | 87 doc["data"].extend(doc2["data"]) |
| 88 else: | 88 else: |
| 89 _merge_dict("data") | 89 _merge_dict("data") |
| 90 | 90 |
| 91 | 91 |
class Json5File(object):
    """Parsed, merged representation of one or more JSON5 config files.

    Attributes:
        file_paths: Paths of the source .json5 files (kept for reference).
        name_dictionaries: One entry dict per data item, sorted by "name".
        metadata: Merged metadata map (file values override the defaults).
        parameters: Merged parameter-validation map.
    """

    def __init__(self, file_paths, doc, default_metadata=None,
                 default_parameters=None):
        """Builds the instance from an already-parsed (merged) document.

        Args:
            file_paths: Paths the document was loaded from.
            doc: Parsed document with a "data" list or map plus optional
                "metadata" and "parameters" maps.
            default_metadata: Map of known metadata keys to default values.
            default_parameters: Map of known parameter names to validation
                rules ("valid_values", "valid_type", "default").
        """
        self.file_paths = file_paths
        self.name_dictionaries = []
        # Deep-copy the defaults so per-instance updates never leak back
        # into the caller's (typically class-level) shared dictionaries.
        self.metadata = copy.deepcopy(default_metadata if default_metadata
                                      else {})
        self.parameters = copy.deepcopy(default_parameters
                                        if default_parameters else {})
        self._defaults = {}
        self._process(doc)

    @classmethod
    def load_from_files(cls, file_paths, default_metadata,
                        default_parameters=None):
        """Loads, parses and merges the given .json5 files into one object."""
        merged_doc = dict()
        for path in file_paths:
            assert path.endswith(".json5")
            with open(os.path.abspath(path)) as json5_file:
                doc = _json5_load(json5_file.read())
                if not merged_doc:
                    merged_doc = doc
                else:
                    _merge_doc(merged_doc, doc)
        # Instantiate via cls (not Json5File) so subclasses using this
        # factory get instances of the subclass.
        return cls(file_paths, merged_doc, default_metadata,
                   default_parameters)

    def _process(self, doc):
        """Validates and ingests the document's metadata/parameters/data."""
        # Process optional metadata map entries.
        for key, value in doc.get("metadata", {}).items():
            self._process_metadata(key, value)
        # Merge the optional parameters map over the defaults, then derive
        # the per-entry default value map from it.
        self.parameters.update(doc.get("parameters", {}))
        if self.parameters:
            self._get_defaults()
        # Process normal entries; "data" may be a list or a name-keyed map.
        items = doc["data"]
        if type(items) is list:
            for item in items:
                self.name_dictionaries.append(self._get_entry(item))
        else:
            for key, value in items.items():
                value["name"] = key
                self.name_dictionaries.append(self._get_entry(value))
        self.name_dictionaries.sort(key=lambda entry: entry["name"])

    def _process_metadata(self, key, value):
        """Records one metadata value, rejecting unknown metadata keys."""
        if key not in self.metadata:
            raise Exception("Unknown metadata: '%s'\nKnown metadata: %s" %
                            (key, self.metadata.keys()))
        self.metadata[key] = value

    def _get_defaults(self):
        """Extracts each parameter's default value (None when unspecified)."""
        for key, value in self.parameters.items():
            if value and "default" in value:
                self._defaults[key] = value["default"]
            else:
                self._defaults[key] = None

    def _get_entry(self, item):
        """Turns one data item into an entry dict seeded with the defaults.

        A bare (non-dict) item is treated as just a name. Note that a dict
        item has its "name" key popped, i.e. the input item is mutated.
        Raises on missing names, unknown parameters and invalid values.
        """
        entry = copy.deepcopy(self._defaults)
        if type(item) is not dict:
            entry["name"] = item
            return entry
        if "name" not in item:
            raise Exception("Missing name in item: %s" % item)
        entry["name"] = item.pop("name")
        for key, value in item.items():
            if key not in self.parameters:
                raise Exception(
                    "Unknown parameter: '%s'\nKnown params: %s" %
                    (key, self.parameters.keys()))
            if self.parameters[key]:
                self._validate_parameter(self.parameters[key], value)
            entry[key] = value
        return entry

    def _validate_parameter(self, parameter, value):
        """Checks a value against a parameter's valid_values / valid_type."""
        valid_values = parameter.get("valid_values")
        if valid_values and value not in valid_values:
            raise Exception("Unknown value: '%s'\nKnown values: %s" %
                            (value, valid_values))
        valid_type = parameter.get("valid_type")
        if valid_type and type(value).__name__ != valid_type:
            raise Exception("Incorrect type: '%s'\nExpected type: %s" %
                            (type(value).__name__, valid_type))
| 173 | 175 |
| 174 | 176 |
class Writer(object):
    """Base class for generators that consume JSON5 configuration files.

    Subclasses should override the three class attributes below; the
    rest of the class body continues past this section of the file.
    """
    # Subclasses should override.
    class_name = None
    default_metadata = None
    default_parameters = None

    def __init__(self, json5_files):
        """Loads the given JSON5 file(s) into self.json5_file.

        Args:
            json5_files: A single path string, or a list of paths, to
                .json5 files. Falsy input leaves self.json5_file unset.
        """
        self._outputs = {}  # file_name -> generator
        self.gperf_path = None
        # Accept a bare string as a convenience for a single input file.
        # NOTE(review): basestring is Python 2 only.
        if isinstance(json5_files, basestring):
            json5_files = [json5_files]
        if json5_files:
            self.json5_file = Json5File.load_from_files(json5_files,
                                                        self.default_metadata,
                                                        self.default_parameters)
| 188 | 192 |
| 189 def _write_file_if_changed(self, output_dir, contents, file_name): | 193 def _write_file_if_changed(self, output_dir, contents, file_name): |
| 190 path = os.path.join(output_dir, file_name) | 194 path = os.path.join(output_dir, file_name) |
| 191 | 195 |
| 192 # The build system should ensure our output directory exists, but just | 196 # The build system should ensure our output directory exists, but just |
| 193 # in case. | 197 # in case. |
| 194 directory = os.path.dirname(path) | 198 directory = os.path.dirname(path) |
| 195 if not os.path.exists(directory): | 199 if not os.path.exists(directory): |
| 196 os.makedirs(directory) | 200 os.makedirs(directory) |
| 197 | 201 |
| (...skipping 26 matching lines...) Expand all Loading... |
| 224 parser.add_argument("--developer_dir", help="Path to Xcode.") | 228 parser.add_argument("--developer_dir", help="Path to Xcode.") |
| 225 parser.add_argument("--output_dir", default=os.getcwd()) | 229 parser.add_argument("--output_dir", default=os.getcwd()) |
| 226 args = parser.parse_args() | 230 args = parser.parse_args() |
| 227 | 231 |
| 228 if args.developer_dir: | 232 if args.developer_dir: |
| 229 os.environ["DEVELOPER_DIR"] = args.developer_dir | 233 os.environ["DEVELOPER_DIR"] = args.developer_dir |
| 230 | 234 |
| 231 writer = self._writer_class(args.files) | 235 writer = self._writer_class(args.files) |
| 232 writer.set_gperf_path(args.gperf) | 236 writer.set_gperf_path(args.gperf) |
| 233 writer.write_files(args.output_dir) | 237 writer.write_files(args.output_dir) |
| OLD | NEW |