# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import argparse
import copy
from datetime import datetime
import os

from code import Code
import json_parse

# The template for the header file of the generated FeatureProvider.
HEADER_FILE_TEMPLATE = """
// Copyright %(year)s The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// GENERATED FROM THE FEATURES FILE:
// %(source_files)s
// DO NOT EDIT.

#ifndef %(header_guard)s
#define %(header_guard)s

#include "extensions/common/features/base_feature_provider.h"

namespace extensions {

class %(provider_class)s : public BaseFeatureProvider {
 public:
  %(provider_class)s();
  ~%(provider_class)s() override;

 private:
  DISALLOW_COPY_AND_ASSIGN(%(provider_class)s);
};

}  // namespace extensions

#endif  // %(header_guard)s
"""

# The beginning of the .cc file for the generated FeatureProvider.
CC_FILE_BEGIN = """
// Copyright %(year)s The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// GENERATED FROM THE FEATURES FILE:
// %(source_files)s
// DO NOT EDIT.

#include "%(header_file_path)s"

#include "extensions/common/features/api_feature.h"
#include "extensions/common/features/complex_feature.h"

namespace extensions {

"""

# The end of the .cc file for the generated FeatureProvider.
CC_FILE_END = """
%(provider_class)s::~%(provider_class)s() {}

}  // namespace extensions
"""

# A "grammar" for what is and isn't allowed in the features.json files. This
# grammar has to list all possible keys and the requirements for each. The
# format of each entry is:
#   'key': {
#     allowed_type_1: type_definition,
#     allowed_type_2: type_definition
#   }
# |allowed_types| are the types of values that can be used for a given key. The
# possible values are list, unicode, bool, and int.
# |type_definitions| provide more restrictions on the given type. The options
# are:
#   'subtype': Only applicable for lists. If provided, this enforces that each
#              entry in the list is of the specified type.
#   'cpp_map': A map of strings to C++ enums. When the compiler sees the given
#              enum string, it will replace it with the C++ version in the
#              compiled code. For instance, if a feature specifies
#              'channel': 'stable', the generated C++ will assign
#              version_info::Channel::STABLE to channel. The keys in this map
#              also serve as a list of all possible values.
#   'allow_all': Only applicable for lists. If present, this will check for
#                a value of "all" for a list value, and will replace it with
#                the collection of all possible values. For instance, if a
#                feature specifies 'contexts': 'all', the generated C++ will
#                assign the list of Feature::BLESSED_EXTENSION_CONTEXT,
#                Feature::BLESSED_WEB_PAGE_CONTEXT et al for contexts.
#   'values': A list of all possible allowed values for a given key.
# If a type definition does not have any restrictions (beyond the type itself),
# an empty definition ({}) is used.
FEATURE_GRAMMAR = (
  {
    'blacklist': {
      list: {'subtype': unicode}
    },
    'channel': {
      unicode: {
        'cpp_map': {
          'trunk': 'version_info::Channel::UNKNOWN',
          'canary': 'version_info::Channel::CANARY',
          'dev': 'version_info::Channel::DEV',
          'beta': 'version_info::Channel::BETA',
          'stable': 'version_info::Channel::STABLE',
        }
      }
    },
    'command_line_switch': {
      unicode: {}
    },
    'component_extensions_auto_granted': {
      bool: {}
    },
    'contexts': {
      list: {
        'cpp_map': {
          'blessed_extension': 'Feature::BLESSED_EXTENSION_CONTEXT',
          'blessed_web_page': 'Feature::BLESSED_WEB_PAGE_CONTEXT',
          'content_script': 'Feature::CONTENT_SCRIPT_CONTEXT',
          'extension_service_worker': 'Feature::SERVICE_WORKER_CONTEXT',
          'web_page': 'Feature::WEB_PAGE_CONTEXT',
          'webui': 'Feature::WEBUI_CONTEXT',
          'unblessed_extension': 'Feature::UNBLESSED_EXTENSION_CONTEXT',
        },
        'allow_all': True
      },
    },
    'default_parent': {
      bool: {'values': [True]}
    },
    'dependencies': {
      list: {'subtype': unicode}
    },
    'extension_types': {
      list: {
        'cpp_map': {
          'extension': 'Manifest::TYPE_EXTENSION',
          'hosted_app': 'Manifest::TYPE_HOSTED_APP',
          'legacy_packaged_app': 'Manifest::TYPE_LEGACY_PACKAGED_APP',
          'platform_app': 'Manifest::TYPE_PLATFORM_APP',
          'shared_module': 'Manifest::TYPE_SHARED_MODULE',
          'theme': 'Manifest::TYPE_THEME',
        },
        'allow_all': True
      },
    },
    'location': {
      unicode: {
        'cpp_map': {
          'component': 'SimpleFeature::COMPONENT_LOCATION',
          'external_component': 'SimpleFeature::EXTERNAL_COMPONENT_LOCATION',
          'policy': 'SimpleFeature::POLICY_LOCATION',
        }
      }
    },
    'internal': {
      bool: {'values': [True]}
    },
    'matches': {
      list: {'subtype': unicode}
    },
    'max_manifest_version': {
      int: {'values': [1]}
    },
    'min_manifest_version': {
      int: {'values': [2]}
    },
    'noparent': {
      bool: {'values': [True]}
    },
    'platforms': {
      list: {
        'cpp_map': {
          'chromeos': 'Feature::CHROMEOS_PLATFORM',
          'linux': 'Feature::LINUX_PLATFORM',
          'mac': 'Feature::MACOSX_PLATFORM',
          'win': 'Feature::WIN_PLATFORM',
        }
      }
    },
    'whitelist': {
      list: {'subtype': unicode}
    },
  })
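
# A minimal, hypothetical example of a features.json entry (not taken from any
# real features file) that satisfies the grammar above:
#
#   "fakeApi": {
#     "channel": "dev",
#     "contexts": ["blessed_extension", "content_script"],
#     "extension_types": ["extension"],
#     "max_manifest_version": 1
#   }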

# These keys are used to find the parents of different features, but are not
# compiled into the features themselves.
IGNORED_KEYS = ['noparent', 'default_parent']

# By default, if an error is encountered, assert to stop the compilation. This
# can be disabled for testing.
ENABLE_ASSERTIONS = True

# JSON parsing returns all strings as unicode types. For testing, we can treat
# plain str values as unicode so that tests don't have to write u'' everywhere.
STRINGS_TO_UNICODE = False

class Feature(object):
  """A representation of a single simple feature that can handle all parsing,
  validation, and code generation.
  """
  def __init__(self, name):
    self.name = name
    self.has_parent = False
    self.errors = []
    self.feature_values = {}

  def _GetType(self, value):
    """Returns the type of the given value. This can be different than type() if
    STRINGS_TO_UNICODE is enabled.
    """
    t = type(value)
    if not STRINGS_TO_UNICODE:
      return t
    if t is str:
      return unicode
    return t

  def _AddError(self, key, error):
    """Adds an error to the feature. If ENABLE_ASSERTIONS is active, this will
    also assert to stop the compilation process (since errors should never be
    found in production).
    """
    self.errors.append('Error parsing feature "%s" at key "%s": %s' %
                       (self.name, key, error))
    if ENABLE_ASSERTIONS:
      assert False, ('Error parsing feature "%s" at key "%s": %s' %
                     (self.name, key, error))

  def _GetCheckedValue(self, key, expected_type, expected_values,
                       cpp_map, value):
    """Returns the C++ value for a given key's python value, or None if the
    value is invalid.
    key: The key being parsed.
    expected_type: The expected type for this value, or None if any type is
        allowed.
    expected_values: The list of allowed values for this value, or None if any
        value is allowed.
    cpp_map: The map from python value -> cpp value for all allowed values,
        or None if no special mapping should be made.
    value: The value to check.
    """
    valid = True
    if expected_values and value not in expected_values:
      self._AddError(key, 'Illegal value: "%s"' % value)
      valid = False

    t = self._GetType(value)
    if expected_type and t is not expected_type:
      self._AddError(key, 'Illegal value: "%s"' % value)
      valid = False

    if not valid:
      return None

    if cpp_map:
      return cpp_map[value]

    if t in [str, unicode]:
      return '"%s"' % str(value)
    if t is int:
      return str(value)
    if t is bool:
      return 'true' if value else 'false'
    assert False, 'Unsupported type: %s' % value

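  # Illustration of _GetCheckedValue() above, with hypothetical arguments: for
  # key 'channel' with cpp_map {'stable': 'version_info::Channel::STABLE', ...}
  # and value 'stable', it returns 'version_info::Channel::STABLE'; an unlisted
  # value such as 'nightly' records an error and returns None.
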
  def _ParseKey(self, key, value, grammar):
    """Parses the specific key according to the grammar rule for that key if it
    is present in the json value.
    key: The key to parse.
    value: The full value for this feature.
    grammar: The rule for the specific key.
    """
    if key not in value:
      return
    v = value[key]

    is_all = False
    if v == 'all' and list in grammar and 'allow_all' in grammar[list]:
      v = []
      is_all = True

    t = self._GetType(v)
    if t not in grammar:
      self._AddError(key, 'Illegal value: "%s"' % v)
      return

    expected = grammar[t]
    expected_values = None
    cpp_map = None
    if 'values' in expected:
      expected_values = expected['values']
    elif 'cpp_map' in expected:
      cpp_map = expected['cpp_map']
      expected_values = cpp_map.keys()

    if is_all:
      v = copy.deepcopy(expected_values)

    expected_type = None
    if t is list and 'subtype' in expected:
      expected_type = expected['subtype']

    cpp_value = None
    # If this value is a list, iterate over each entry and validate. Otherwise,
    # validate the single value.
    if t is list:
      cpp_value = []
      for sv in v:
        cpp_sv = self._GetCheckedValue(key, expected_type,
                                       expected_values, cpp_map, sv)
        if cpp_sv:
          cpp_value.append(cpp_sv)
      if cpp_value:
        cpp_value = '{' + ','.join(cpp_value) + '}'
    else:
      cpp_value = self._GetCheckedValue(key, expected_type, expected_values,
                                        cpp_map, v)

    if cpp_value:
      self.feature_values[key] = cpp_value

  def SetParent(self, parent):
    """Sets the parent of this feature, and inherits all properties from that
    parent.
    """
    assert not self.feature_values, 'Parents must be set before parsing'
    self.feature_values = copy.deepcopy(parent.feature_values)
    self.has_parent = True

  def Parse(self, json):
    """Parses the feature from the given json value."""
    for key in json.keys():
      if key not in FEATURE_GRAMMAR:
        self._AddError(key, 'Unrecognized key')
    for key, key_grammar in FEATURE_GRAMMAR.iteritems():
      self._ParseKey(key, json, key_grammar)

  def GetCode(self, feature_class):
    """Returns the Code object for generating this feature."""
    c = Code()
    c.Append('std::unique_ptr<%s> feature(new %s());' %
             (feature_class, feature_class))
    c.Append('feature->set_name("%s");' % self.name)
    for key in sorted(self.feature_values.keys()):
      if key in IGNORED_KEYS:
        continue
      c.Append('feature->set_%s(%s);' % (key, self.feature_values[key]))
    return c
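
  # As a sketch with hypothetical feature values, GetCode() above emits C++
  # along the lines of:
  #   std::unique_ptr<APIFeature> feature(new APIFeature());
  #   feature->set_name("fakeApi");
  #   feature->set_channel(version_info::Channel::DEV);
  #   feature->set_contexts({Feature::BLESSED_EXTENSION_CONTEXT});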

class FeatureCompiler(object):
  """A compiler to load, parse, and generate C++ code for a number of
  features.json files."""
  def __init__(self, chrome_root, source_files, feature_class,
               provider_class, out_root, out_base_filename):
    # See __main__'s ArgumentParser for documentation on these properties.
    self._chrome_root = chrome_root
    self._source_files = source_files
    self._feature_class = feature_class
    self._provider_class = provider_class
    self._out_root = out_root
    self._out_base_filename = out_base_filename

    # The json value for the feature files.
    self._json = {}
    # The parsed features.
    self._features = {}

  def _Load(self):
    """Loads and parses the source from each input file and puts the result in
    self._json."""
    for f in self._source_files:
      abs_source_file = os.path.join(self._chrome_root, f)
      try:
        with open(abs_source_file, 'r') as f:
          f_json = json_parse.Parse(f.read())
      except:
        print('FAILED: Exception encountered while loading "%s"' %
              abs_source_file)
        raise
      dupes = set(f_json) & set(self._json)
      assert not dupes, 'Duplicate keys found: %s' % list(dupes)
      self._json.update(f_json)

  def _FindParent(self, feature_name, feature_value):
    """Checks to see if a feature has a parent. If it does, returns the
    parent."""
    sep = feature_name.rfind('.')
    if sep == -1 or 'noparent' in feature_value:
      return None
    parent_name = feature_name[:sep]
    if parent_name not in self._features:
      # TODO(devlin): It'd be kind of nice to be able to assert that the
      # deduced parent name is in our features, but some dotted features don't
      # have parents and also don't have noparent, e.g. system.cpu. We should
      # probably just noparent them so that we can assert this.
      # raise KeyError('Could not find parent "%s" for feature "%s".' %
      #                (parent_name, feature_name))
      return None
    parent_value = self._features[parent_name]
    parent = parent_value
    if type(parent_value) is list:
      # A complex parent is a list of simple features; inherit from the entry
      # explicitly marked as the default.
      parent = None
      for p in parent_value:
        if 'default_parent' in p.feature_values:
          parent = p
          break
      assert parent, 'No default parent found for %s' % parent_name
    return parent

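  # For example (hypothetical feature names): for a feature named
  # "fakeApi.subFeature", _FindParent() above deduces the parent "fakeApi" and
  # returns its parsed Feature, or, if "fakeApi" is a complex (list) feature,
  # the entry marked with 'default_parent'.
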
  def _CompileFeature(self, feature_name, feature_value):
    """Parses a single feature."""
    if 'nocompile' in feature_value:
      assert feature_value['nocompile'], (
          'nocompile should only be true; otherwise omit this key.')
      return
    parent = self._FindParent(feature_name, feature_value)
    # Handle complex features, which are lists of simple features.
    if type(feature_value) is list:
      feature_list = []
      # This doesn't handle nested complex features. I think that's probably for
      # the best.
      for v in feature_value:
        feature = Feature(feature_name)
        if parent:
          feature.SetParent(parent)
        feature.Parse(v)
        feature_list.append(feature)
      self._features[feature_name] = feature_list
      return

    feature = Feature(feature_name)
    if parent:
      feature.SetParent(parent)
    feature.Parse(feature_value)
    self._features[feature_name] = feature

  def Compile(self):
    """Parses all features after loading the input files."""
    self._Load()
    # Iterate in sorted order so that parents come first.
    for k in sorted(self._json.keys()):
      self._CompileFeature(k, self._json[k])

  def Render(self):
    """Returns the Code object for the body of the .cc file, which handles the
    initialization of all features."""
    c = Code()
    c.Append('%s::%s() {' % (self._provider_class, self._provider_class))
    c.Sblock()
    for k in sorted(self._features.keys()):
      c.Sblock('{')
      feature = self._features[k]
      if type(feature) is list:
        c.Append('std::unique_ptr<ComplexFeature::FeatureList> features(')
        c.Append(' new ComplexFeature::FeatureList());')
        for f in feature:
          c.Sblock('{')
          c.Concat(f.GetCode(self._feature_class))
          c.Append('features->push_back(std::move(feature));')
          c.Eblock('}')
        c.Append('std::unique_ptr<ComplexFeature> feature(')
        c.Append(' new ComplexFeature(std::move(features)));')
        c.Append('feature->set_name("%s");' % k)
      else:
        c.Concat(feature.GetCode(self._feature_class))
      c.Append('features_["%s"] = std::move(feature);' % k)
      c.Eblock('}')
    c.Eblock('}')
    return c

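  # Sketch of what Render() above produces for the provider constructor, using
  # hypothetical feature and class names:
  #   MyFeatureProvider::MyFeatureProvider() {
  #     {
  #       std::unique_ptr<APIFeature> feature(new APIFeature());
  #       feature->set_name("fakeApi");
  #       features_["fakeApi"] = std::move(feature);
  #     }
  #   }
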
  def Write(self):
    """Writes the output."""
    header_file_path = self._out_base_filename + '.h'
    cc_file_path = self._out_base_filename + '.cc'
    substitutions = ({
      'header_file_path': header_file_path,
      'header_guard': (header_file_path.replace('/', '_').
                       replace('.', '_').upper()),
      'provider_class': self._provider_class,
      'source_files': str(self._source_files),
      'year': str(datetime.now().year)
    })
    if not os.path.exists(self._out_root):
      os.makedirs(self._out_root)
    # Write the .h file.
    with open(os.path.join(self._out_root, header_file_path), 'w') as f:
      header_file = Code()
      header_file.Append(HEADER_FILE_TEMPLATE)
      header_file.Substitute(substitutions)
      f.write(header_file.Render().strip())
    # Write the .cc file.
    with open(os.path.join(self._out_root, cc_file_path), 'w') as f:
      cc_file = Code()
      cc_file.Append(CC_FILE_BEGIN)
      cc_file.Substitute(substitutions)
      cc_file.Concat(self.Render())
      cc_end = Code()
      cc_end.Append(CC_FILE_END)
      cc_end.Substitute(substitutions)
      cc_file.Concat(cc_end)
      f.write(cc_file.Render().strip())

if __name__ == '__main__':
  parser = argparse.ArgumentParser(description='Compile json feature files')
  parser.add_argument('chrome_root', type=str,
                      help='The root directory of the chrome checkout')
  parser.add_argument(
      'feature_class', type=str,
      help='The name of the class to use in feature generation ' +
           '(e.g. APIFeature, PermissionFeature)')
  parser.add_argument('provider_class', type=str,
                      help='The name of the class for the feature provider')
  parser.add_argument('out_root', type=str,
                      help='The root directory to generate the C++ files into')
  parser.add_argument(
      'out_base_filename', type=str,
      help='The base filename for the C++ files (.h and .cc will be appended)')
  parser.add_argument('source_files', type=str, nargs='+',
                      help='The source features.json files')
  args = parser.parse_args()
  c = FeatureCompiler(args.chrome_root, args.source_files, args.feature_class,
                      args.provider_class, args.out_root,
                      args.out_base_filename)
  c.Compile()
  c.Write()
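
# Example invocation (hypothetical paths, file names, and class names):
#   python feature_compiler.py /path/to/chromium/src APIFeature \
#       APIFeatureProvider gen/extensions/common/api api_features \
#       extensions/common/api/_api_features.json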