| Index: chrome/common/extensions/docs/server2/features_bundle.py
|
| diff --git a/chrome/common/extensions/docs/server2/features_bundle.py b/chrome/common/extensions/docs/server2/features_bundle.py
|
| index 98a0dfe1d49ac693b51d3d6b37a21ac8fd2216e4..091b407502d5a5cc6d7e73f0a1a034e29b17b9e6 100644
|
| --- a/chrome/common/extensions/docs/server2/features_bundle.py
|
| +++ b/chrome/common/extensions/docs/server2/features_bundle.py
|
| @@ -2,13 +2,16 @@
|
| # Use of this source code is governed by a BSD-style license that can be
|
| # found in the LICENSE file.
|
|
|
| -import posixpath
|
| +from copy import copy
|
|
|
| +from branch_utility import BranchUtility
|
| from compiled_file_system import SingleFile, Unicode
|
| +from docs_server_utils import StringIdentity
|
| from extensions_paths import API_PATHS, JSON_TEMPLATES
|
| -import features_utility
|
| from file_system import FileNotFoundError
|
| from future import Future
|
| +from path_util import Join
|
| +from platform_util import GetExtensionTypes, PlatformToExtensionType
|
| from third_party.json_schema_compiler.json_parse import Parse
|
|
|
|
|
| @@ -17,130 +20,351 @@ _MANIFEST_FEATURES = '_manifest_features.json'
|
| _PERMISSION_FEATURES = '_permission_features.json'
|
|
|
|
|
| -def _GetFeaturePaths(feature_file, *extra_paths):
|
| - paths = [posixpath.join(api_path, feature_file) for api_path in API_PATHS]
|
| - paths.extend(extra_paths)
|
| - return paths
|
| -
|
| -
|
| -def _AddPlatformsAndChannelsFromDependencies(feature,
|
| - api_features,
|
| - manifest_features,
|
| - permission_features):
|
| - features_map = {
|
| - 'api': api_features,
|
| - 'manifest': manifest_features,
|
| - 'permission': permission_features,
|
| - }
|
| - dependencies = feature.get('dependencies')
|
| - if dependencies is None:
|
| - return
|
| - platforms = set()
|
| - channel = None
|
| - for dependency in dependencies:
|
| - dep_type, dep_name = dependency.split(':')
|
| - dependency_features = features_map[dep_type]
|
| - dependency_feature = dependency_features.get(dep_name)
|
| - # If the dependency can't be resolved, it is inaccessible and therefore
|
| - # so is this feature.
|
| - if dependency_feature is None:
|
| - return
|
| - # Import the platforms from the dependency. The logic is a bit odd; if
|
| - # |feature| specifies platforms the it's considered an override. If not,
|
| - # we form the union of all dependency's platforms.
|
| - # TODO(kalman): Fix this (see http://crbug.com/322094).
|
| - platforms.update(dependency_feature.get('platforms', set()))
|
| - # Import the channel from the dependency.
|
| - channel = dependency_feature.get('channel', channel)
|
| - if platforms and not feature.get('platforms'):
|
| - feature['platforms'] = list(platforms)
|
| - if channel and not feature.get('channel'):
|
| - feature['channel'] = channel
|
| +def HasParentFeature(feature_name, feature, all_feature_names):
|
| + # A feature has a parent if it has a . in its name, its parent exists,
|
| + # and it does not explicitly specify that it has no parent.
|
| + return ('.' in feature_name and
|
| + feature_name.rsplit('.', 1)[0] in all_feature_names and
|
| + not feature.get('noparent'))
|
|
|
|
|
| -class _FeaturesCache(object):
|
| - def __init__(self, file_system, compiled_fs_factory, json_paths):
|
| - populate = self._CreateCache
|
| - if len(json_paths) == 1:
|
| - populate = SingleFile(populate)
|
| +def GetParentFeature(feature_name, feature, all_feature_names):
|
| + '''Returns the name of the parent feature, or None if it does not have a
|
| + parent.
|
| + '''
|
| + if not HasParentFeature(feature_name, feature, all_feature_names):
|
| + return None
|
| + return feature_name.rsplit('.', 1)[0]
|
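For a concrete sense of the parent lookup, a minimal sketch (the feature names below are made up, and the import assumes this module is importable as features_bundle):

    from features_bundle import GetParentFeature

    all_names = {'app', 'app.window', 'events'}
    GetParentFeature('app.window', {}, all_names)                  # 'app'
    GetParentFeature('app.window', {'noparent': True}, all_names)  # None
    GetParentFeature('events', {}, all_names)                      # None: no '.'
    GetParentFeature('foo.bar', {}, all_names)                     # None: no 'foo'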
| +
|
| +
|
| +def _CreateFeaturesFromJSONFutures(json_futures):
|
| + '''Returns a dict of features. The value of each feature is a list with
|
| + all of its possible values.
|
| + '''
|
| + def ignore_feature(name, value):
|
| + '''Returns true if this feature should be ignored. Features are ignored if
|
| + they are only available to whitelisted apps or component extensions/apps, as
|
| + in these cases the APIs are not available to public developers.
|
| +
|
| + Private APIs are also unavailable to public developers, but logic elsewhere
|
| + makes sure they are not listed. So they shouldn't be ignored via this
|
| + mechanism.
|
| + '''
|
| + if name.endswith('Private'):
|
| + return False
|
| + return value.get('location') == 'component' or 'whitelist' in value
|
| +
|
| + features = {}
|
| +
|
| + for json_future in json_futures:
|
| + try:
|
| + features_json = Parse(json_future.Get())
|
| + except FileNotFoundError:
|
| + # Not all file system configurations have the extra files.
|
| + continue
|
| + for name, rawvalue in features_json.iteritems():
|
| + if name not in features:
|
| + features[name] = []
|
| + for value in (rawvalue if isinstance(rawvalue, list) else (rawvalue,)):
|
| + if not ignore_feature(name, value):
|
| + features[name].append(value)
|
| +
|
| + return features
|
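Roughly, this helper turns raw *_features.json content into a name-to-list-of-values map, dropping whitelisted and component-only entries. A self-contained sketch of that filtering, using made-up feature data rather than a real features file:

    # Same rule as ignore_feature above, restated standalone for illustration.
    def should_ignore(name, value):
      if name.endswith('Private'):
        return False
      return value.get('location') == 'component' or 'whitelist' in value

    raw = {
      'bookmarks': {'channel': 'stable', 'extension_types': ['extension']},
      'someApi': [
        {'channel': 'beta', 'extension_types': ['extension']},
        {'channel': 'stable', 'whitelist': ['abc123']},  # dropped: whitelisted
      ],
    }
    features = {}
    for name, rawvalue in raw.items():
      values = rawvalue if isinstance(rawvalue, list) else (rawvalue,)
      features[name] = [v for v in values if not should_ignore(name, v)]
    # features == {'bookmarks': [{'channel': 'stable', ...}],
    #              'someApi': [{'channel': 'beta', ...}]}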
| +
|
| +
|
| +def _CopyParentFeatureValues(child, parent):
|
| + '''Takes data from feature dict |parent| and copies/merges it
|
| + into feature dict |child|. Two passes are run over the features,
|
| + and on the first pass features are not resolved across caches,
|
| + so a None value for |parent| may be passed in.
|
| + '''
|
| + if parent is None:
|
| + return child
|
| + merged = copy(parent)
|
| + merged.pop('noparent', None)
|
| + merged.pop('name', None)
|
| + merged.update(child)
|
| + return merged
|
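In other words, child keys always win, and a parent's 'name' and 'noparent' entries never propagate down. A tiny sketch with hypothetical values:

    parent = {'name': 'app', 'channel': 'stable',
              'extension_types': ['platform_app'], 'noparent': True}
    child = {'channel': 'beta'}

    merged = dict(parent)         # equivalent to copy(parent) above
    merged.pop('noparent', None)  # never inherited
    merged.pop('name', None)      # the child keeps its own name
    merged.update(child)          # child values override the parent's
    # merged == {'channel': 'beta', 'extension_types': ['platform_app']}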
| +
|
| +
|
| +def _ResolveFeature(feature_name,
|
| + feature_values,
|
| + extra_feature_values,
|
| + platform,
|
| + features_type,
|
| + features_map):
|
| + '''Filters and combines the possible values for a feature into one dict.
|
| +
|
| + It uses |features_map| to resolve dependencies for each value and inherit
|
| + unspecified platform and channel data. |feature_values| is then filtered
|
| + by platform and all values with the most stable channel are merged into one
|
| + dict. All values in |extra_feature_values| get merged into this dict.
|
| +
|
| + Returns |resolve_successful| and |feature|. |resolve_successful| is False
|
| + if the feature's dependencies have not been merged yet themselves, meaning
|
| + that this feature cannot be reliably resolved yet. |feature| is the
|
| + resulting feature dict, or None if the feature does not exist on the
|
| + platform specified.
|
| + '''
|
| + feature = None
|
| + most_stable_channel = None
|
| + for value in feature_values:
|
| + # If 'extension_types' or 'channel' is unspecified, these values should
|
| + # be inherited from dependencies. If they are specified, these values
|
| + # should override anything specified by dependencies.
|
| + inherit_valid_platform = 'extension_types' not in value
|
| + if inherit_valid_platform:
|
| + valid_platform = None
|
| + else:
|
| + valid_platform = (value['extension_types'] == 'all' or
|
| + platform in value['extension_types'])
|
| + inherit_channel = 'channel' not in value
|
| + channel = value.get('channel')
|
| +
|
| + dependencies = value.get('dependencies', [])
|
| + parent = GetParentFeature(
|
| + feature_name, value, features_map[features_type]['all_names'])
|
| + if parent is not None:
|
| + # The parent data needs to be resolved so the child can inherit it.
|
| + if parent in features_map[features_type].get('unresolved', ()):
|
| + return False, None
|
| + value = _CopyParentFeatureValues(
|
| + value, features_map[features_type]['resolved'].get(parent))
|
| + # Add the parent as a dependency to ensure proper platform filtering.
|
| + dependencies.append(features_type + ':' + parent)
|
| +
|
| + for dependency in dependencies:
|
| + dep_type, dep_name = dependency.split(':')
|
| + if (dep_type not in features_map or
|
| + dep_name in features_map[dep_type].get('unresolved', ())):
|
| + # The dependency itself has not been merged yet or the features map
|
| + # does not have the needed data. Fail to resolve.
|
| + return False, None
|
| +
|
| + dep = features_map[dep_type]['resolved'].get(dep_name)
|
| + if inherit_valid_platform and (valid_platform is None or valid_platform):
|
| + # If dep is None, the dependency does not exist because it has been
|
| + # filtered out by platform. This feature value does not explicitly
|
| + # specify platform data, so filter this feature value out.
|
| + # Only run this check if valid_platform is True or None so that it
|
| + # can't be reset once it is False.
|
| + valid_platform = dep is not None
|
| + if inherit_channel and dep and 'channel' in dep:
|
| + if channel is None or BranchUtility.NewestChannel(
|
| + (dep['channel'], channel)) != channel:
|
| + # Inherit the least stable channel from the dependencies.
|
| + channel = dep['channel']
|
| +
|
| + # Default to stable on all platforms.
|
| + if valid_platform is None:
|
| + valid_platform = True
|
| + if valid_platform and channel is None:
|
| + channel = 'stable'
|
| +
|
| + if valid_platform:
|
| + # The feature value is valid. Merge it into the feature dict.
|
| + if feature is None or BranchUtility.NewestChannel(
|
| + (most_stable_channel, channel)) != channel:
|
| + # If this is the first feature value to be merged, copy the dict.
|
| + # If this feature value has a more stable channel than the most stable
|
| + # channel so far, replace the old dict so that it only merges values
|
| + # from the most stable channel.
|
| + feature = copy(value)
|
| + most_stable_channel = channel
|
| + elif channel == most_stable_channel:
|
| + feature.update(value)
|
|
|
| - self._cache = compiled_fs_factory.Create(file_system, populate, type(self))
|
| + if feature is None:
|
| + # Nothing was left after filtering the values, but all dependency resolves
|
| + # were successful. This feature does not exist on |platform|.
|
| + return True, None
|
| +
|
| + # Merge in any extra values.
|
| + for value in extra_feature_values:
|
| + feature.update(value)
|
| +
|
| + # Cleanup, fill in missing fields.
|
| + if 'name' not in feature:
|
| + feature['name'] = feature_name
|
| + feature['channel'] = most_stable_channel
|
| + return True, feature
|
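To make the channel bookkeeping concrete: dependencies push a value toward the least stable channel, while merging several values of one feature keeps only the most stable channel seen. A self-contained sketch, where the channel ordering is an assumption standing in for BranchUtility.NewestChannel and the values are made up:

    # Later in the list means newer, i.e. less stable.
    _CHANNELS = ['stable', 'beta', 'dev', 'trunk']
    def newest_channel(channels):
      return max(channels, key=_CHANNELS.index)

    # A value with no channel of its own inherits the least stable channel of
    # its dependencies (the inherit_channel branch above).
    channel = None
    for dep_channel in ['stable', 'dev']:
      if channel is None or newest_channel((dep_channel, channel)) != channel:
        channel = dep_channel
    # channel == 'dev'

    # Merging several values of one feature keeps only the values from the
    # most stable channel seen so far (the feature-merging branch above).
    values = [{'channel': 'beta', 'a': 1}, {'channel': 'stable', 'b': 2}]
    feature = most_stable = None
    for value in values:
      if feature is None or newest_channel(
          (most_stable, value['channel'])) != value['channel']:
        feature, most_stable = dict(value), value['channel']
      elif value['channel'] == most_stable:
        feature.update(value)
    # feature == {'channel': 'stable', 'b': 2}; the beta-only value is dropped.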
| +
|
| +
|
| +class _FeaturesCache(object):
|
| + def __init__(self,
|
| + file_system,
|
| + compiled_fs_factory,
|
| + json_paths,
|
| + extra_paths,
|
| + platform,
|
| + features_type):
|
| + self._cache = compiled_fs_factory.Create(
|
| + file_system, self._CreateCache, type(self), category=platform)
|
| self._text_cache = compiled_fs_factory.ForUnicode(file_system)
|
| - self._json_path = json_paths[0]
|
| - self._extra_paths = json_paths[1:]
|
| + self._json_paths = json_paths
|
| + self._extra_paths = extra_paths
|
| + self._platform = platform
|
| + self._features_type = features_type
|
|
|
| @Unicode
|
| def _CreateCache(self, _, features_json):
|
| + json_path_futures = [self._text_cache.GetFromFile(path)
|
| + for path in self._json_paths[1:]]
|
| extra_path_futures = [self._text_cache.GetFromFile(path)
|
| for path in self._extra_paths]
|
| - features = features_utility.Parse(Parse(features_json))
|
| - for path_future in extra_path_futures:
|
| - try:
|
| - extra_json = path_future.Get()
|
| - except FileNotFoundError:
|
| - # Not all file system configurations have the extra files.
|
| - continue
|
| - features = features_utility.MergedWith(
|
| - features_utility.Parse(Parse(extra_json)), features)
|
| +
|
| + features_values = _CreateFeaturesFromJSONFutures(
|
| + [Future(value=features_json)] + json_path_futures)
|
| +
|
| + extra_features_values = _CreateFeaturesFromJSONFutures(extra_path_futures)
|
| +
|
| + features = {
|
| + 'resolved': {},
|
| + 'unresolved': copy(features_values),
|
| + 'extra': extra_features_values,
|
| + 'all_names': set(features_values.keys())
|
| + }
|
| +
|
| + # Merges as many feature values as possible without resolving dependencies
|
| + # from other FeaturesCaches. Pass in a features_map with just this
|
| + # FeaturesCache's features_type. Makes repeated passes until no new
|
| + # resolves are successful.
|
| + new_resolves = True
|
| + while new_resolves:
|
| + new_resolves = False
|
| + for feature_name, feature_values in features_values.iteritems():
|
| + if feature_name not in features['unresolved']:
|
| + continue
|
| + resolve_successful, feature = _ResolveFeature(
|
| + feature_name,
|
| + feature_values,
|
| + extra_features_values.get(feature_name, ()),
|
| + self._platform,
|
| + self._features_type,
|
| + {self._features_type: features})
|
| + if resolve_successful:
|
| + del features['unresolved'][feature_name]
|
| + new_resolves = True
|
| + if feature is not None:
|
| + features['resolved'][feature_name] = feature
|
| +
|
| return features
|
|
|
| def GetFeatures(self):
|
| - if self._json_path is None:
|
| + if not self._json_paths:
|
| return Future(value={})
|
| - return self._cache.GetFromFile(self._json_path)
|
| + return self._cache.GetFromFile(self._json_paths[0])
|
|
|
|
|
| class FeaturesBundle(object):
|
| '''Provides access to properties of API, Manifest, and Permission features.
|
| '''
|
| - def __init__(self, file_system, compiled_fs_factory, object_store_creator):
|
| - self._api_cache = _FeaturesCache(
|
| - file_system,
|
| - compiled_fs_factory,
|
| - _GetFeaturePaths(_API_FEATURES))
|
| - self._manifest_cache = _FeaturesCache(
|
| - file_system,
|
| - compiled_fs_factory,
|
| - _GetFeaturePaths(_MANIFEST_FEATURES,
|
| - posixpath.join(JSON_TEMPLATES, 'manifest.json')))
|
| - self._permission_cache = _FeaturesCache(
|
| - file_system,
|
| - compiled_fs_factory,
|
| - _GetFeaturePaths(_PERMISSION_FEATURES,
|
| - posixpath.join(JSON_TEMPLATES, 'permissions.json')))
|
| - self._identity = file_system.GetIdentity()
|
| + def __init__(self,
|
| + file_system,
|
| + compiled_fs_factory,
|
| + object_store_creator,
|
| + platform):
|
| + def create_features_cache(features_type, feature_file, *extra_paths):
|
| + return _FeaturesCache(
|
| + file_system,
|
| + compiled_fs_factory,
|
| + [Join(path, feature_file) for path in API_PATHS],
|
| + extra_paths,
|
| + self._platform,
|
| + features_type)
|
| +
|
| + if platform not in GetExtensionTypes():
|
| + self._platform = PlatformToExtensionType(platform)
|
| + else:
|
| + self._platform = platform
|
| +
|
| + self._caches = {
|
| + 'api': create_features_cache('api', _API_FEATURES),
|
| + 'manifest': create_features_cache(
|
| + 'manifest',
|
| + _MANIFEST_FEATURES,
|
| + Join(JSON_TEMPLATES, 'manifest.json')),
|
| + 'permission': create_features_cache(
|
| + 'permission',
|
| + _PERMISSION_FEATURES,
|
| + Join(JSON_TEMPLATES, 'permissions.json'))
|
| + }
|
| + # Namespace the object store by the file system ID because this class is
|
| + # used by the availability finder cross-channel.
|
| self._object_store = object_store_creator.Create(
|
| _FeaturesCache,
|
| - category=self._identity)
|
| + category=StringIdentity(file_system.GetIdentity(), self._platform))
|
|
|
| def GetPermissionFeatures(self):
|
| - return self._permission_cache.GetFeatures()
|
| + return self.GetFeatures('permission', ('permission',))
|
|
|
| def GetManifestFeatures(self):
|
| - return self._manifest_cache.GetFeatures()
|
| + return self.GetFeatures('manifest', ('manifest',))
|
|
|
| def GetAPIFeatures(self):
|
| - api_features = self._object_store.Get('api_features').Get()
|
| - if api_features is not None:
|
| - return Future(value=api_features)
|
| + return self.GetFeatures('api', ('api', 'manifest', 'permission'))
|
| +
|
| + def GetFeatures(self, features_type, dependencies):
|
| + '''Resolves all dependencies in the categories specified by |dependencies|.
|
| + Returns the features in the |features_type| category.
|
| + '''
|
| + features = self._object_store.Get(features_type).Get()
|
| + if features is not None:
|
| + return Future(value=features)
|
| +
|
| + futures = {}
|
| + for cache_type in dependencies:
|
| + dependency_features = self._object_store.Get(cache_type).Get()
|
| + if dependency_features is not None:
|
| + # Get cached dependencies if possible. If it has been cached, all
|
| + # of its features have been resolved, so the other fields are
|
| + # unnecessary.
|
| + futures[cache_type] = Future(value={'resolved': dependency_features})
|
| + else:
|
| + futures[cache_type] = self._caches[cache_type].GetFeatures()
|
|
|
| - api_features_future = self._api_cache.GetFeatures()
|
| - manifest_features_future = self._manifest_cache.GetFeatures()
|
| - permission_features_future = self._permission_cache.GetFeatures()
|
| def resolve():
|
| - api_features = api_features_future.Get()
|
| - manifest_features = manifest_features_future.Get()
|
| - permission_features = permission_features_future.Get()
|
| - # TODO(rockot): Handle inter-API dependencies more gracefully.
|
| - # Not yet a problem because there is only one such case (windows -> tabs).
|
| - # If we don't store this value before annotating platforms, inter-API
|
| - # dependencies will lead to infinite recursion.
|
| - for feature in api_features.itervalues():
|
| - _AddPlatformsAndChannelsFromDependencies(
|
| - feature, api_features, manifest_features, permission_features)
|
| - self._object_store.Set('api_features', api_features)
|
| - return api_features
|
| - return Future(callback=resolve)
|
| + features_map = {}
|
| + for cache_type, future in futures.iteritems():
|
| + # Copy down to features_map level because the 'resolved' and
|
| + # 'unresolved' dicts will be modified.
|
| + features_map[cache_type] = dict((c, copy(d))
|
| + for c, d in future.Get().iteritems())
|
| +
|
| + def has_unresolved():
|
| + '''Determines if there are any unresolved features left over in any
|
| + of the categories in |dependencies|.
|
| + '''
|
| + return any(cache['unresolved'] for cache in features_map.itervalues())
|
|
|
| - def GetIdentity(self):
|
| - return self._identity
|
| + # Iterate until everything is resolved. If dependencies are multiple
|
| + # levels deep, it might take multiple passes to inherit data to the
|
| + # topmost feature.
|
| + while has_unresolved():
|
| + for cache_type, cache in features_map.iteritems():
|
| + to_remove = []
|
| + for feature_name, feature_values in cache['unresolved'].iteritems():
|
| + resolve_successful, feature = _ResolveFeature(
|
| + feature_name,
|
| + feature_values,
|
| + cache['extra'].get(feature_name, ()),
|
| + self._platform,
|
| + cache_type,
|
| + features_map)
|
| + if not resolve_successful:
|
| + continue # Try again on the next iteration of the while loop
|
| +
|
| + # When successfully resolved, remove it from the unresolved dict.
|
| + # Add it to the resolved dict unless it was filtered out (None).
|
| + to_remove.append(feature_name)
|
| + if feature is not None:
|
| + cache['resolved'][feature_name] = feature
|
| +
|
| + for key in to_remove:
|
| + del cache['unresolved'][key]
|
| +
|
| + for cache_type, cache in features_map.iteritems():
|
| + self._object_store.Set(cache_type, cache['resolved'])
|
| + return features_map[features_type]['resolved']
|
| +
|
| + return Future(callback=resolve)
|
|
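Taken together, callers now construct a FeaturesBundle per platform and read fully resolved features from it, roughly as in the following sketch. The server objects are passed in as parameters because they come from the docs server environment, 'extensions' is assumed to be one of the platform names platform_util understands, and 'tabs' is only an example feature name:

    from features_bundle import FeaturesBundle

    def print_tabs_channel(file_system, compiled_fs_factory, object_store_creator):
      features_bundle = FeaturesBundle(
          file_system, compiled_fs_factory, object_store_creator, 'extensions')
      # Each getter returns a Future; Get() yields a dict of resolved feature
      # dicts keyed by name, already filtered for this bundle's platform.
      api_features = features_bundle.GetAPIFeatures().Get()
      tabs = api_features.get('tabs')
      if tabs is not None:
        print '%s is available on the %s channel' % (tabs['name'], tabs['channel'])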
|