Chromium Code Reviews

Unified diff: chrome/common/extensions/docs/server2/features_bundle.py

Issue 344453003: Docserver: separate models for apps and extensions (Closed)
Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Created 6 years, 6 months ago
 # Copyright 2013 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-import posixpath
+from copy import copy
 
+from branch_utility import BranchUtility
 from compiled_file_system import SingleFile, Unicode
+from docs_server_utils import StringIdentity
 from extensions_paths import API_PATHS, JSON_TEMPLATES
-import features_utility
 from file_system import FileNotFoundError
 from future import Future
+from path_util import Join
+from platform_util import GetExtensionTypes, PlatformToExtensionType
 from third_party.json_schema_compiler.json_parse import Parse
 
 
 _API_FEATURES = '_api_features.json'
 _MANIFEST_FEATURES = '_manifest_features.json'
 _PERMISSION_FEATURES = '_permission_features.json'
 
 
-def _GetFeaturePaths(feature_file, *extra_paths):
-  paths = [posixpath.join(api_path, feature_file) for api_path in API_PATHS]
-  paths.extend(extra_paths)
-  return paths
-
-
-def _AddPlatformsAndChannelsFromDependencies(feature,
-                                             api_features,
-                                             manifest_features,
-                                             permission_features):
-  features_map = {
-    'api': api_features,
-    'manifest': manifest_features,
-    'permission': permission_features,
-  }
-  dependencies = feature.get('dependencies')
-  if dependencies is None:
-    return
-  platforms = set()
-  channel = None
-  for dependency in dependencies:
-    dep_type, dep_name = dependency.split(':')
-    dependency_features = features_map[dep_type]
-    dependency_feature = dependency_features.get(dep_name)
-    # If the dependency can't be resolved, it is inaccessible and therefore
-    # so is this feature.
-    if dependency_feature is None:
-      return
-    # Import the platforms from the dependency. The logic is a bit odd; if
-    # |feature| specifies platforms the it's considered an override. If not,
-    # we form the union of all dependency's platforms.
-    # TODO(kalman): Fix this (see http://crbug.com/322094).
-    platforms.update(dependency_feature.get('platforms', set()))
-    # Import the channel from the dependency.
-    channel = dependency_feature.get('channel', channel)
-  if platforms and not feature.get('platforms'):
-    feature['platforms'] = list(platforms)
-  if channel and not feature.get('channel'):
-    feature['channel'] = channel
+def HasParentFeature(feature_name, feature, all_feature_names):
+  # A feature has a parent if it has a . in its name, its parent exists,
+  # and it does not explicitly specify that it has no parent.
+  return ('.' in feature_name and
+          feature_name.rsplit('.', 1)[0] in all_feature_names and
+          not feature.get('noparent'))
+
+
+def GetParentFeature(feature_name, feature, all_feature_names):
+  '''Returns the name of the parent feature, or None if it does not have a
+  parent.
+  '''
+  if not HasParentFeature(feature_name, feature, all_feature_names):
+    return None
+  return feature_name.rsplit('.', 1)[0]
+
+
+def _CreateFeaturesFromJSONFutures(json_futures):
+  '''Returns a dict of features. The value of each feature is a list with
+  all of its possible values.
+  '''
+  def ignore_feature(name, value):
+    '''Returns true if this feature should be ignored. Features are ignored if
+    they are only available to whitelisted apps or component extensions/apps, as
+    in these cases the APIs are not available to public developers.
+
+    Private APIs are also unavailable to public developers, but logic elsewhere
+    makes sure they are not listed. So they shouldn't be ignored via this
+    mechanism.
+    '''
+    if name.endswith('Private'):
+      return False
+    return value.get('location') == 'component' or 'whitelist' in value
+
+  features = {}
+
+  for json_future in json_futures:
+    try:
+      features_json = Parse(json_future.Get())
+    except FileNotFoundError:
+      # Not all file system configurations have the extra files.
+      continue
+    for name, rawvalue in features_json.iteritems():
+      if name not in features:
+        features[name] = []
+      for value in (rawvalue if isinstance(rawvalue, list) else (rawvalue,)):
+        if not ignore_feature(name, value):
+          features[name].append(value)
+
+  return features
+
+
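For reference, a sketch of the returned shape, using hypothetical entries (real values come from the *_features.json files and the JSON templates):

features = _CreateFeaturesFromJSONFutures(json_futures)
# features might look like (hypothetical data):
# {
#   'app.window': [{'channel': 'stable', 'extension_types': ['platform_app']}],
#   'storage': [{'channel': 'stable', 'dependencies': ['permission:storage']},
#               {'channel': 'dev', 'extension_types': 'all'}],
# }
# i.e. feature name -> every value found for it across the parsed files,
# minus whitelisted/component-only entries.
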
+def _CopyParentFeatureValues(child, parent):
+  '''Takes data from feature dict |parent| and copies/merges it
+  into feature dict |child|.
+  '''
+  if parent is not None:
not at google - send to devlin 2014/06/20 22:40:28 this should really be an assert I think, since you
ahernandez 2014/06/21 00:08:20 I was thinking the same thing, but None still mana
ahernandez 2014/06/21 01:10:17 I looked into this a bit more, and the reason that
not at google - send to devlin 2014/06/23 22:45:02 ok, add comment
+    for key, val in parent.iteritems():
+      # Don't copy over the parent name or the noparent key.
+      if key not in child and key != 'name' and key != 'noparent':
not at google - send to devlin 2014/06/20 22:40:28 "if key not in child and key not in ('name', 'nopa
ahernandez 2014/06/21 00:18:00 Sometimes the children don't have a name explicitl
+        child[key] = parent[key]
+      else:
+        # If the parent has a key with a single value, we don't want to
+        # overwrite the child's value. However, list values should be merged.
not at google - send to devlin 2014/06/20 22:40:28 actually list values shouldn't be merged. any list
+        if isinstance(val, list) and isinstance(child[key], list):
+          child[key] = list(set(child[key]) | set(parent[key]))
+
not at google - send to devlin 2014/06/20 22:40:28 anyway, I'd actually rather we return a new value
+  return child
+
+
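The threads above suggest not merging list values, checking 'name'/'noparent' with a tuple membership test, and returning a new dict rather than mutating |child|. A rough sketch of that variant (hypothetical; not what this patch set does):

def _CopyParentFeatureValues(child, parent):
  # Hypothetical rewrite following the review comments; the patch keeps the
  # mutating version above. |parent| can still be None here (see the thread
  # above), so that case just returns a copy of |child|.
  if parent is None:
    return dict(child)
  # Start from the parent's values, minus 'name' and 'noparent', then let the
  # child's own values win for any key it also defines (no list merging).
  merged = dict((key, value) for key, value in parent.iteritems()
                if key not in ('name', 'noparent'))
  merged.update(child)
  return merged
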
+def _ResolveFeature(feature_name,
+                  feature_values,
+                  extra_feature_values,
+                  platform,
+                  features_type,
+                  features_map):
not at google - send to devlin 2014/06/20 22:40:28 indent += 2 for these to align with the (
+  '''Filters and combines the possible values for a feature into one dict.
+
+  It uses |features_map| to resolve dependencies for each value and inherit
+  unspecified platform and channel data. |feature_values| is then filtered
+  by platform and all values with the most stable platform are merged into one
+  dict. All values in |extra_feature_values| get merged into this dict.
+
+  Returns |resolve_successful| and |feature|. |resolve_successful| is False
+  if the feature's dependencies have not been merged yet themselves, meaning
+  that this feature can not be reliably resolved yet. |feature| is the
+  resulting feature dict, or None if the feature does not exist on the
+  platform specified.
+  '''
+  feature = None
+  most_stable_channel = None
+  for value in feature_values:
+    # If 'extension_types' or 'channel' is unspecified, these values should
+    # be inherited from dependencies. If they are specified, these values
+    # should override anything specified by dependencies.
+    inherit_valid_platform = 'extension_types' not in value
+    if inherit_valid_platform:
+      valid_platform = None
+    else:
+      valid_platform = (value['extension_types'] == 'all' or
+                        platform in value['extension_types'])
+    inherit_channel = 'channel' not in value
+    channel = value.get('channel')
+
+    dependencies = value.get('dependencies', [])
+    parent = GetParentFeature(
+        feature_name, value, features_map[features_type]['all_names'])
+    if parent is not None:
+      # The parent data needs to be resolved so the child can inherit it.
+      if parent in features_map[features_type].get('unresolved', ()):
+        return False, None
+      value = _CopyParentFeatureValues(
+          value, features_map[features_type]['resolved'].get(parent))
+      # Add the parent as a dependency to ensure proper platform filtering.
+      dependencies.append(features_type + ':' + parent)
+
+    for dependency in dependencies:
+      dep_type, dep_name = dependency.split(':')
+      if (dep_type not in features_map or
+          dep_name in features_map[dep_type].get('unresolved', ())):
+        # The dependency itself has not been merged yet or the features map
+        # does not have the needed data. Fail to resolve.
+        return False, None
+
+      dep = features_map[dep_type]['resolved'].get(dep_name)
+      if inherit_valid_platform and (valid_platform is None or valid_platform):
+        # If dep is None, the dependency does not exist because it has been
+        # filtered out by platform. This feature value does not explicitly
+        # specify platform data, so filter this feature value out.
+        # Only run this check if valid_platform is True or None so that it
+        # can't be reset once it is False.
+        valid_platform = dep is not None
+      if inherit_channel and dep and 'channel' in dep:
+        if channel is None or BranchUtility.NewestChannel(
+            (dep['channel'], channel)) != channel:
+          # Inherit the least stable channel from the dependencies.
+          channel = dep['channel']
+
+    # Default to stable on all platforms.
+    if valid_platform is None:
+      valid_platform = True
+    if valid_platform and channel is None:
+      channel = 'stable'
+
+    if valid_platform:
+      # The feature value is valid. Merge it into the feature dict.
+      if feature is None or BranchUtility.NewestChannel(
+          (most_stable_channel, channel)) != channel:
+        # If this is the first feature value to be merged, copy the dict.
+        # If this feature value has a more stable channel than the most stable
+        # channel so far, replace the old dict so that it only merges values
+        # from the most stable channel.
+        feature = copy(value)
+        most_stable_channel = channel
+      elif channel == most_stable_channel:
+        feature.update(value)
+
+  if feature is None:
+    # Nothing was left after filtering the values, but all dependency resolves
+    # were successful. This feature does not exist on |platform|.
+    return True, None
+
+  # Merge in any extra values.
+  for value in extra_feature_values:
+    feature.update(value)
+
+  # Cleanup, fill in missing fields.
+  if 'name' not in feature:
+    feature['name'] = feature_name
+  feature['channel'] = most_stable_channel
+  return True, feature
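As a rough worked example (hypothetical feature names and values, not from the patch), resolving a child feature against an already-resolved parent/dependency might look like:

features_map = {
  'api': {
    'resolved': {'app': {'channel': 'stable',
                         'extension_types': ['platform_app']}},
    'unresolved': {},
    'extra': {},
    'all_names': set(['app', 'app.window']),
  }
}
resolve_successful, feature = _ResolveFeature(
    'app.window',
    [{'dependencies': ['api:app']}],  # possible values for app.window
    (),                               # no extra (JSON template) values
    'platform_app',                   # platform, as an extension type
    'api',
    features_map)
# resolve_successful is True; the value inherits 'channel' and platform
# validity from 'app', so the feature ends up on the stable channel for apps.
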
 
 
 class _FeaturesCache(object):
-  def __init__(self, file_system, compiled_fs_factory, json_paths):
-    populate = self._CreateCache
-    if len(json_paths) == 1:
-      populate = SingleFile(populate)
-
-    self._cache = compiled_fs_factory.Create(file_system, populate, type(self))
+  def __init__(self,
+               file_system,
+               compiled_fs_factory,
+               json_paths,
+               extra_paths,
+               platform,
+               features_type):
+    self._cache = compiled_fs_factory.Create(
+        file_system, self._CreateCache, type(self), category=platform)
     self._text_cache = compiled_fs_factory.ForUnicode(file_system)
-    self._json_path = json_paths[0]
-    self._extra_paths = json_paths[1:]
+    self._json_paths = json_paths
+    self._extra_paths = extra_paths
+    self._platform = platform
+    self._features_type = features_type
 
   @Unicode
   def _CreateCache(self, _, features_json):
+    json_path_futures = [self._text_cache.GetFromFile(path)
+                         for path in self._json_paths[1:]]
     extra_path_futures = [self._text_cache.GetFromFile(path)
                           for path in self._extra_paths]
-    features = features_utility.Parse(Parse(features_json))
-    for path_future in extra_path_futures:
-      try:
-        extra_json = path_future.Get()
-      except FileNotFoundError:
-        # Not all file system configurations have the extra files.
-        continue
-      features = features_utility.MergedWith(
-          features_utility.Parse(Parse(extra_json)), features)
+
+    features_values = _CreateFeaturesFromJSONFutures(
+        [Future(value=features_json)] + json_path_futures)
+
+    extra_features_values = _CreateFeaturesFromJSONFutures(extra_path_futures)
+
+    features = {
+      'resolved': {},
+      'unresolved': copy(features_values),
+      'extra': extra_features_values,
+      'all_names': set(features_values.keys())
+    }
+
+    # Merges as many feature values as possible without resolving dependencies
+    # from other FeaturesCaches. Pass in a features_map with just this
+    # FeatureCache's features_type. Makes repeated passes until no new
+    # resolves are successful.
+    new_resolves = True
+    while new_resolves:
+      new_resolves = False
+      for feature_name, feature_values in features_values.iteritems():
+        if feature_name not in features['unresolved']:
+          continue
+        resolve_successful, feature = _ResolveFeature(
+            feature_name,
+            feature_values,
+            extra_features_values.get(feature_name, ()),
+            self._platform,
+            self._features_type,
+            {self._features_type: features})
+        if resolve_successful:
+          del features['unresolved'][feature_name]
+          new_resolves = True
+          if feature is not None:
+            features['resolved'][feature_name] = feature
+
     return features
 
   def GetFeatures(self):
-    if self._json_path is None:
+    if not self._json_paths:
       return Future(value={})
-    return self._cache.GetFromFile(self._json_path)
+    return self._cache.GetFromFile(self._json_paths[0])
 
 
 class FeaturesBundle(object):
   '''Provides access to properties of API, Manifest, and Permission features.
   '''
-  def __init__(self, file_system, compiled_fs_factory, object_store_creator):
-    self._api_cache = _FeaturesCache(
-        file_system,
-        compiled_fs_factory,
-        _GetFeaturePaths(_API_FEATURES))
-    self._manifest_cache = _FeaturesCache(
-        file_system,
-        compiled_fs_factory,
-        _GetFeaturePaths(_MANIFEST_FEATURES,
-                         posixpath.join(JSON_TEMPLATES, 'manifest.json')))
-    self._permission_cache = _FeaturesCache(
-        file_system,
-        compiled_fs_factory,
-        _GetFeaturePaths(_PERMISSION_FEATURES,
-                         posixpath.join(JSON_TEMPLATES, 'permissions.json')))
-    self._identity = file_system.GetIdentity()
+  def __init__(self,
+               file_system,
+               compiled_fs_factory,
+               object_store_creator,
+               platform):
+    def create_features_cache(features_type, feature_file, *extra_paths):
+      return _FeaturesCache(
+          file_system,
+          compiled_fs_factory,
+          [Join(path, feature_file) for path in API_PATHS],
+          extra_paths,
+          self._platform,
+          features_type)
+
+    if platform not in GetExtensionTypes():
+      self._platform = PlatformToExtensionType(platform)
+    else:
+      self._platform = platform
+
+    self._caches = {
+      'api': create_features_cache('api', _API_FEATURES),
+      'manifest': create_features_cache(
+          'manifest',
+          _MANIFEST_FEATURES,
+          Join(JSON_TEMPLATES, 'manifest.json')),
+      'permission': create_features_cache(
+          'permission',
+          _PERMISSION_FEATURES,
+          Join(JSON_TEMPLATES, 'permissions.json'))
+    }
+    # Namespace the object store by the file system ID because this class is
+    # used by the availability finder cross-channel.
     self._object_store = object_store_creator.Create(
         _FeaturesCache,
-        category=self._identity)
+        category=StringIdentity(file_system.GetIdentity(), self._platform))
 
   def GetPermissionFeatures(self):
-    return self._permission_cache.GetFeatures()
+    return self.GetFeatures('permission', ('permission',))
 
   def GetManifestFeatures(self):
-    return self._manifest_cache.GetFeatures()
+    return self.GetFeatures('manifest', ('manifest',))
 
   def GetAPIFeatures(self):
-    api_features = self._object_store.Get('api_features').Get()
-    if api_features is not None:
-      return Future(value=api_features)
-
-    api_features_future = self._api_cache.GetFeatures()
-    manifest_features_future = self._manifest_cache.GetFeatures()
-    permission_features_future = self._permission_cache.GetFeatures()
+    return self.GetFeatures('api', ('api', 'manifest', 'permission'))
+
+  def GetFeatures(self, features_type, dependencies):
+    '''Resolves all dependencies in the categories specified by |dependencies|.
+    Returns the features in the |features_type| category.
+    '''
+    features = self._object_store.Get(features_type).Get()
+    if features is not None:
+      return Future(value=features)
+
+    futures = {}
+    for cache_type in dependencies:
+      dependency_features = self._object_store.Get(cache_type).Get()
+      if dependency_features is not None:
+        # Get cached dependencies if possible. If it has been cached, all
+        # of its features have been resolved, so the other fields are
+        # unnecessary.
+        futures[cache_type] = Future(value={'resolved': dependency_features})
+      else:
+        futures[cache_type] = self._caches[cache_type].GetFeatures()
+
     def resolve():
-      api_features = api_features_future.Get()
-      manifest_features = manifest_features_future.Get()
-      permission_features = permission_features_future.Get()
-      # TODO(rockot): Handle inter-API dependencies more gracefully.
-      # Not yet a problem because there is only one such case (windows -> tabs).
-      # If we don't store this value before annotating platforms, inter-API
-      # dependencies will lead to infinite recursion.
-      for feature in api_features.itervalues():
-        _AddPlatformsAndChannelsFromDependencies(
-            feature, api_features, manifest_features, permission_features)
-      self._object_store.Set('api_features', api_features)
-      return api_features
+      features_map = {}
+      for cache_type, future in futures.iteritems():
+        # Copy down to features_map level because the 'resolved' and
+        # 'unresolved' dicts will be modified.
+        features_map[cache_type] = dict((c, copy(d))
+                                        for c, d in future.Get().iteritems())
+
+      def has_unresolved():
+        '''Determines if there are any unresolved features left over in any
+        of the categories in |dependencies|.
+        '''
+        return any(cache['unresolved'] for cache in features_map.itervalues())
+
+      # Iterate until everything is resolved. If dependencies are multiple
+      # levels deep, it might take multiple passes to inherit data to the
+      # topmost feature.
+      while has_unresolved():
+        for cache_type, cache in features_map.iteritems():
+          to_remove = []
+          for feature_name, feature_values in cache['unresolved'].iteritems():
+            resolve_successful, feature = _ResolveFeature(
+                feature_name,
+                feature_values,
+                cache['extra'].get(feature_name, ()),
+                self._platform,
+                cache_type,
+                features_map)
+            if not resolve_successful:
+              continue  # Try again on the next iteration of the while loop
+
+            # When successfully resolved, remove it from the unresolved dict.
+            # Add it to the resolved dict if it didn't get deleted.
+            to_remove.append(feature_name)
+            if feature is not None:
+              cache['resolved'][feature_name] = feature
+
+          for key in to_remove:
+            del cache['unresolved'][key]
+
+      for cache_type, cache in features_map.iteritems():
+        self._object_store.Set(cache_type, cache['resolved'])
+      return features_map[features_type]['resolved']
+
     return Future(callback=resolve)
-
-  def GetIdentity(self):
-    return self._identity
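For context on the issue title, a hypothetical wiring sketch of the new per-platform bundles; the object names and the 'apps'/'extensions' platform strings here are assumptions, not part of this file:

apps_bundle = FeaturesBundle(
    host_file_system, compiled_fs_factory, object_store_creator, 'apps')
extensions_bundle = FeaturesBundle(
    host_file_system, compiled_fs_factory, object_store_creator, 'extensions')
# Each bundle resolves 'api' features against the 'manifest' and 'permission'
# features for its own platform, so apps and extensions get separate models.
apps_api_features = apps_bundle.GetAPIFeatures().Get()
extensions_api_features = extensions_bundle.GetAPIFeatures().Get()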
