Chromium Code Reviews

Unified Diff: chrome/common/extensions/docs/server2/content_provider.py

Issue 429723005: Docserver: Only fetch content versions in the crons, not their contents.
Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: rebase (created 6 years, 4 months ago)
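
In short, Cron() now does its work in two phases: it stats every path up front (GetVersion), so changed versions are recorded during the cron run itself, and it defers the actual content fetches into a Future that is resolved later. Below is a minimal sketch of that pattern; the Future class and the cron/get_version/get_content names are illustrative stand-ins, not the docserver's real classes.

# Sketch of the "stat now, fetch later" cron pattern this CL introduces.
# The Future class and the stat/fetch callables are illustrative stand-ins,
# not the docserver's real APIs.
import logging
import traceback

class Future(object):
  def __init__(self, callback):
    self._callback = callback
  def Get(self):
    return self._callback()

def cron(paths, get_version, get_content):
  # Phase 1: stat every path immediately so that version changes are
  # observed during the cron run itself.
  for path in paths:
    get_version(path)
  # Phase 2: kick off the content fetches now, but only resolve them when
  # the returned Future is Get()'d; failures are logged per path instead of
  # aborting the whole cron.
  futures = [(path, get_content(path)) for path in paths]
  def resolve():
    for label, future in futures:
      try:
        future.Get()
      except Exception:
        logging.error('%s: %s' % (label, traceback.format_exc()))
  return Future(callback=resolve)
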
Index: chrome/common/extensions/docs/server2/content_provider.py
diff --git a/chrome/common/extensions/docs/server2/content_provider.py b/chrome/common/extensions/docs/server2/content_provider.py
index e04dc7f4489a23ed207bbfb6dbf4049399f3ce88..f12f923937c90653ec6d145110f3170e11e5923d 100644
--- a/chrome/common/extensions/docs/server2/content_provider.py
+++ b/chrome/common/extensions/docs/server2/content_provider.py
@@ -197,16 +197,36 @@ class ContentProvider(object):
.Then(lambda found: found or path))
 
   def Cron(self):
-    futures = [self._path_canonicalizer.Cron()]
-    for root, _, files in self.file_system.Walk(''):
-      for f in files:
-        futures.append(self.GetContentAndType(Join(root, f)))
-        # Also cache the extension-less version of the file if needed.
-        base, ext = posixpath.splitext(f)
-        if f != SITE_VERIFICATION_FILE and ext in self._default_extensions:
-          futures.append(self.GetContentAndType(Join(root, base)))
-      # TODO(kalman): Cache .zip files for each directory (if supported).
-    return All(futures, except_pass=Exception, except_pass_log=True)
+    def map_cron_paths(op):
+      results = []
+      for root, _, files in self.file_system.Walk(''):
+        for f in files:
+          results.append(op(Join(root, f)))
+          # Also cache the extension-less version of the file if needed.
+          base, ext = posixpath.splitext(f)
+          if f != SITE_VERIFICATION_FILE and ext in self._default_extensions:
+            results.append(op(Join(root, base)))
+        # TODO(kalman): Cache .zip files for each directory (if supported).
+      return results
+
+    # XXX(kalman): Need to do this stuff in APIModels as well - basically
+    # anywhere the _patch logic has been implemented. Err, sort of.
+    # Has it been implemented in here? Does it need to be?
+
+    # Immediately stat everything so that files are guaranteed to be eventually
+    # up to date. See http://crbug.com/398042 for background.
+    All(map_cron_paths(self.GetVersion)).Get()
+
+    # Update content in the future.
+    futures = [('<path_canonicalizer>',  # semi-arbitrary string since there is
+                                         # no path associated with this Future.
+                self._path_canonicalizer.Cron())]
+    futures += map_cron_paths(lambda path: (path, self.GetContentAndType(path)))
+    def resolve():
+      for label, future in futures:
+        try: future.Get()
+        except: logging.error('%s: %s' % (label, traceback.format_exc()))
+    return Future(callback=resolve)
 
   def __repr__(self):
     return 'ContentProvider of <%s>' % repr(self.file_system)
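
A note on the resolve() helper above: each (label, future) pair is resolved independently, so a failure on one path is logged under its label while the remaining paths still get processed, which preserves the behaviour of the old All(futures, except_pass=Exception, except_pass_log=True) call. A small self-contained sketch of that error-isolation pattern, using illustrative stand-in names rather than the docserver's classes:

import logging
import traceback

class ImmediateFuture(object):
  # Stand-in future that runs its callback when resolved.
  def __init__(self, callback):
    self._callback = callback
  def Get(self):
    return self._callback()

def resolve_all(labeled_futures):
  # Resolve every (label, future) pair; log failures per label and keep going.
  for label, future in labeled_futures:
    try:
      future.Get()
    except Exception:
      logging.error('%s: %s' % (label, traceback.format_exc()))

def _broken_fetch():
  raise ValueError('render failed')

resolve_all([
    ('docs/ok.html', ImmediateFuture(lambda: 'rendered')),
    ('docs/bad.html', ImmediateFuture(_broken_fetch)),  # logged, not fatal
    ('docs/also_ok.html', ImmediateFuture(lambda: 'rendered')),
])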