Index: chrome/common/extensions/docs/server2/fake_fetchers.py
diff --git a/chrome/common/extensions/docs/server2/fake_fetchers.py b/chrome/common/extensions/docs/server2/fake_fetchers.py
index 38625f1f3a526860f18a5e8e90bd12f69929761b..ea30b83d79ffa28dbfb98cc05710b2a01972e918 100644
--- a/chrome/common/extensions/docs/server2/fake_fetchers.py
+++ b/chrome/common/extensions/docs/server2/fake_fetchers.py
@@ -3,15 +3,15 @@
 # found in the LICENSE file.
 
 # These are fake fetchers that are used for testing and the preview server.
-# They return canned responses for URLs. appengine_wrappers.py uses the fake
-# fetchers if the App Engine imports fail.
+# They return canned responses for URLs. url_fetcher_fake.py uses the fake
+# fetchers if other URL fetching APIs are unavailable.
 
 import base64
 import json
 import os
 import re
 
-import appengine_wrappers
+import url_fetcher_fake
 from extensions_paths import SERVER2
 from path_util import IsDirectory
 from test_util import ReadFile, ChromiumPath
@@ -77,48 +77,6 @@ class _FakeSubversionServer(_FakeFetcher):
       return None
 
 
-_GITILES_BASE_RE = re.escape('%s/%s' %
-    (url_constants.GITILES_BASE, url_constants.GITILES_SRC_ROOT))
-_GITILES_BRANCH_BASE_RE = re.escape('%s/%s/%s' %
-    (url_constants.GITILES_BASE,
-     url_constants.GITILES_SRC_ROOT,
-     url_constants.GITILES_BRANCHES_PATH))
-# NOTE: _GITILES_BRANCH_BASE_RE must be first, because _GITILES_BASE_RE is
-# a more general pattern.
-_GITILES_URL_RE = r'(%s|%s)/' % (_GITILES_BRANCH_BASE_RE, _GITILES_BASE_RE)
-_GITILES_URL_TO_COMMIT_PATTERN = re.compile(r'%s[^/]+$' % _GITILES_URL_RE)
-_GITILES_URL_TO_PATH_PATTERN = re.compile(r'%s.+?/(.*)' % _GITILES_URL_RE)
-def _ExtractPathFromGitilesUrl(url):
-  return _GITILES_URL_TO_PATH_PATTERN.match(url).group(2)
-
-
-class _FakeGitilesServer(_FakeFetcher):
-  def fetch(self, url):
-    if _GITILES_URL_TO_COMMIT_PATTERN.match(url) is not None:
-      return json.dumps({'commit': '1' * 40})
-    path = _ExtractPathFromGitilesUrl(url)
-    chromium_path = ChromiumPath(path)
-    if self._IsDir(chromium_path):
-      jsn = {}
-      dir_stat = self._Stat(chromium_path)
-      jsn['id'] = dir_stat
-      jsn['entries'] = []
-      for f in self._ListDir(chromium_path):
-        if f.startswith('.'):
-          continue
-        f_path = os.path.join(chromium_path, f)
-        jsn['entries'].append({
-          'id': self._Stat(f_path),
-          'name': f,
-          'type': 'tree' if self._IsDir(f_path) else 'blob'
-        })
-      return json.dumps(jsn)
-    try:
-      return base64.b64encode(ReadFile(path))
-    except IOError:
-      return None
-
-
 class _FakeViewvcServer(_FakeFetcher):
   def fetch(self, url):
     path = ChromiumPath(_ExtractPathFromSvnUrl(url))
@@ -181,7 +139,7 @@ class _FakeRietveldTarball(_FakeFetcher):
 def ConfigureFakeFetchers():
   '''Configure the fake fetcher paths relative to the docs directory.
   '''
-  appengine_wrappers.ConfigureFakeUrlFetch({
+  url_fetcher_fake.ConfigureFakeUrlFetch({
     url_constants.OMAHA_HISTORY: _FakeOmahaHistory(),
     url_constants.OMAHA_PROXY_URL: _FakeOmahaProxy(),
     '%s/.*' % url_constants.SVN_URL: _FakeSubversionServer(),
@@ -190,6 +148,4 @@ def ConfigureFakeFetchers():
     '%s/.*/zipball' % url_constants.GITHUB_REPOS: _FakeGithubZip(),
     '%s/api/.*' % url_constants.CODEREVIEW_SERVER: _FakeRietveldAPI(),
     '%s/tarball/.*' % url_constants.CODEREVIEW_SERVER: _FakeRietveldTarball(),
-    '%s/.*' % _GITILES_BASE_RE: _FakeGitilesServer(),
-    '%s/.*' % _GITILES_BRANCH_BASE_RE: _FakeGitilesServer()
   })