OLD | NEW |
1 # Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 # Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
4 | 4 |
5 # These are fake fetchers that are used for testing and the preview server. | 5 # These are fake fetchers that are used for testing and the preview server. |
6 # They return canned responses for URLs. appengine_wrappers.py uses the fake | 6 # They return canned responses for URLs. url_fetcher_fake.py uses the fake |
7 # fetchers if the App Engine imports fail. | 7 # fetchers if other URL fetching APIs are unavailable. |
8 | 8 |
9 import base64 | 9 import base64 |
10 import json | 10 import json |
11 import os | 11 import os |
12 import re | 12 import re |
13 | 13 |
14 import appengine_wrappers | 14 import url_fetcher_fake |
15 from extensions_paths import SERVER2 | 15 from extensions_paths import SERVER2 |
16 from path_util import IsDirectory | 16 from path_util import IsDirectory |
17 from test_util import ReadFile, ChromiumPath | 17 from test_util import ReadFile, ChromiumPath |
18 import url_constants | 18 import url_constants |
19 | 19 |
20 | 20 |
21 # TODO(kalman): Investigate why logging in this class implies that the server | 21 # TODO(kalman): Investigate why logging in this class implies that the server |
22 # isn't properly caching some fetched files; often it fetches the same file | 22 # isn't properly caching some fetched files; often it fetches the same file |
23 # 10+ times. This may be a test anomaly. | 23 # 10+ times. This may be a test anomaly. |
24 | 24 |
(...skipping 45 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
70 html.append('</html>') | 70 html.append('</html>') |
71 return '\n'.join(html) | 71 return '\n'.join(html) |
72 except OSError as e: | 72 except OSError as e: |
73 return None | 73 return None |
74 try: | 74 try: |
75 return ReadFile(path) | 75 return ReadFile(path) |
76 except IOError: | 76 except IOError: |
77 return None | 77 return None |
78 | 78 |
79 | 79 |
# Escaped literal prefixes for Gitiles URLs: the repository root and the
# branches subtree underneath it.
_GITILES_BASE_RE = re.escape(
    url_constants.GITILES_BASE + '/' + url_constants.GITILES_SRC_ROOT)
_GITILES_BRANCH_BASE_RE = re.escape('/'.join(
    (url_constants.GITILES_BASE,
     url_constants.GITILES_SRC_ROOT,
     url_constants.GITILES_BRANCHES_PATH)))
# NOTE: _GITILES_BRANCH_BASE_RE must come first in the alternation because
# _GITILES_BASE_RE is a prefix of it and would otherwise match too early.
_GITILES_URL_RE = r'(%s|%s)/' % (_GITILES_BRANCH_BASE_RE, _GITILES_BASE_RE)
# A URL whose final segment names a commit-ish (no further path components).
_GITILES_URL_TO_COMMIT_PATTERN = re.compile(r'%s[^/]+$' % _GITILES_URL_RE)
# Captures the in-repo path that follows the commit-ish segment (group 2;
# group 1 is the base-URL alternation above).
_GITILES_URL_TO_PATH_PATTERN = re.compile(r'%s.+?/(.*)' % _GITILES_URL_RE)
def _ExtractPathFromGitilesUrl(url):
  # Group 2 holds everything after the commit-ish path segment.
  return _GITILES_URL_TO_PATH_PATTERN.match(url).group(2)
93 | |
94 | |
class _FakeGitilesServer(_FakeFetcher):
  '''Canned stand-in for a Gitiles server, answering from the local checkout.
  '''

  def fetch(self, url):
    # A commit-ish URL gets a fake 40-character commit id.
    if _GITILES_URL_TO_COMMIT_PATTERN.match(url) is not None:
      return json.dumps({'commit': '1' * 40})
    path = _ExtractPathFromGitilesUrl(url)
    chromium_path = ChromiumPath(path)
    if self._IsDir(chromium_path):
      # Directories are rendered as a JSON listing, skipping dotfiles.
      # Key order matters for byte-identical json.dumps output.
      entries = []
      for name in self._ListDir(chromium_path):
        if name.startswith('.'):
          continue
        entry_path = os.path.join(chromium_path, name)
        entries.append({
          'id': self._Stat(entry_path),
          'name': name,
          'type': 'tree' if self._IsDir(entry_path) else 'blob'
        })
      return json.dumps({
        'id': self._Stat(chromium_path),
        'entries': entries
      })
    # Files come back base64-encoded, as Gitiles serves them.
    try:
      return base64.b64encode(ReadFile(path))
    except IOError:
      return None
121 | |
122 class _FakeViewvcServer(_FakeFetcher): | 80 class _FakeViewvcServer(_FakeFetcher): |
123 def fetch(self, url): | 81 def fetch(self, url): |
124 path = ChromiumPath(_ExtractPathFromSvnUrl(url)) | 82 path = ChromiumPath(_ExtractPathFromSvnUrl(url)) |
125 if self._IsDir(path): | 83 if self._IsDir(path): |
126 html = ['<table><tbody><tr>...</tr>'] | 84 html = ['<table><tbody><tr>...</tr>'] |
127 # The version of the directory. | 85 # The version of the directory. |
128 dir_stat = self._Stat(path) | 86 dir_stat = self._Stat(path) |
129 html.append('<tr>') | 87 html.append('<tr>') |
130 html.append('<td>Directory revision:</td>') | 88 html.append('<td>Directory revision:</td>') |
131 html.append('<td><a>%s</a><a></a></td>' % dir_stat) | 89 html.append('<td><a>%s</a><a></a></td>' % dir_stat) |
(...skipping 42 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
174 | 132 |
175 def fetch(self, url): | 133 def fetch(self, url): |
176 return _ReadTestData( | 134 return _ReadTestData( |
177 'rietveld_patcher', self._base_pattern.match(url).group(1) + '.tar.bz2', | 135 'rietveld_patcher', self._base_pattern.match(url).group(1) + '.tar.bz2', |
178 mode='rb') | 136 mode='rb') |
179 | 137 |
180 | 138 |
def ConfigureFakeFetchers():
  '''Configure the fake fetcher paths relative to the docs directory.
  '''
  # Map URL patterns to the fake fetcher that serves each one.
  fetchers = {
    url_constants.OMAHA_HISTORY: _FakeOmahaHistory(),
    url_constants.OMAHA_PROXY_URL: _FakeOmahaProxy(),
    '%s/.*' % url_constants.SVN_URL: _FakeSubversionServer(),
    '%s/.*' % url_constants.VIEWVC_URL: _FakeViewvcServer(),
    '%s/.*/commits/.*' % url_constants.GITHUB_REPOS: _FakeGithubStat(),
    '%s/.*/zipball' % url_constants.GITHUB_REPOS: _FakeGithubZip(),
    '%s/api/.*' % url_constants.CODEREVIEW_SERVER: _FakeRietveldAPI(),
    '%s/tarball/.*' % url_constants.CODEREVIEW_SERVER: _FakeRietveldTarball(),
  }
  url_fetcher_fake.ConfigureFakeUrlFetch(fetchers)
OLD | NEW |