OLD | NEW |
1 # Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 # Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
4 | 4 |
5 # These are fake fetchers that are used for testing and the preview server. | 5 # These are fake fetchers that are used for testing and the preview server. |
6 # They return canned responses for URLs. appengine_wrappers.py uses the fake | 6 # They return canned responses for URLs. appengine_wrappers.py uses the fake |
7 # fetchers if the App Engine imports fail. | 7 # fetchers if the App Engine imports fail. |
8 | 8 |
9 import os | 9 import os |
10 import re | 10 import re |
(...skipping 26 matching lines...) Expand all Loading... |
37 'test_data', | 37 'test_data', |
38 'branch_utility', | 38 'branch_utility', |
39 'first.json')) | 39 'first.json')) |
40 | 40 |
41 class FakeSubversionServer(_FakeFetcher): | 41 class FakeSubversionServer(_FakeFetcher): |
42 def __init__(self, base_path): | 42 def __init__(self, base_path): |
43 _FakeFetcher.__init__(self, base_path) | 43 _FakeFetcher.__init__(self, base_path) |
44 self._base_pattern = re.compile(r'.*chrome/common/extensions/(.*)') | 44 self._base_pattern = re.compile(r'.*chrome/common/extensions/(.*)') |
45 | 45 |
46 def fetch(self, url): | 46 def fetch(self, url): |
| 47 url = url.rsplit('?', 1)[0] |
47 path = os.path.join(os.pardir, self._base_pattern.match(url).group(1)) | 48 path = os.path.join(os.pardir, self._base_pattern.match(url).group(1)) |
48 if self._IsDir(path): | 49 if self._IsDir(path): |
49 html = ['<html>Revision 000000'] | 50 html = ['<html>Revision 000000'] |
50 try: | 51 try: |
51 for f in self._ListDir(path): | 52 for f in self._ListDir(path): |
52 if f.startswith('.'): | 53 if f.startswith('.'): |
53 continue | 54 continue |
54 if self._IsDir(os.path.join(path, f)): | 55 if self._IsDir(os.path.join(path, f)): |
55 html.append('<a>' + f + '/</a>') | 56 html.append('<a>' + f + '/</a>') |
56 else: | 57 else: |
57 html.append('<a>' + f + '</a>') | 58 html.append('<a>' + f + '</a>') |
58 html.append('</html>') | 59 html.append('</html>') |
59 return '\n'.join(html) | 60 return '\n'.join(html) |
60 except OSError as e: | 61 except OSError as e: |
61 raise FileNotFoundError('Listing %s failed: %s' (path, e)) | 62 raise FileNotFoundError('Listing %s failed: %s' % (path, e)) |
62 try: | 63 try: |
63 return self._ReadFile(path) | 64 return self._ReadFile(path) |
64 except IOError as e: | 65 except IOError as e: |
65 raise FileNotFoundError('Reading %s failed: %s' % (path, e)) | 66 raise FileNotFoundError('Reading %s failed: %s' % (path, e)) |
66 | 67 |
67 class FakeViewvcServer(_FakeFetcher): | 68 class FakeViewvcServer(_FakeFetcher): |
68 def __init__(self, base_path): | 69 def __init__(self, base_path): |
69 _FakeFetcher.__init__(self, base_path) | 70 _FakeFetcher.__init__(self, base_path) |
70 self._base_pattern = re.compile(r'.*chrome/common/extensions/(.*)') | 71 self._base_pattern = re.compile(r'.*chrome/common/extensions/(.*)') |
71 | 72 |
72 def fetch(self, url): | 73 def fetch(self, url): |
| 74 url = url.rsplit('?', 1)[0] |
73 path = os.path.join(os.pardir, self._base_pattern.match(url).group(1)) | 75 path = os.path.join(os.pardir, self._base_pattern.match(url).group(1)) |
74 if self._IsDir(path): | 76 if self._IsDir(path): |
75 html = ['<table><tbody><tr>...</tr>'] | 77 html = ['<table><tbody><tr>...</tr>'] |
76 for f in self._ListDir(path): | 78 for f in self._ListDir(path): |
77 if f.startswith('.'): | 79 if f.startswith('.'): |
78 continue | 80 continue |
79 html.append('<tr>') | 81 html.append('<tr>') |
80 html.append(' <td><a>%s%s</a></td>' % ( | 82 html.append(' <td><a>%s%s</a></td>' % ( |
81 f, '/' if self._IsDir(os.path.join(path, f)) else '')) | 83 f, '/' if self._IsDir(os.path.join(path, f)) else '')) |
82 stat = self._Stat(os.path.join(path, f)) | 84 stat = self._Stat(os.path.join(path, f)) |
(...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
115 docs = '/'.join((sys.path[0], os.pardir)) | 117 docs = '/'.join((sys.path[0], os.pardir)) |
116 appengine_wrappers.ConfigureFakeUrlFetch({ | 118 appengine_wrappers.ConfigureFakeUrlFetch({ |
117 url_constants.OMAHA_PROXY_URL: FakeOmahaProxy(docs), | 119 url_constants.OMAHA_PROXY_URL: FakeOmahaProxy(docs), |
118 '%s/.*' % url_constants.SVN_URL: FakeSubversionServer(docs), | 120 '%s/.*' % url_constants.SVN_URL: FakeSubversionServer(docs), |
119 '%s/.*' % url_constants.VIEWVC_URL: FakeViewvcServer(docs), | 121 '%s/.*' % url_constants.VIEWVC_URL: FakeViewvcServer(docs), |
120 '%s/commits/.*' % url_constants.GITHUB_URL: FakeGithubStat(docs), | 122 '%s/commits/.*' % url_constants.GITHUB_URL: FakeGithubStat(docs), |
121 '%s/zipball' % url_constants.GITHUB_URL: FakeGithubZip(docs), | 123 '%s/zipball' % url_constants.GITHUB_URL: FakeGithubZip(docs), |
122 re.escape(url_constants.OPEN_ISSUES_CSV_URL): FakeIssuesFetcher(docs), | 124 re.escape(url_constants.OPEN_ISSUES_CSV_URL): FakeIssuesFetcher(docs), |
123 re.escape(url_constants.CLOSED_ISSUES_CSV_URL): FakeIssuesFetcher(docs) | 125 re.escape(url_constants.CLOSED_ISSUES_CSV_URL): FakeIssuesFetcher(docs) |
124 }) | 126 }) |
OLD | NEW |