Chromium Code Reviews

Side by Side Diff: chrome/common/extensions/docs/server2/fake_fetchers.py

Issue 12996003: Dynamically generate a heading for Extension Docs API pages (Closed)
Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Addressing comments; patch currently being broken up. Created 7 years, 6 months ago.
 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
 # These are fake fetchers that are used for testing and the preview server.
 # They return canned responses for URLs. appengine_wrappers.py uses the fake
 # fetchers if the App Engine imports fail.
 
 import os
 import re
 import sys
 
 import appengine_wrappers
 from file_system import FileNotFoundError
+import svn_constants
 import url_constants
 
 class _FakeFetcher(object):
   def __init__(self, base_path):
     self._base_path = base_path
 
   def _ReadFile(self, path, mode='rb'):
     with open(os.path.join(self._base_path, path), mode) as f:
       return f.read()
 
   def _ListDir(self, path):
     return os.listdir(os.path.join(self._base_path, path))
 
   def _IsDir(self, path):
     return os.path.isdir(os.path.join(self._base_path, path))
 
   def _Stat(self, path):
     return int(os.stat(os.path.join(self._base_path, path)).st_mtime)
 
 class FakeOmahaProxy(_FakeFetcher):
   def fetch(self, url):
     return self._ReadFile(os.path.join('server2',
                                        'test_data',
                                        'branch_utility',
                                        'first.json'))
 
+class FakeOmahaHistory(_FakeFetcher):
+  def fetch(self, url):
+    return self._ReadFile(os.path.join('server2',
+                                       'test_data',
+                                       'branch_utility',
+                                       'second.json'))
+
 class FakeSubversionServer(_FakeFetcher):
   def __init__(self, base_path):
     _FakeFetcher.__init__(self, base_path)
     self._base_pattern = re.compile(r'.*chrome/common/extensions/(.*)')
 
   def fetch(self, url):
     url = url.rsplit('?', 1)[0]
     path = os.path.join(os.pardir, self._base_pattern.match(url).group(1))
     if self._IsDir(path):
       html = ['<html>Revision 000000']
(...skipping 86 matching lines...)
           self._base_pattern.match(url).group(1) + '.tar.bz2'))
     except IOError:
       return None
 
 def ConfigureFakeFetchers():
   '''Configure the fake fetcher paths relative to the docs directory.
   '''
   docs = '/'.join((sys.path[0], os.pardir))
   appengine_wrappers.ConfigureFakeUrlFetch({
     url_constants.OMAHA_PROXY_URL: FakeOmahaProxy(docs),
+    re.escape(url_constants.OMAHA_DEV_HISTORY): FakeOmahaHistory(docs),
     '%s/.*' % url_constants.SVN_URL: FakeSubversionServer(docs),
     '%s/.*' % url_constants.VIEWVC_URL: FakeViewvcServer(docs),
     '%s/commits/.*' % url_constants.GITHUB_URL: FakeGithubStat(docs),
     '%s/zipball' % url_constants.GITHUB_URL: FakeGithubZip(docs),
     '%s/api/.*' % url_constants.CODEREVIEW_SERVER: FakeRietveldAPI(docs),
     '%s/tarball/.*' % url_constants.CODEREVIEW_SERVER:
         FakeRietveldTarball(docs),
   })
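
The dictionary passed to appengine_wrappers.ConfigureFakeUrlFetch() maps URL regular expressions to fetcher objects, which is why the new FakeOmahaHistory entry wraps its key in re.escape(). Below is a minimal sketch, not part of this CL, of how such a regex-keyed dispatch could resolve a URL to a registered fetcher and return its canned response. The helper names (_FAKE_URL_FETCHERS, _FindFetcher) and the omahaproxy URL are illustrative placeholders; the real dispatch lives in appengine_wrappers.py, which is not shown in this diff.

import re

class FakeOmahaHistory(object):
  '''Stand-in fetcher for the sketch: returns a canned JSON response.'''
  def fetch(self, url):
    return '{"channel": "dev"}'

# Keys are regular expressions, values are fetcher objects, mirroring the
# dictionary passed to ConfigureFakeUrlFetch() above. The URL here is a
# made-up placeholder for url_constants.OMAHA_DEV_HISTORY.
_FAKE_URL_FETCHERS = {
  re.escape('https://omahaproxy.appspot.com/history'): FakeOmahaHistory(),
}

def _FindFetcher(url):
  '''Returns the first registered fetcher whose pattern matches |url|.'''
  for pattern, fetcher in _FAKE_URL_FETCHERS.items():
    if re.match(pattern, url):
      return fetcher
  return None

if __name__ == '__main__':
  url = 'https://omahaproxy.appspot.com/history'
  print(_FindFetcher(url).fetch(url))

Under these assumptions, a test or the preview server fetching the dev-history URL would get the canned second.json contents instead of hitting the network, matching how the other fake fetchers behave.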
