OLD | NEW |
---|---|
1 # Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 # Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 # Use of this source code is governed by a BSD-style license that can be | 2 # Use of this source code is governed by a BSD-style license that can be |
3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
4 | 4 |
5 def IsDeadlineExceededError(error): | |
6 '''A general way of determining whether |error| is a DeadlineExceededError, | |
7 since there are 3 different types thrown by AppEngine and we might as well | |
8 handle them all the same way. For more info see: | |
9 https://developers.google.com/appengine/articles/deadlineexceedederrors | |
10 ''' | |
11 return type(error).__name__ == 'DeadlineExceededError' | |
12 | |
13 | |
14 def IsDownloadError(error): | |
15 return type(error).__name__ == 'DownloadError' | |
16 | |
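A minimal sketch of how a type-name check like the one above is meant to be used at a call site. The helper is inlined for self-containment, and `FetchEverything` plus the local `DeadlineExceededError` class are hypothetical stand-ins for the three SDK exception classes:

```python
def IsDeadlineExceededError(error):
  # Same check as above: compare the class name rather than importing all
  # three AppEngine DeadlineExceededError types.
  return type(error).__name__ == 'DeadlineExceededError'

class DeadlineExceededError(Exception):  # stand-in for any of the SDK classes
  pass

def FetchEverything():
  raise DeadlineExceededError()

try:
  FetchEverything()
except Exception as e:
  if IsDeadlineExceededError(e):
    result = None  # e.g. fall back to a cached or partial response
  else:
    raise
```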
17 | 5 |
18 # This will attempt to import the actual App Engine modules, and if it fails, | 6 # This will attempt to import the actual App Engine modules, and if it fails, |
19 # they will be replaced with fake modules. This is useful during testing. | 7 # they will be replaced with fake modules. This is useful during testing. |
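Because of this fallback, callers are expected to import App Engine APIs through this wrapper rather than from `google.appengine` directly, so tests transparently pick up the fakes. A minimal sketch, assuming the module is importable as `appengine_wrappers`:

```python
# Hypothetical consumer. On AppEngine this resolves to the real memcache
# module; in local tests (no SDK on the path) it resolves to the fake below.
from appengine_wrappers import memcache

def CacheValue(key, value):
  # set_multi_async exists on both the real Client and the in-memory fake.
  return memcache.Client().set_multi_async({key: value}, namespace='example')
```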
20 try: | 8 try: |
21 import google.appengine.api.app_identity as app_identity | |
[Ken Rockot (use gerrit already), 2015/05/26 00:26:23: No longer used. Killed.]
22 import google.appengine.api.files as files | |
[Ken Rockot (use gerrit already), 2015/05/26 00:26:23: Used by github FS, but treating those as killed to]
23 import google.appengine.api.logservice as logservice | |
[Ken Rockot (use gerrit already), 2015/05/26 00:26:23: Very little to wrap. Logic merged into custom_logg]
24 import google.appengine.api.memcache as memcache | 9 import google.appengine.api.memcache as memcache |
25 import google.appengine.api.taskqueue as taskqueue | |
[Ken Rockot (use gerrit already), 2015/05/26 00:26:23: No longer used. Killed.]
26 import google.appengine.api.urlfetch as urlfetch | |
[Ken Rockot (use gerrit already), 2015/05/26 00:26:23: We now have a first class UrlFetcher interface. en]
27 import google.appengine.ext.blobstore as blobstore | |
[Ken Rockot (use gerrit already), 2015/05/26 00:26:23: No longer used except by github. Killed.]
28 from google.appengine.ext.blobstore.blobstore import BlobReferenceProperty | |
[Ken Rockot (use gerrit already), 2015/05/26 00:26:23: No longer used except by github. Killed.]
29 import google.appengine.ext.db as db | |
[Ken Rockot (use gerrit already), 2015/05/26 00:26:23: We have an (implied) PersistentObjectStore interfa]
30 import webapp2 | |
[Ken Rockot (use gerrit already), 2015/05/26 00:26:23: Only used in one place that's only run in AppEngin]
31 except ImportError: | 10 except ImportError: |
32 import re | |
33 from StringIO import StringIO | |
34 | |
35 FAKE_URL_FETCHER_CONFIGURATION = None | |
36 | |
37 def ConfigureFakeUrlFetch(configuration): | |
38 """|configuration| is a dictionary mapping strings to fake urlfetch classes. | |
39 A fake urlfetch class just needs to have a fetch method. The keys of the | |
40 dictionary are treated as regex, and they are matched with the URL to | |
41 determine which fake urlfetch is used. | |
42 """ | |
43 global FAKE_URL_FETCHER_CONFIGURATION | |
44 FAKE_URL_FETCHER_CONFIGURATION = dict( | |
45 (re.compile(k), v) for k, v in configuration.iteritems()) | |
46 | |
47 def _GetConfiguration(key): | |
48 if not FAKE_URL_FETCHER_CONFIGURATION: | |
49 raise ValueError('No fake fetch paths have been configured. ' | |
50 'See ConfigureFakeUrlFetch in appengine_wrappers.py.') | |
51 for k, v in FAKE_URL_FETCHER_CONFIGURATION.iteritems(): | |
52 if k.match(key): | |
53 return v | |
54 raise ValueError('No configuration found for %s' % key) | |
55 | |
56 class _RPC(object): | 11 class _RPC(object): |
57 def __init__(self, result=None): | 12 def __init__(self, result=None): |
58 self.result = result | 13 self.result = result |
59 | 14 |
60 def get_result(self): | 15 def get_result(self): |
61 return self.result | 16 return self.result |
62 | 17 |
63 def wait(self): | 18 def wait(self): |
64 pass | 19 pass |
65 | 20 |
66 class FakeAppIdentity(object): | |
67 """A fake app_identity module that returns no access tokens.""" | |
68 def get_access_token(self, scope): | |
69 return (None, None) | |
70 app_identity = FakeAppIdentity() | |
71 | |
72 class FakeUrlFetch(object): | |
73 """A fake urlfetch module that uses the current | |
74 |FAKE_URL_FETCHER_CONFIGURATION| to map urls to fake fetchers. | |
75 """ | |
76 class DownloadError(Exception): | |
77 pass | |
78 | |
79 class _Response(object): | |
80 def __init__(self, content): | |
81 self.content = content | |
82 self.headers = {'Content-Type': 'none'} | |
83 self.status_code = 200 | |
84 | |
85 def fetch(self, url, **kwargs): | |
86 url = url.split('?', 1)[0] | |
87 response = self._Response(_GetConfiguration(url).fetch(url)) | |
88 if response.content is None: | |
89 response.status_code = 404 | |
90 return response | |
91 | |
92 def create_rpc(self, **kwargs): | |
93 return _RPC() | |
94 | |
95 def make_fetch_call(self, rpc, url, **kwargs): | |
96 rpc.result = self.fetch(url) | |
97 urlfetch = FakeUrlFetch() | |
98 | |
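A hedged sketch of how a test might drive the fake urlfetch described by the `ConfigureFakeUrlFetch` docstring above. The module name `appengine_wrappers`, the GitHub URL, and `_FakeGithubFetcher` are illustrative assumptions, and this only applies while the fake (ImportError) branch is active:

```python
import appengine_wrappers

class _FakeGithubFetcher(object):
  def fetch(self, url):
    return '{"commit": "abc"}'  # canned payload for any matching URL

appengine_wrappers.ConfigureFakeUrlFetch({
    r'https://api\.github\.com/.*': _FakeGithubFetcher(),
})

# The query string is stripped before the regex lookup, and a non-None
# payload yields a 200 response.
response = appengine_wrappers.urlfetch.fetch(
    'https://api.github.com/repos/foo?per_page=1')
assert response.status_code == 200
assert response.content == '{"commit": "abc"}'
```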
99 _BLOBS = {} | |
100 class FakeBlobstore(object): | |
101 class BlobNotFoundError(Exception): | |
102 pass | |
103 | |
104 class BlobReader(object): | |
105 def __init__(self, blob_key): | |
106 self._data = _BLOBS[blob_key].getvalue() | |
107 | |
108 def read(self): | |
109 return self._data | |
110 | |
111 blobstore = FakeBlobstore() | |
112 | |
113 class FakeFileInterface(object): | |
114 """This class allows a StringIO object to be used in a with block like a | |
115 file. | |
116 """ | |
117 def __init__(self, io): | |
118 self._io = io | |
119 | |
120 def __exit__(self, *args): | |
121 pass | |
122 | |
123 def write(self, data): | |
124 self._io.write(data) | |
125 | |
126 def __enter__(self, *args): | |
127 return self._io | |
128 | |
129 class FakeFiles(object): | |
130 _next_blobstore_key = 0 | |
131 class blobstore(object): | |
132 @staticmethod | |
133 def create(): | |
134 FakeFiles._next_blobstore_key += 1 | |
135 return FakeFiles._next_blobstore_key | |
136 | |
137 @staticmethod | |
138 def get_blob_key(filename): | |
139 return filename | |
140 | |
141 def open(self, filename, mode): | |
142 _BLOBS[filename] = StringIO() | |
143 return FakeFileInterface(_BLOBS[filename]) | |
144 | |
145 def GetBlobKeys(self): | |
146 return _BLOBS.keys() | |
147 | |
148 def finalize(self, filename): | |
149 pass | |
150 | |
151 files = FakeFiles() | |
152 | |
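A small sketch of the round trip these fakes support, assuming `files` and `blobstore` are in scope (e.g. imported from this wrapper module in a test); the filename and payload are arbitrary:

```python
# Write through the fake files API; the data lands in the shared _BLOBS dict.
with files.open('fake-blob', 'a') as f:
  f.write('hello world')

# get_blob_key is the identity function in the fake, so the filename doubles
# as the blob key when reading back through the fake blobstore.
key = files.blobstore.get_blob_key('fake-blob')
assert blobstore.BlobReader(key).read() == 'hello world'
```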
153 class Logservice(object): | |
154 AUTOFLUSH_ENABLED = True | |
155 | |
156 def flush(self): | |
157 pass | |
158 | |
159 logservice = Logservice() | |
160 | 21 |
161 class InMemoryMemcache(object): | 22 class InMemoryMemcache(object): |
162 """An in-memory memcache implementation. | 23 """An in-memory memcache implementation. |
163 """ | 24 """ |
164 def __init__(self): | 25 def __init__(self): |
165 self._namespaces = {} | 26 self._namespaces = {} |
166 | 27 |
167 class Client(object): | 28 class Client(object): |
168 def set_multi_async(self, mapping, namespace='', time=0): | 29 def set_multi_async(self, mapping, namespace='', time=0): |
169 return _RPC(result=dict( | 30 return _RPC(result=dict( |
(...skipping 15 matching lines...) | |
185 | 46 |
186 def delete_multi(self, keys, namespace=''): | 47 def delete_multi(self, keys, namespace=''): |
187 for k in keys: | 48 for k in keys: |
188 self.delete(k, namespace=namespace) | 49 self.delete(k, namespace=namespace) |
189 | 50 |
190 def _GetNamespace(self, namespace): | 51 def _GetNamespace(self, namespace): |
191 if namespace not in self._namespaces: | 52 if namespace not in self._namespaces: |
192 self._namespaces[namespace] = {} | 53 self._namespaces[namespace] = {} |
193 return self._namespaces[namespace] | 54 return self._namespaces[namespace] |
194 | 55 |
56 def flush_all(self): | |
57 self._namespaces = {} | |
58 return False | |
59 | |
195 memcache = InMemoryMemcache() | 60 memcache = InMemoryMemcache() |
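A hedged sketch of exercising the in-memory fake from a test. The module name `appengine_wrappers` is assumed, and the per-key result of `set_multi_async` and the per-key `delete` are built in the lines elided above:

```python
from appengine_wrappers import memcache

client = memcache.Client()
rpc = client.set_multi_async({'a': 1, 'b': 2}, namespace='test', time=60)
rpc.wait()          # no-op: the fake _RPC is already resolved
rpc.get_result()    # per-key mapping, built in the elided lines

memcache.delete_multi(['a', 'b'], namespace='test')
memcache.flush_all()  # drops every namespace; note the fake returns False
```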
196 | |
197 class webapp2(object): | |
198 class RequestHandler(object): | |
199 """A fake webapp2.RequestHandler class for Handler to extend. | |
200 """ | |
201 def __init__(self, request, response): | |
202 self.request = request | |
203 self.response = response | |
204 self.response.status = 200 | |
205 | |
206 def redirect(self, path, permanent=False): | |
207 self.response.status = 301 if permanent else 302 | |
208 self.response.headers['Location'] = path | |
209 | |
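A minimal sketch of the fake handler's behaviour, using a hypothetical response stand-in (the real server supplies its own request/response objects):

```python
class _FakeResponse(object):
  def __init__(self):
    self.status = None
    self.headers = {}

handler = webapp2.RequestHandler(request=None, response=_FakeResponse())
assert handler.response.status == 200            # set by __init__
handler.redirect('/new/path', permanent=True)
assert handler.response.status == 301
assert handler.response.headers['Location'] == '/new/path'
```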
210 class _Db_Result(object): | |
211 def __init__(self, data): | |
212 self._data = data | |
213 | |
214 class _Result(object): | |
215 def __init__(self, value): | |
216 self.value = value | |
217 | |
218 def get(self): | |
219 return self._Result(self._data) | |
220 | |
221 class db(object): | |
222 _store = {} | |
223 | |
224 class StringProperty(object): | |
225 pass | |
226 | |
227 class BlobProperty(object): | |
228 pass | |
229 | |
230 class Key(object): | |
231 def __init__(self, key): | |
232 self._key = key | |
233 | |
234 @staticmethod | |
235 def from_path(model_name, path): | |
236 return db.Key('%s/%s' % (model_name, path)) | |
237 | |
238 def __eq__(self, obj): | |
239 return self.__class__ == obj.__class__ and self._key == obj._key | |
240 | |
241 def __hash__(self): | |
242 return hash(self._key) | |
243 | |
244 def __str__(self): | |
245 return str(self._key) | |
246 | |
247 class Model(object): | |
248 key = None | |
249 | |
250 def __init__(self, **optargs): | |
251 cls = self.__class__ | |
252 for k, v in optargs.iteritems(): | |
253 assert hasattr(cls, k), '%s does not define property %s' % ( | |
254 cls.__name__, k) | |
255 setattr(self, k, v) | |
256 | |
257 @staticmethod | |
258 def gql(query, key): | |
259 return _Db_Result(db._store.get(key)) | |
260 | |
261 def put(self): | |
262 db._store[self.key_] = self.value | |
263 | |
264 @staticmethod | |
265 def get_async(key): | |
266 return _RPC(result=db._store.get(key)) | |
267 | |
268 @staticmethod | |
269 def delete_async(key): | |
270 db._store.pop(key, None) | |
271 return _RPC() | |
272 | |
273 @staticmethod | |
274 def put_async(value): | |
275 db._store[value.key] = value | |
276 return _RPC() | |
277 | |
278 class BlobReferenceProperty(object): | |
279 pass | |
280 | |
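A hedged sketch of a round trip through the fake db; the `PersistentValue` model, key path, and value are hypothetical:

```python
class PersistentValue(db.Model):
  value = db.StringProperty()

key = db.Key.from_path('PersistentValue', 'some/id')
entity = PersistentValue(value='hello')  # __init__ checks the property exists
entity.key = key

db.Model.put_async(entity).wait()                      # stored under entity.key
assert db.Model.get_async(key).get_result() is entity  # Key __eq__/__hash__ match
db.Model.delete_async(key).wait()
```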
281 # Executes any queued tasks synchronously as they are queued. | |
282 _task_runner = None | |
283 | |
284 def SetTaskRunnerForTest(task_runner): | |
285 global _task_runner | |
286 _task_runner = task_runner | |
287 | |
288 class SynchronousTaskQueue(object): | |
289 class Task(object): | |
290 def __init__(self, url=None, params={}): | |
291 self.url_ = url | |
292 self.params_ = params | |
293 | |
294 def GetUrl(self): | |
295 return self.url_ | |
296 | |
297 def GetCommit(self): | |
298 return self.params_.get('commit') | |
299 | |
300 class Queue(object): | |
301 def __init__(self, name='default'): | |
302 pass | |
303 | |
304 def add(self, task): | |
305 global _task_runner | |
306 if _task_runner: | |
307 _task_runner(task.GetUrl(), task.GetCommit()) | |
308 return _RPC() | |
309 | |
310 def purge(self): | |
311 return _RPC() | |
312 | |
313 taskqueue = SynchronousTaskQueue() | |
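A small sketch of wiring the synchronous queue in a test; the URL and commit values are arbitrary:

```python
ran = []
SetTaskRunnerForTest(lambda url, commit: ran.append((url, commit)))

queue = taskqueue.Queue('default')
queue.add(taskqueue.Task(url='/update', params={'commit': 'deadbeef'}))
assert ran == [('/update', 'deadbeef')]  # the task ran inline, as it was queued
queue.purge()
```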