OLD | NEW |
---|---|
1 # Copyright 2014 The LUCI Authors. All rights reserved. | 1 # Copyright 2014 The LUCI Authors. All rights reserved. |
2 # Use of this source code is governed by the Apache v2.0 license that can be | 2 # Use of this source code is governed by the Apache v2.0 license that can be |
3 # found in the LICENSE file. | 3 # found in the LICENSE file. |
4 | 4 |
5 """This module defines Isolate Server model(s).""" | 5 """This module defines Isolate Server model(s).""" |
6 | 6 |
7 import datetime | 7 import datetime |
8 import hashlib | 8 import hashlib |
9 import logging | 9 import logging |
10 import random | 10 import random |
(...skipping 117 matching lines...) | |
128 | 128 |
129 def entry_key_from_id(key_id): | 129 def entry_key_from_id(key_id): |
130 """Returns the ndb.Key for the key_id.""" | 130 """Returns the ndb.Key for the key_id.""" |
131 hash_key = key_id.rsplit('/', 1)[1] | 131 hash_key = key_id.rsplit('/', 1)[1] |
132 N = config.settings().sharding_letters | 132 N = config.settings().sharding_letters |
133 return ndb.Key( | 133 return ndb.Key( |
134 ContentEntry, key_id, | 134 ContentEntry, key_id, |
135 parent=datastore_utils.shard_key(hash_key, N, 'ContentShard')) | 135 parent=datastore_utils.shard_key(hash_key, N, 'ContentShard')) |
136 | 136 |
137 | 137 |
138 def get_content(namespace, hash_key): | |
139 """Returns the content from either memcache or datastore. | |
[Review comment — M-A Ruel, 2016/04/14 19:22:36]
Reword the summary line to: "Returns the content from either memcache or datastore."
[Reply — kjlubick, 2016/04/14 19:59:07]
Done.
| |
140 | |
141 This does NOT return data from GCS, it is up to the client to do that. | |
142 | |
143 The first argument in the tuple is the content, the second is either | |
[Review comment — M-A Ruel, 2016/04/14 19:22:36]
Suggested docstring format:
    Returns:
      tuple(content, ContentEntry)
    At most [remainder of comment truncated in capture]
[Reply — kjlubick, 2016/04/14 19:59:08]
Done.
| |
144 None, if loaded from memcache, or the entity, if loaded from datastore. | |
145 | |
146 Raises LookupError if the content cannot be found. | |
147 Raises ValueError if the hash_key is invalid. | |
148 """ | |
149 memcache_entry = memcache.get(hash_key, namespace='table_%s' % namespace) | |
150 if memcache_entry is not None: | |
151 return (memcache_entry, None) | |
152 else: | |
153 # Raises ValueError | |
154 key = get_entry_key(namespace, hash_key) | |
155 entity = key.get() | |
156 if entity is None: | |
157 raise LookupError("namespace %s, key %s does not refer to anything" % | |
158 (namespace, hash_key)) | |
159 return (entity.content, entity) | |
160 | |
161 | |
138 def expiration_jitter(now, expiration): | 162 def expiration_jitter(now, expiration): |
139 """Returns expiration/next_tag pair to set in a ContentEntry.""" | 163 """Returns expiration/next_tag pair to set in a ContentEntry.""" |
140 jittered = random.uniform(1, 1.2) * expiration | 164 jittered = random.uniform(1, 1.2) * expiration |
141 expiration = now + datetime.timedelta(seconds=jittered) | 165 expiration = now + datetime.timedelta(seconds=jittered) |
142 next_tag = now + datetime.timedelta(seconds=jittered*0.1) | 166 next_tag = now + datetime.timedelta(seconds=jittered*0.1) |
143 return expiration, next_tag | 167 return expiration, next_tag |
144 | 168 |
145 | 169 |
146 def expand_content(namespace, source): | 170 def expand_content(namespace, source): |
147 """Yields expanded data from source.""" | 171 """Yields expanded data from source.""" |
(...skipping 63 matching lines...) | |
211 ndb.delete_multi(keys_to_delete) | 235 ndb.delete_multi(keys_to_delete) |
212 # Note that some content entries may NOT have corresponding GS files. That | 236 # Note that some content entries may NOT have corresponding GS files. That |
213 # happens for small entries stored inline in the datastore or memcache. Since | 237 # happens for small entries stored inline in the datastore or memcache. Since |
214 # this function operates only on keys, it can't distinguish "large" entries | 238 # this function operates only on keys, it can't distinguish "large" entries |
215 # stored in GS from "small" ones stored inline. So instead it tries to delete | 239 # stored in GS from "small" ones stored inline. So instead it tries to delete |
216 # all corresponding GS files, silently skipping ones that are not there. | 240 # all corresponding GS files, silently skipping ones that are not there. |
217 gcs.delete_files( | 241 gcs.delete_files( |
218 config.settings().gs_bucket, | 242 config.settings().gs_bucket, |
219 (i.id() for i in keys_to_delete), | 243 (i.id() for i in keys_to_delete), |
220 ignore_missing=True) | 244 ignore_missing=True) |
OLD | NEW |