Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(302)

Side by Side Diff: third_party/gsutil/boto/tests/integration/dynamodb/test_layer2.py

Issue 12042069: Scripts to download files from google storage based on sha1 sums (Closed) Base URL: https://chromium.googlesource.com/chromium/tools/depot_tools.git@master
Patch Set: Review fixes, updated gsutil Created 7 years, 10 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
(Empty)
1 # Copyright (c) 2012 Mitch Garnaat http://garnaat.org/
2 # All rights reserved.
3 #
4 # Permission is hereby granted, free of charge, to any person obtaining a
5 # copy of this software and associated documentation files (the
6 # "Software"), to deal in the Software without restriction, including
7 # without limitation the rights to use, copy, modify, merge, publish, dis-
8 # tribute, sublicense, and/or sell copies of the Software, and to permit
9 # persons to whom the Software is furnished to do so, subject to the fol-
10 # lowing conditions:
11 #
12 # The above copyright notice and this permission notice shall be included
13 # in all copies or substantial portions of the Software.
14 #
15 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
16 # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
17 # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
18 # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
19 # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 # IN THE SOFTWARE.
22
23 """
24 Tests for Layer2 of Amazon DynamoDB
25 """
26 import unittest
27 import time
28 import uuid
29 from decimal import Decimal
30
31 from boto.dynamodb.exceptions import DynamoDBKeyNotFoundError
32 from boto.dynamodb.exceptions import DynamoDBConditionalCheckFailedError
33 from boto.dynamodb.layer2 import Layer2
34 from boto.dynamodb.types import get_dynamodb_type, Binary
35 from boto.dynamodb.condition import BEGINS_WITH, CONTAINS, GT
36
37
class DynamoDBLayer2Test (unittest.TestCase):
    # Class-level marker read by the test runner to tag this suite as a
    # DynamoDB integration test; it is rebound to the live Layer2
    # connection in setUp().
    dynamodb = True

    def setUp(self):
        """Open a Layer2 connection and define the key names/prototype
        values shared by the tests in this class.

        The prototype values determine the key types via
        get_dynamodb_type() when a schema is created from them.
        """
        self.dynamodb = Layer2()
        self.hash_key_name = 'forum_name'
        self.hash_key_proto_value = ''
        self.range_key_name = 'subject'
        self.range_key_proto_value = ''
        # Timestamp suffix keeps table names unique across test runs.
        self.table_name = 'sample_data_%s' % int(time.time())
48
49 def create_sample_table(self):
50 schema = self.dynamodb.create_schema(
51 self.hash_key_name, self.hash_key_proto_value,
52 self.range_key_name,
53 self.range_key_proto_value)
54 table = self.create_table(self.table_name, schema, 5, 5)
55 table.refresh(wait_for_active=True)
56 return table
57
58 def create_table(self, table_name, schema, read_units, write_units):
59 result = self.dynamodb.create_table(table_name, schema, read_units, writ e_units)
60 self.addCleanup(self.dynamodb.delete_table, result)
61 return result
62
    def test_layer2_basic(self):
        """End-to-end exercise of the Layer2 API against live DynamoDB:
        schema/table creation, throughput update, item CRUD, conditional
        deletes, attribute updates, queries, scans, batch gets, batch
        writes and result-set pagination.  Steps are strictly ordered;
        later assertions depend on earlier writes.
        """
        print '--- running Amazon DynamoDB Layer2 tests ---'
        c = self.dynamodb

        # First create a schema for the table (hash + range key).
        schema = c.create_schema(self.hash_key_name, self.hash_key_proto_value,
                                 self.range_key_name,
                                 self.range_key_proto_value)

        # Create another schema without a range key.
        schema2 = c.create_schema('post_id', '')

        # Now create a table.
        index = int(time.time())
        table_name = 'test-%d' % index
        read_units = 5
        write_units = 5
        table = self.create_table(table_name, schema, read_units, write_units)
        assert table.name == table_name
        assert table.schema.hash_key_name == self.hash_key_name
        assert table.schema.hash_key_type == get_dynamodb_type(self.hash_key_proto_value)
        assert table.schema.range_key_name == self.range_key_name
        assert table.schema.range_key_type == get_dynamodb_type(self.range_key_proto_value)
        assert table.read_units == read_units
        assert table.write_units == write_units
        assert table.item_count == 0
        assert table.size_bytes == 0

        # Create the second (hash-key-only) table.
        table2_name = 'test-%d' % (index + 1)
        table2 = self.create_table(table2_name, schema2, read_units, write_units)

        # Wait for both tables to become active before using them.
        table.refresh(wait_for_active=True)
        table2.refresh(wait_for_active=True)

        # List tables and make sure the new ones are there.
        table_names = c.list_tables()
        assert table_name in table_names
        assert table2_name in table_names

        # Update the table's ProvisionedThroughput.
        new_read_units = 10
        new_write_units = 5
        table.update_throughput(new_read_units, new_write_units)

        # Wait for the throughput update to take effect.
        table.refresh(wait_for_active=True)
        assert table.read_units == new_read_units
        assert table.write_units == new_write_units

        # Put an item.
        item1_key = 'Amazon DynamoDB'
        item1_range = 'DynamoDB Thread 1'
        item1_attrs = {
            'Message': 'DynamoDB thread 1 message text',
            'LastPostedBy': 'User A',
            'Views': 0,
            'Replies': 0,
            'Answered': 0,
            'Public': True,
            'Tags': set(['index', 'primarykey', 'table']),
            'LastPostDateTime': '12/9/2011 11:36:03 PM'}

        # Test a few corner cases with new_item.

        # Supply a hash_key both as an arg and inside attrs; the
        # explicit argument wins.
        item1_attrs[self.hash_key_name] = 'foo'
        foobar_item = table.new_item(item1_key, item1_range, item1_attrs)
        assert foobar_item.hash_key == item1_key

        # Supply a range_key both as an arg and inside attrs; again the
        # explicit argument wins.
        item1_attrs[self.range_key_name] = 'bar'
        foobar_item = table.new_item(item1_key, item1_range, item1_attrs)
        assert foobar_item.range_key == item1_range

        # Supply hash and range key only via the attrs dict.
        foobar_item = table.new_item(attrs=item1_attrs)
        assert foobar_item.hash_key == 'foo'
        assert foobar_item.range_key == 'bar'

        # Restore attrs to key-free form before the real put.
        del item1_attrs[self.hash_key_name]
        del item1_attrs[self.range_key_name]

        item1 = table.new_item(item1_key, item1_range, item1_attrs)
        # Make sure the put() succeeds.
        try:
            item1.put()
        except c.layer1.ResponseError, e:
            raise Exception("Item put failed: %s" % e)

        # Try to get an item that does not exist.
        self.assertRaises(DynamoDBKeyNotFoundError,
                          table.get_item, 'bogus_key', item1_range)

        # Now do a consistent read and check results.
        item1_copy = table.get_item(item1_key, item1_range,
                                    consistent_read=True)
        assert item1_copy.hash_key == item1.hash_key
        assert item1_copy.range_key == item1.range_key
        for attr_name in item1_copy:
            val = item1_copy[attr_name]
            # Only scalar attributes are compared here; set-valued
            # attributes are checked separately later in this test.
            if isinstance(val, (int, long, float, basestring)):
                assert val == item1[attr_name]

        # Try retrieving only select attributes.
        attributes = ['Message', 'Views']
        item1_small = table.get_item(item1_key, item1_range,
                                     attributes_to_get=attributes,
                                     consistent_read=True)
        for attr_name in item1_small:
            # The item will include the attributes we asked for as
            # well as the hashkey and rangekey, so filter those out.
            if attr_name not in (item1_small.hash_key_name,
                                 item1_small.range_key_name):
                assert attr_name in attributes

        self.assertTrue(table.has_item(item1_key, range_key=item1_range,
                                       consistent_read=True))

        # Try to delete the item with the wrong Expected value.
        expected = {'Views': 1}
        self.assertRaises(DynamoDBConditionalCheckFailedError,
                          item1.delete, expected_value=expected)

        # Try to delete a value while expecting a non-existent attribute.
        expected = {'FooBar': True}
        try:
            item1.delete(expected_value=expected)
        except c.layer1.ResponseError, e:
            # Best-effort: a failure here is tolerated deliberately.
            pass

        # Now update the existing object.
        item1.add_attribute('Replies', 2)

        removed_attr = 'Public'
        item1.delete_attribute(removed_attr)

        # Remove one (arbitrary) tag from the set-valued attribute;
        # copy() so the local attrs dict is not mutated by pop().
        removed_tag = item1_attrs['Tags'].copy().pop()
        item1.delete_attribute('Tags', set([removed_tag]))

        replies_by_set = set(['Adam', 'Arnie'])
        item1.put_attribute('RepliesBy', replies_by_set)
        retvals = item1.save(return_values='ALL_OLD')
        # TODO: need more tests here for variations on return_values.
        assert 'Attributes' in retvals

        # Check for correct updates.
        item1_updated = table.get_item(item1_key, item1_range,
                                       consistent_read=True)
        assert item1_updated['Replies'] == item1_attrs['Replies'] + 2
        self.assertFalse(removed_attr in item1_updated)
        self.assertTrue(removed_tag not in item1_updated['Tags'])
        self.assertTrue('RepliesBy' in item1_updated)
        self.assertTrue(item1_updated['RepliesBy'] == replies_by_set)

        # Put a few more items into the table.
        item2_key = 'Amazon DynamoDB'
        item2_range = 'DynamoDB Thread 2'
        item2_attrs = {
            'Message': 'DynamoDB thread 2 message text',
            'LastPostedBy': 'User A',
            'Views': 0,
            'Replies': 0,
            'Answered': 0,
            'Tags': set(["index", "primarykey", "table"]),
            # NOTE(review): key is 'LastPost2DateTime', unlike the
            # 'LastPostDateTime' used by the other items — possibly a
            # typo, but it is live data so it is preserved; confirm.
            'LastPost2DateTime': '12/9/2011 11:36:03 PM'}
        item2 = table.new_item(item2_key, item2_range, item2_attrs)
        item2.put()

        item3_key = 'Amazon S3'
        item3_range = 'S3 Thread 1'
        item3_attrs = {
            'Message': 'S3 Thread 1 message text',
            'LastPostedBy': 'User A',
            'Views': 0,
            'Replies': 0,
            'Answered': 0,
            'Tags': set(['largeobject', 'multipart upload']),
            'LastPostDateTime': '12/9/2011 11:36:03 PM'
            }
        item3 = table.new_item(item3_key, item3_range, item3_attrs)
        item3.put()

        # Put an item into the second (hash-key-only) table.
        table2_item1_key = uuid.uuid4().hex
        table2_item1_attrs = {
            'DateTimePosted': '25/1/2011 12:34:56 PM',
            'Text': 'I think boto rocks and so does DynamoDB'
            }
        table2_item1 = table2.new_item(table2_item1_key,
                                       attrs=table2_item1_attrs)
        table2_item1.put()

        # Try a few queries: both 'DynamoDB Thread *' items match.
        items = table.query('Amazon DynamoDB', range_key_condition=BEGINS_WITH('DynamoDB'))
        n = 0
        for item in items:
            n += 1
        assert n == 2
        assert items.consumed_units > 0

        # Same query capped to a single result via max_results.
        items = table.query('Amazon DynamoDB', range_key_condition=BEGINS_WITH('DynamoDB'),
                            request_limit=1, max_results=1)
        n = 0
        for item in items:
            n += 1
        assert n == 1
        assert items.consumed_units > 0

        # Try a few scans; three items have been put into this table.
        items = table.scan()
        n = 0
        for item in items:
            n += 1
        assert n == 3
        assert items.consumed_units > 0

        # Only item1 had Replies incremented above 0.
        items = table.scan(scan_filter={'Replies': GT(0)})
        n = 0
        for item in items:
            n += 1
        assert n == 1
        assert items.consumed_units > 0

        # Test some integer and float attributes.
        integer_value = 42
        float_value = 345.678
        item3['IntAttr'] = integer_value
        item3['FloatAttr'] = float_value

        # Test booleans.
        item3['TrueBoolean'] = True
        item3['FalseBoolean'] = False

        # Test some set values.
        integer_set = set([1, 2, 3, 4, 5])
        float_set = set([1.1, 2.2, 3.3, 4.4, 5.5])
        mixed_set = set([1, 2, 3.3, 4, 5.555])
        str_set = set(['foo', 'bar', 'fie', 'baz'])
        item3['IntSetAttr'] = integer_set
        item3['FloatSetAttr'] = float_set
        item3['MixedSetAttr'] = mixed_set
        item3['StrSetAttr'] = str_set
        item3.put()

        # Now do a consistent read and verify every attribute type.
        item4 = table.get_item(item3_key, item3_range, consistent_read=True)
        assert item4['IntAttr'] == integer_value
        assert item4['FloatAttr'] == float_value
        assert bool(item4['TrueBoolean']) is True
        assert bool(item4['FalseBoolean']) is False
        # The set values will not necessarily come back in the same
        # order as when we wrote them to the DB, so test membership.
        for i in item4['IntSetAttr']:
            assert i in integer_set
        for i in item4['FloatSetAttr']:
            assert i in float_set
        for i in item4['MixedSetAttr']:
            assert i in mixed_set
        for i in item4['StrSetAttr']:
            assert i in str_set

        # Try a batch get.
        batch_list = c.new_batch_list()
        batch_list.add_batch(table, [(item2_key, item2_range),
                                     (item3_key, item3_range)])
        response = batch_list.submit()
        assert len(response['Responses'][table.name]['Items']) == 2

        # Try an empty batch get.
        batch_list = c.new_batch_list()
        batch_list.add_batch(table, [])
        response = batch_list.submit()
        assert response == {}

        # Try a few batch write operations.
        item4_key = 'Amazon S3'
        item4_range = 'S3 Thread 2'
        item4_attrs = {
            'Message': 'S3 Thread 2 message text',
            'LastPostedBy': 'User A',
            'Views': 0,
            'Replies': 0,
            'Answered': 0,
            'Tags': set(['largeobject', 'multipart upload']),
            'LastPostDateTime': '12/9/2011 11:36:03 PM'
            }
        item5_key = 'Amazon S3'
        item5_range = 'S3 Thread 3'
        item5_attrs = {
            'Message': 'S3 Thread 3 message text',
            'LastPostedBy': 'User A',
            'Views': 0,
            'Replies': 0,
            'Answered': 0,
            'Tags': set(['largeobject', 'multipart upload']),
            'LastPostDateTime': '12/9/2011 11:36:03 PM'
            }
        item4 = table.new_item(item4_key, item4_range, item4_attrs)
        item5 = table.new_item(item5_key, item5_range, item5_attrs)
        batch_list = c.new_batch_write_list()
        batch_list.add_batch(table, puts=[item4, item5])
        response = batch_list.submit()
        # TODO: should really check for unprocessed items.

        # Do some generator gymnastics: exercise lazy paging of scan
        # result sets via request_limit/max_results.
        results = table.scan(scan_filter={'Tags': CONTAINS('table')})
        assert results.scanned_count == 5
        results = table.scan(request_limit=2, max_results=5)
        assert results.count == 2
        for item in results:
            if results.count == 2:
                assert results.remaining == 4
                results.remaining -= 2
                results.next_response()
            else:
                assert results.count == 4
                assert results.remaining in (0, 1)
        assert results.count == 4
        results = table.scan(request_limit=6, max_results=4)
        assert len(list(results)) == 4
        assert results.count == 4

        # Batch-delete the two batch-written items again.
        batch_list = c.new_batch_write_list()
        batch_list.add_batch(table, deletes=[(item4_key, item4_range),
                                             (item5_key, item5_range)])
        response = batch_list.submit()

        # Try queries again after the batch delete.
        results = table.query('Amazon DynamoDB', range_key_condition=BEGINS_WITH('DynamoDB'))
        n = 0
        for item in results:
            n += 1
        assert n == 2

        # Try to delete the item with the right Expected value.
        expected = {'Views': 0}
        item1.delete(expected_value=expected)

        self.assertFalse(table.has_item(item1_key, range_key=item1_range,
                                        consistent_read=True))
        # Now delete the remaining items.
        ret_vals = item2.delete(return_values='ALL_OLD')
        # TODO: some additional checks here would be useful.
        assert ret_vals['Attributes'][self.hash_key_name] == item2_key
        assert ret_vals['Attributes'][self.range_key_name] == item2_range

        item3.delete()
        table2_item1.delete()
        print '--- tests completed ---'
414
415 def test_binary_attrs(self):
416 c = self.dynamodb
417 schema = c.create_schema(self.hash_key_name, self.hash_key_proto_value,
418 self.range_key_name,
419 self.range_key_proto_value)
420 index = int(time.time())
421 table_name = 'test-%d' % index
422 read_units = 5
423 write_units = 5
424 table = self.create_table(table_name, schema, read_units, write_units)
425 table.refresh(wait_for_active=True)
426 item1_key = 'Amazon S3'
427 item1_range = 'S3 Thread 1'
428 item1_attrs = {
429 'Message': 'S3 Thread 1 message text',
430 'LastPostedBy': 'User A',
431 'Views': 0,
432 'Replies': 0,
433 'Answered': 0,
434 'BinaryData': Binary('\x01\x02\x03\x04'),
435 'BinarySequence': set([Binary('\x01\x02'), Binary('\x03\x04')]),
436 'Tags': set(['largeobject', 'multipart upload']),
437 'LastPostDateTime': '12/9/2011 11:36:03 PM'
438 }
439 item1 = table.new_item(item1_key, item1_range, item1_attrs)
440 item1.put()
441
442 retrieved = table.get_item(item1_key, item1_range, consistent_read=True)
443 self.assertEqual(retrieved['Message'], 'S3 Thread 1 message text')
444 self.assertEqual(retrieved['Views'], 0)
445 self.assertEqual(retrieved['Tags'],
446 set(['largeobject', 'multipart upload']))
447 self.assertEqual(retrieved['BinaryData'], Binary('\x01\x02\x03\x04'))
448 # Also comparable directly to bytes:
449 self.assertEqual(retrieved['BinaryData'], bytes('\x01\x02\x03\x04'))
450 self.assertEqual(retrieved['BinarySequence'],
451 set([Binary('\x01\x02'), Binary('\x03\x04')]))
452
453 def test_put_decimal_attrs(self):
454 self.dynamodb.use_decimals()
455 table = self.create_sample_table()
456 item = table.new_item('foo', 'bar')
457 item['decimalvalue'] = Decimal('1.12345678912345')
458 item.put()
459 retrieved = table.get_item('foo', 'bar')
460 self.assertEqual(retrieved['decimalvalue'], Decimal('1.12345678912345'))
461
462 def test_lossy_float_conversion(self):
463 table = self.create_sample_table()
464 item = table.new_item('foo', 'bar')
465 item['floatvalue'] = 1.12345678912345
466 item.put()
467 retrieved = table.get_item('foo', 'bar')['floatvalue']
468 # Notice how this is not equal to the original value.
469 self.assertNotEqual(1.12345678912345, retrieved)
470 # Instead, it's truncated:
471 self.assertEqual(1.12345678912, retrieved)
472
473 def test_large_integers(self):
474 # It's not just floating point numbers, large integers
475 # can trigger rouding issues.
476 self.dynamodb.use_decimals()
477 table = self.create_sample_table()
478 item = table.new_item('foo', 'bar')
479 item['decimalvalue'] = Decimal('129271300103398600')
480 item.put()
481 retrieved = table.get_item('foo', 'bar')
482 self.assertEqual(retrieved['decimalvalue'], Decimal('129271300103398600' ))
483 # Also comparable directly to an int.
484 self.assertEqual(retrieved['decimalvalue'], 129271300103398600)
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698