OLD | NEW |
(Empty) | |
| 1 # Copyright (c) 2012 Mitch Garnaat http://garnaat.org/ |
| 2 # All rights reserved. |
| 3 # |
| 4 # Permission is hereby granted, free of charge, to any person obtaining a |
| 5 # copy of this software and associated documentation files (the |
| 6 # "Software"), to deal in the Software without restriction, including |
| 7 # without limitation the rights to use, copy, modify, merge, publish, dis- |
| 8 # tribute, sublicense, and/or sell copies of the Software, and to permit |
| 9 # persons to whom the Software is furnished to do so, subject to the fol- |
| 10 # lowing conditions: |
| 11 # |
| 12 # The above copyright notice and this permission notice shall be included |
| 13 # in all copies or substantial portions of the Software. |
| 14 # |
| 15 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS |
| 16 # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- |
| 17 # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT |
| 18 # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, |
| 19 # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, |
| 20 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS |
| 21 # IN THE SOFTWARE. |
| 22 |
| 23 """ |
| 24 Tests for Layer2 of Amazon DynamoDB |
| 25 """ |
| 26 import time |
| 27 import uuid |
| 28 from decimal import Decimal |
| 29 |
| 30 from tests.unit import unittest |
| 31 from boto.dynamodb.exceptions import DynamoDBKeyNotFoundError |
| 32 from boto.dynamodb.exceptions import DynamoDBConditionalCheckFailedError |
| 33 from boto.dynamodb.layer2 import Layer2 |
| 34 from boto.dynamodb.types import get_dynamodb_type, Binary |
| 35 from boto.dynamodb.condition import BEGINS_WITH, CONTAINS, GT |
| 36 from boto.compat import six, long_type |
| 37 |
| 38 |
class DynamoDBLayer2Test(unittest.TestCase):
    # NOTE(review): presumably a marker consumed by the test harness to tag
    # this case as a DynamoDB test — confirm against tests.unit. It is
    # shadowed per-instance by setUp(), which rebinds self.dynamodb to a
    # live Layer2 connection.
    dynamodb = True

| 42 def setUp(self): |
| 43 self.dynamodb = Layer2() |
| 44 self.hash_key_name = 'forum_name' |
| 45 self.hash_key_proto_value = '' |
| 46 self.range_key_name = 'subject' |
| 47 self.range_key_proto_value = '' |
| 48 self.table_name = 'sample_data_%s' % int(time.time()) |
| 49 |
| 50 def create_sample_table(self): |
| 51 schema = self.dynamodb.create_schema( |
| 52 self.hash_key_name, self.hash_key_proto_value, |
| 53 self.range_key_name, |
| 54 self.range_key_proto_value) |
| 55 table = self.create_table(self.table_name, schema, 5, 5) |
| 56 table.refresh(wait_for_active=True) |
| 57 return table |
| 58 |
| 59 def create_table(self, table_name, schema, read_units, write_units): |
| 60 result = self.dynamodb.create_table(table_name, schema, read_units, writ
e_units) |
| 61 self.addCleanup(self.dynamodb.delete_table, result) |
| 62 return result |
| 63 |
    def test_layer2_basic(self):
        """End-to-end exercise of the Layer2 API against live DynamoDB.

        Covers, in order: schema/table creation, throughput update,
        item put/get (consistent reads, projected attributes),
        conditional deletes, attribute updates, queries, scans,
        typed attributes (numbers, bools, sets), batch get/write,
        paginated scan generators, and final deletes.  Steps are
        order-dependent: later assertions rely on the items written
        and mutated by earlier steps.
        """
        print('--- running Amazon DynamoDB Layer2 tests ---')
        c = self.dynamodb

        # First create a schema for the table
        schema = c.create_schema(self.hash_key_name, self.hash_key_proto_value,
                                 self.range_key_name,
                                 self.range_key_proto_value)

        # Create another schema without a range key
        schema2 = c.create_schema('post_id', '')

        # Now create a table
        index = int(time.time())
        table_name = 'test-%d' % index
        read_units = 5
        write_units = 5
        table = self.create_table(table_name, schema, read_units, write_units)
        assert table.name == table_name
        assert table.schema.hash_key_name == self.hash_key_name
        assert table.schema.hash_key_type == get_dynamodb_type(self.hash_key_proto_value)
        assert table.schema.range_key_name == self.range_key_name
        assert table.schema.range_key_type == get_dynamodb_type(self.range_key_proto_value)
        assert table.read_units == read_units
        assert table.write_units == write_units
        assert table.item_count == 0
        assert table.size_bytes == 0

        # Create the second table (hash-key-only schema)
        table2_name = 'test-%d' % (index + 1)
        table2 = self.create_table(table2_name, schema2, read_units, write_units)

        # Wait for both tables to become active
        table.refresh(wait_for_active=True)
        table2.refresh(wait_for_active=True)

        # List tables and make sure the new ones are there
        table_names = c.list_tables()
        assert table_name in table_names
        assert table2_name in table_names

        # Update the table's ProvisionedThroughput
        new_read_units = 10
        new_write_units = 5
        table.update_throughput(new_read_units, new_write_units)

        # Wait for the table to be updated
        table.refresh(wait_for_active=True)
        assert table.read_units == new_read_units
        assert table.write_units == new_write_units

        # Put an item
        item1_key = 'Amazon DynamoDB'
        item1_range = 'DynamoDB Thread 1'
        item1_attrs = {
            'Message': 'DynamoDB thread 1 message text',
            'LastPostedBy': 'User A',
            'Views': 0,
            'Replies': 0,
            'Answered': 0,
            'Public': True,
            'Tags': set(['index', 'primarykey', 'table']),
            'LastPostDateTime': '12/9/2011 11:36:03 PM'}

        # Test a few corner cases with new_item

        # Try supplying a hash_key as an arg and as an item in attrs:
        # the explicit argument wins.
        item1_attrs[self.hash_key_name] = 'foo'
        foobar_item = table.new_item(item1_key, item1_range, item1_attrs)
        assert foobar_item.hash_key == item1_key

        # Try supplying a range_key as an arg and as an item in attrs:
        # the explicit argument wins here too.
        item1_attrs[self.range_key_name] = 'bar'
        foobar_item = table.new_item(item1_key, item1_range, item1_attrs)
        assert foobar_item.range_key == item1_range

        # Try supplying hash and range key only via the attrs dict
        foobar_item = table.new_item(attrs=item1_attrs)
        assert foobar_item.hash_key == 'foo'
        assert foobar_item.range_key == 'bar'

        del item1_attrs[self.hash_key_name]
        del item1_attrs[self.range_key_name]

        item1 = table.new_item(item1_key, item1_range, item1_attrs)
        # make sure the put() succeeds
        try:
            item1.put()
        except c.layer1.ResponseError as e:
            raise Exception("Item put failed: %s" % e)

        # Try to get an item that does not exist.
        self.assertRaises(DynamoDBKeyNotFoundError,
                          table.get_item, 'bogus_key', item1_range)

        # Now do a consistent read and check results
        item1_copy = table.get_item(item1_key, item1_range,
                                    consistent_read=True)
        assert item1_copy.hash_key == item1.hash_key
        assert item1_copy.range_key == item1.range_key
        # Only scalar types compare directly; sets may come back reordered.
        for attr_name in item1_attrs:
            val = item1_copy[attr_name]
            if isinstance(val, (int, long_type, float, six.string_types)):
                assert val == item1[attr_name]

        # Try retrieving only select attributes
        attributes = ['Message', 'Views']
        item1_small = table.get_item(item1_key, item1_range,
                                     attributes_to_get=attributes,
                                     consistent_read=True)
        for attr_name in item1_small:
            # The item will include the attributes we asked for as
            # well as the hashkey and rangekey, so filter those out.
            if attr_name not in (item1_small.hash_key_name,
                                 item1_small.range_key_name):
                assert attr_name in attributes

        self.assertTrue(table.has_item(item1_key, range_key=item1_range,
                                       consistent_read=True))

        # Try to delete the item with the wrong Expected value
        expected = {'Views': 1}
        self.assertRaises(DynamoDBConditionalCheckFailedError,
                          item1.delete, expected_value=expected)

        # Try to delete a value while expecting a non-existent attribute.
        # NOTE(review): if the delete were ever to succeed here nothing
        # fails immediately — item1 would silently be gone and the
        # conditional delete near the end of this test would surface it.
        expected = {'FooBar': True}
        try:
            item1.delete(expected_value=expected)
        except c.layer1.ResponseError:
            pass

        # Now update the existing object
        item1.add_attribute('Replies', 2)

        removed_attr = 'Public'
        item1.delete_attribute(removed_attr)

        # copy() before pop() so the original Tags set is left intact
        # for the post-save comparison below.
        removed_tag = item1_attrs['Tags'].copy().pop()
        item1.delete_attribute('Tags', set([removed_tag]))

        replies_by_set = set(['Adam', 'Arnie'])
        item1.put_attribute('RepliesBy', replies_by_set)
        retvals = item1.save(return_values='ALL_OLD')
        # Need more tests here for variations on return_values
        assert 'Attributes' in retvals

        # Check for correct updates
        item1_updated = table.get_item(item1_key, item1_range,
                                       consistent_read=True)
        assert item1_updated['Replies'] == item1_attrs['Replies'] + 2
        self.assertFalse(removed_attr in item1_updated)
        self.assertTrue(removed_tag not in item1_updated['Tags'])
        self.assertTrue('RepliesBy' in item1_updated)
        self.assertTrue(item1_updated['RepliesBy'] == replies_by_set)

        # Put a few more items into the table
        item2_key = 'Amazon DynamoDB'
        item2_range = 'DynamoDB Thread 2'
        item2_attrs = {
            'Message': 'DynamoDB thread 2 message text',
            'LastPostedBy': 'User A',
            'Views': 0,
            'Replies': 0,
            'Answered': 0,
            'Tags': set(["index", "primarykey", "table"]),
            'LastPost2DateTime': '12/9/2011 11:36:03 PM'}
        item2 = table.new_item(item2_key, item2_range, item2_attrs)
        item2.put()

        item3_key = 'Amazon S3'
        item3_range = 'S3 Thread 1'
        item3_attrs = {
            'Message': 'S3 Thread 1 message text',
            'LastPostedBy': 'User A',
            'Views': 0,
            'Replies': 0,
            'Answered': 0,
            'Tags': set(['largeobject', 'multipart upload']),
            'LastPostDateTime': '12/9/2011 11:36:03 PM'
        }
        item3 = table.new_item(item3_key, item3_range, item3_attrs)
        item3.put()

        # Put an item into the second (hash-only) table
        table2_item1_key = uuid.uuid4().hex
        table2_item1_attrs = {
            'DateTimePosted': '25/1/2011 12:34:56 PM',
            'Text': 'I think boto rocks and so does DynamoDB'
        }
        table2_item1 = table2.new_item(table2_item1_key,
                                       attrs=table2_item1_attrs)
        table2_item1.put()

        # Try a few queries: two threads share the 'Amazon DynamoDB' hash key
        items = table.query('Amazon DynamoDB', range_key_condition=BEGINS_WITH('DynamoDB'))
        n = 0
        for item in items:
            n += 1
        assert n == 2
        assert items.consumed_units > 0

        items = table.query('Amazon DynamoDB', range_key_condition=BEGINS_WITH('DynamoDB'),
                            request_limit=1, max_results=1)
        n = 0
        for item in items:
            n += 1
        assert n == 1
        assert items.consumed_units > 0

        # Try a few scans: three items exist in the table at this point
        items = table.scan()
        n = 0
        for item in items:
            n += 1
        assert n == 3
        assert items.consumed_units > 0

        # Only item1 has Replies > 0 (incremented by 2 above)
        items = table.scan(scan_filter={'Replies': GT(0)})
        n = 0
        for item in items:
            n += 1
        assert n == 1
        assert items.consumed_units > 0

        # Test some integer and float attributes
        integer_value = 42
        float_value = 345.678
        item3['IntAttr'] = integer_value
        item3['FloatAttr'] = float_value

        # Test booleans
        item3['TrueBoolean'] = True
        item3['FalseBoolean'] = False

        # Test some set values
        integer_set = set([1, 2, 3, 4, 5])
        float_set = set([1.1, 2.2, 3.3, 4.4, 5.5])
        mixed_set = set([1, 2, 3.3, 4, 5.555])
        str_set = set(['foo', 'bar', 'fie', 'baz'])
        item3['IntSetAttr'] = integer_set
        item3['FloatSetAttr'] = float_set
        item3['MixedSetAttr'] = mixed_set
        item3['StrSetAttr'] = str_set
        item3.put()

        # Now do a consistent read
        item4 = table.get_item(item3_key, item3_range, consistent_read=True)
        assert item4['IntAttr'] == integer_value
        assert item4['FloatAttr'] == float_value
        assert bool(item4['TrueBoolean']) is True
        assert bool(item4['FalseBoolean']) is False
        # The values will not necessarily be in the same order as when
        # we wrote them to the DB.
        for i in item4['IntSetAttr']:
            assert i in integer_set
        for i in item4['FloatSetAttr']:
            assert i in float_set
        for i in item4['MixedSetAttr']:
            assert i in mixed_set
        for i in item4['StrSetAttr']:
            assert i in str_set

        # Try a batch get
        batch_list = c.new_batch_list()
        batch_list.add_batch(table, [(item2_key, item2_range),
                                     (item3_key, item3_range)])
        response = batch_list.submit()
        assert len(response['Responses'][table.name]['Items']) == 2

        # Try an empty batch get
        batch_list = c.new_batch_list()
        batch_list.add_batch(table, [])
        response = batch_list.submit()
        assert response == {}

        # Try a few batch write operations
        item4_key = 'Amazon S3'
        item4_range = 'S3 Thread 2'
        item4_attrs = {
            'Message': 'S3 Thread 2 message text',
            'LastPostedBy': 'User A',
            'Views': 0,
            'Replies': 0,
            'Answered': 0,
            'Tags': set(['largeobject', 'multipart upload']),
            'LastPostDateTime': '12/9/2011 11:36:03 PM'
        }
        item5_key = 'Amazon S3'
        item5_range = 'S3 Thread 3'
        item5_attrs = {
            'Message': 'S3 Thread 3 message text',
            'LastPostedBy': 'User A',
            'Views': 0,
            'Replies': 0,
            'Answered': 0,
            'Tags': set(['largeobject', 'multipart upload']),
            'LastPostDateTime': '12/9/2011 11:36:03 PM'
        }
        item4 = table.new_item(item4_key, item4_range, item4_attrs)
        item5 = table.new_item(item5_key, item5_range, item5_attrs)
        batch_list = c.new_batch_write_list()
        batch_list.add_batch(table, puts=[item4, item5])
        response = batch_list.submit()
        # should really check for unprocessed items

        # Do some generator gymnastics: five items now exist
        results = table.scan(scan_filter={'Tags': CONTAINS('table')})
        assert results.scanned_count == 5
        results = table.scan(request_limit=2, max_results=5)
        assert results.count == 2
        for item in results:
            if results.count == 2:
                assert results.remaining == 4
                # Mutate the generator's bookkeeping mid-iteration and
                # force the next page fetch.
                results.remaining -= 2
                results.next_response()
            else:
                assert results.count == 4
                assert results.remaining in (0, 1)
        assert results.count == 4
        results = table.scan(request_limit=6, max_results=4)
        assert len(list(results)) == 4
        assert results.count == 4

        batch_list = c.new_batch_write_list()
        batch_list.add_batch(table, deletes=[(item4_key, item4_range),
                                             (item5_key, item5_range)])
        response = batch_list.submit()

        # Try queries
        results = table.query('Amazon DynamoDB', range_key_condition=BEGINS_WITH('DynamoDB'))
        n = 0
        for item in results:
            n += 1
        assert n == 2

        # Try to delete the item with the right Expected value
        expected = {'Views': 0}
        item1.delete(expected_value=expected)

        self.assertFalse(table.has_item(item1_key, range_key=item1_range,
                                        consistent_read=True))
        # Now delete the remaining items
        ret_vals = item2.delete(return_values='ALL_OLD')
        # some additional checks here would be useful
        assert ret_vals['Attributes'][self.hash_key_name] == item2_key
        assert ret_vals['Attributes'][self.range_key_name] == item2_range

        item3.delete()
        table2_item1.delete()
        print('--- tests completed ---')
| 415 |
| 416 def test_binary_attrs(self): |
| 417 c = self.dynamodb |
| 418 schema = c.create_schema(self.hash_key_name, self.hash_key_proto_value, |
| 419 self.range_key_name, |
| 420 self.range_key_proto_value) |
| 421 index = int(time.time()) |
| 422 table_name = 'test-%d' % index |
| 423 read_units = 5 |
| 424 write_units = 5 |
| 425 table = self.create_table(table_name, schema, read_units, write_units) |
| 426 table.refresh(wait_for_active=True) |
| 427 item1_key = 'Amazon S3' |
| 428 item1_range = 'S3 Thread 1' |
| 429 item1_attrs = { |
| 430 'Message': 'S3 Thread 1 message text', |
| 431 'LastPostedBy': 'User A', |
| 432 'Views': 0, |
| 433 'Replies': 0, |
| 434 'Answered': 0, |
| 435 'BinaryData': Binary(b'\x01\x02\x03\x04'), |
| 436 'BinarySequence': set([Binary(b'\x01\x02'), Binary(b'\x03\x04')]), |
| 437 'Tags': set(['largeobject', 'multipart upload']), |
| 438 'LastPostDateTime': '12/9/2011 11:36:03 PM' |
| 439 } |
| 440 item1 = table.new_item(item1_key, item1_range, item1_attrs) |
| 441 item1.put() |
| 442 |
| 443 retrieved = table.get_item(item1_key, item1_range, consistent_read=True) |
| 444 self.assertEqual(retrieved['Message'], 'S3 Thread 1 message text') |
| 445 self.assertEqual(retrieved['Views'], 0) |
| 446 self.assertEqual(retrieved['Tags'], |
| 447 set(['largeobject', 'multipart upload'])) |
| 448 self.assertEqual(retrieved['BinaryData'], Binary(b'\x01\x02\x03\x04')) |
| 449 # Also comparable directly to bytes: |
| 450 self.assertEqual(retrieved['BinaryData'], b'\x01\x02\x03\x04') |
| 451 self.assertEqual(retrieved['BinarySequence'], |
| 452 set([Binary(b'\x01\x02'), Binary(b'\x03\x04')])) |
| 453 |
| 454 def test_put_decimal_attrs(self): |
| 455 self.dynamodb.use_decimals() |
| 456 table = self.create_sample_table() |
| 457 item = table.new_item('foo', 'bar') |
| 458 item['decimalvalue'] = Decimal('1.12345678912345') |
| 459 item.put() |
| 460 retrieved = table.get_item('foo', 'bar') |
| 461 self.assertEqual(retrieved['decimalvalue'], Decimal('1.12345678912345')) |
| 462 |
| 463 @unittest.skipIf(six.PY3, "skipping lossy_float_conversion test for Python 3
.x") |
| 464 def test_lossy_float_conversion(self): |
| 465 table = self.create_sample_table() |
| 466 item = table.new_item('foo', 'bar') |
| 467 item['floatvalue'] = 1.12345678912345 |
| 468 item.put() |
| 469 retrieved = table.get_item('foo', 'bar')['floatvalue'] |
| 470 # Notice how this is not equal to the original value. |
| 471 self.assertNotEqual(1.12345678912345, retrieved) |
| 472 # Instead, it's truncated: |
| 473 self.assertEqual(1.12345678912, retrieved) |
| 474 |
| 475 def test_large_integers(self): |
| 476 # It's not just floating point numbers, large integers |
| 477 # can trigger rouding issues. |
| 478 self.dynamodb.use_decimals() |
| 479 table = self.create_sample_table() |
| 480 item = table.new_item('foo', 'bar') |
| 481 item['decimalvalue'] = Decimal('129271300103398600') |
| 482 item.put() |
| 483 retrieved = table.get_item('foo', 'bar') |
| 484 self.assertEqual(retrieved['decimalvalue'], Decimal('129271300103398600'
)) |
| 485 # Also comparable directly to an int. |
| 486 self.assertEqual(retrieved['decimalvalue'], 129271300103398600) |
| 487 |
| 488 def test_put_single_letter_attr(self): |
| 489 # When an attr is added that is a single letter, if it overlaps with |
| 490 # the built-in "types", the decoding used to fall down. Assert that |
| 491 # it's now working correctly. |
| 492 table = self.create_sample_table() |
| 493 item = table.new_item('foo', 'foo1') |
| 494 item.put_attribute('b', 4) |
| 495 stored = item.save(return_values='UPDATED_NEW') |
| 496 self.assertEqual(stored['Attributes'], {'b': 4}) |
OLD | NEW |