| OLD | NEW |
| 1 # Copyright (c) 2013 Amazon.com, Inc. or its affiliates. | 1 # Copyright (c) 2013 Amazon.com, Inc. or its affiliates. |
| 2 # All rights reserved. | 2 # All rights reserved. |
| 3 # | 3 # |
| 4 # Permission is hereby granted, free of charge, to any person obtaining a | 4 # Permission is hereby granted, free of charge, to any person obtaining a |
| 5 # copy of this software and associated documentation files (the | 5 # copy of this software and associated documentation files (the |
| 6 # "Software"), to deal in the Software without restriction, including | 6 # "Software"), to deal in the Software without restriction, including |
| 7 # without limitation the rights to use, copy, modify, merge, publish, dis- | 7 # without limitation the rights to use, copy, modify, merge, publish, dis- |
| 8 # tribute, sublicense, and/or sell copies of the Software, and to permit | 8 # tribute, sublicense, and/or sell copies of the Software, and to permit |
| 9 # persons to whom the Software is furnished to do so, subject to the fol- | 9 # persons to whom the Software is furnished to do so, subject to the fol- |
| 10 # lowing conditions: | 10 # lowing conditions: |
| (...skipping 13 matching lines...) Expand all Loading... |
| 24 Tests for DynamoDB v2 high-level abstractions. | 24 Tests for DynamoDB v2 high-level abstractions. |
| 25 """ | 25 """ |
| 26 from __future__ import with_statement | 26 from __future__ import with_statement |
| 27 | 27 |
| 28 import os | 28 import os |
| 29 import time | 29 import time |
| 30 | 30 |
| 31 from tests.unit import unittest | 31 from tests.unit import unittest |
| 32 from boto.dynamodb2 import exceptions | 32 from boto.dynamodb2 import exceptions |
| 33 from boto.dynamodb2.fields import (HashKey, RangeKey, KeysOnlyIndex, | 33 from boto.dynamodb2.fields import (HashKey, RangeKey, KeysOnlyIndex, |
| 34 GlobalKeysOnlyIndex) | 34 GlobalKeysOnlyIndex, GlobalIncludeIndex) |
| 35 from boto.dynamodb2.items import Item | 35 from boto.dynamodb2.items import Item |
| 36 from boto.dynamodb2.table import Table | 36 from boto.dynamodb2.table import Table |
| 37 from boto.dynamodb2.types import NUMBER | 37 from boto.dynamodb2.types import NUMBER |
| 38 | 38 |
| 39 try: | 39 try: |
| 40 import json | 40 import json |
| 41 except ImportError: | 41 except ImportError: |
| 42 import simplejson as json | 42 import simplejson as json |
| 43 | 43 |
| 44 | 44 |
| (...skipping 57 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 102 batch.delete_item(username='alice', friend_count=2) | 102 batch.delete_item(username='alice', friend_count=2) |
| 103 batch.put_item({ | 103 batch.put_item({ |
| 104 'username': 'bob', | 104 'username': 'bob', |
| 105 'first_name': 'Bob', | 105 'first_name': 'Bob', |
| 106 'last_name': 'Smith', | 106 'last_name': 'Smith', |
| 107 'friend_count': 1 | 107 'friend_count': 1 |
| 108 }) | 108 }) |
| 109 | 109 |
| 110 time.sleep(5) | 110 time.sleep(5) |
| 111 | 111 |
| 112 # Does it exist? It should. |
| 113 self.assertTrue(users.has_item(username='jane', friend_count=3)) |
| 114 # But this shouldn't be there... |
| 115 self.assertFalse(users.has_item( |
| 116 username='mrcarmichaeljones', |
| 117 friend_count=72948 |
| 118 )) |
| 119 |
| 112 # Test getting an item & updating it. | 120 # Test getting an item & updating it. |
| 113 # This is the "safe" variant (only write if there have been no | 121 # This is the "safe" variant (only write if there have been no |
| 114 # changes). | 122 # changes). |
| 115 jane = users.get_item(username='jane', friend_count=3) | 123 jane = users.get_item(username='jane', friend_count=3) |
| 116 self.assertEqual(jane['first_name'], 'Jane') | 124 self.assertEqual(jane['first_name'], 'Jane') |
| 117 jane['last_name'] = 'Doh' | 125 jane['last_name'] = 'Doh' |
| 118 self.assertTrue(jane.save()) | 126 self.assertTrue(jane.save()) |
| 119 | 127 |
| 120 # Test strongly consistent getting of an item. | 128 # Test strongly consistent getting of an item. |
| 121 # Additionally, test the overwrite behavior. | 129 # Additionally, test the overwrite behavior. |
| (...skipping 82 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 204 }) | 212 }) |
| 205 self.assertTrue(sadie.partial_save()) | 213 self.assertTrue(sadie.partial_save()) |
| 206 serverside_sadie = users.get_item( | 214 serverside_sadie = users.get_item( |
| 207 username='sadie', | 215 username='sadie', |
| 208 friend_count=7, | 216 friend_count=7, |
| 209 consistent=True | 217 consistent=True |
| 210 ) | 218 ) |
| 211 self.assertEqual(serverside_sadie['first_name'], 'Sadie') | 219 self.assertEqual(serverside_sadie['first_name'], 'Sadie') |
| 212 | 220 |
| 213 # Test the eventually consistent query. | 221 # Test the eventually consistent query. |
| 214 results = users.query( | 222 results = users.query_2( |
| 215 username__eq='johndoe', | 223 username__eq='johndoe', |
| 216 last_name__eq='Doe', | 224 last_name__eq='Doe', |
| 217 index='LastNameIndex', | 225 index='LastNameIndex', |
| 218 attributes=('username',), | 226 attributes=('username',), |
| 219 reverse=True | 227 reverse=True |
| 220 ) | 228 ) |
| 221 | 229 |
| 222 for res in results: | 230 for res in results: |
| 223 self.assertTrue(res['username'] in ['johndoe',]) | 231 self.assertTrue(res['username'] in ['johndoe',]) |
| 224 self.assertEqual(res.keys(), ['username']) | 232 self.assertEqual(res.keys(), ['username']) |
| 225 | 233 |
| 234 # Ensure that queries with attributes don't return the hash key. |
| 235 results = users.query_2( |
| 236 username__eq='johndoe', |
| 237 friend_count__eq=4, |
| 238 attributes=('first_name',) |
| 239 ) |
| 240 |
| 241 for res in results: |
| 242 self.assertTrue(res['first_name'] in ['John',]) |
| 243 self.assertEqual(res.keys(), ['first_name']) |
| 226 | 244 |
| 227 # Test the strongly consistent query. | 245 # Test the strongly consistent query. |
| 228 c_results = users.query( | 246 c_results = users.query_2( |
| 229 username__eq='johndoe', | 247 username__eq='johndoe', |
| 230 last_name__eq='Doe', | 248 last_name__eq='Doe', |
| 231 index='LastNameIndex', | 249 index='LastNameIndex', |
| 232 reverse=True, | 250 reverse=True, |
| 233 consistent=True | 251 consistent=True |
| 234 ) | 252 ) |
| 235 | 253 |
| 236 for res in c_results: | 254 for res in c_results: |
| 237 self.assertTrue(res['username'] in ['johndoe',]) | 255 self.assertTrue(res['username'] in ['johndoe',]) |
| 238 | 256 |
| 257 # Test a query with query filters |
| 258 results = users.query_2( |
| 259 username__eq='johndoe', |
| 260 query_filter={ |
| 261 'first_name__beginswith': 'J' |
| 262 }, |
| 263 attributes=('first_name',) |
| 264 ) |
| 265 |
| 266 for res in results: |
| 267 self.assertTrue(res['first_name'] in ['John']) |
| 268 |
| 239 # Test scans without filters. | 269 # Test scans without filters. |
| 240 all_users = users.scan(limit=7) | 270 all_users = users.scan(limit=7) |
| 241 self.assertEqual(all_users.next()['username'], 'bob') | 271 self.assertEqual(all_users.next()['username'], 'bob') |
| 242 self.assertEqual(all_users.next()['username'], 'jane') | 272 self.assertEqual(all_users.next()['username'], 'jane') |
| 243 self.assertEqual(all_users.next()['username'], 'johndoe') | 273 self.assertEqual(all_users.next()['username'], 'johndoe') |
| 244 | 274 |
| 245 # Test scans with a filter. | 275 # Test scans with a filter. |
| 246 filtered_users = users.scan(limit=2, username__beginswith='j') | 276 filtered_users = users.scan(limit=2, username__beginswith='j') |
| 247 self.assertEqual(filtered_users.next()['username'], 'jane') | 277 self.assertEqual(filtered_users.next()['username'], 'jane') |
| 248 self.assertEqual(filtered_users.next()['username'], 'johndoe') | 278 self.assertEqual(filtered_users.next()['username'], 'johndoe') |
| (...skipping 48 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 297 self.assertEqual(admins.throughput['read'], 5) | 327 self.assertEqual(admins.throughput['read'], 5) |
| 298 self.assertEqual(admins.indexes, []) | 328 self.assertEqual(admins.indexes, []) |
| 299 | 329 |
| 300 # A single query term should fail on a table with *ONLY* a HashKey. | 330 # A single query term should fail on a table with *ONLY* a HashKey. |
| 301 self.assertRaises( | 331 self.assertRaises( |
| 302 exceptions.QueryError, | 332 exceptions.QueryError, |
| 303 admins.query, | 333 admins.query, |
| 304 username__eq='johndoe' | 334 username__eq='johndoe' |
| 305 ) | 335 ) |
| 306 # But it shouldn't break on more complex tables. | 336 # But it shouldn't break on more complex tables. |
| 307 res = users.query(username__eq='johndoe') | 337 res = users.query_2(username__eq='johndoe') |
| 308 | 338 |
| 309 # Test putting with/without sets. | 339 # Test putting with/without sets. |
| 310 mau5_created = users.put_item(data={ | 340 mau5_created = users.put_item(data={ |
| 311 'username': 'mau5', | 341 'username': 'mau5', |
| 312 'first_name': 'dead', | 342 'first_name': 'dead', |
| 313 'last_name': 'mau5', | 343 'last_name': 'mau5', |
| 314 'friend_count': 2, | 344 'friend_count': 2, |
| 315 'friends': set(['skrill', 'penny']), | 345 'friends': set(['skrill', 'penny']), |
| 316 }) | 346 }) |
| 317 self.assertTrue(mau5_created) | 347 self.assertTrue(mau5_created) |
| (...skipping 70 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 388 }, | 418 }, |
| 389 global_indexes={ | 419 global_indexes={ |
| 390 'StuffIndex': { | 420 'StuffIndex': { |
| 391 'read': 1, | 421 'read': 1, |
| 392 'write': 2 | 422 'write': 2 |
| 393 } | 423 } |
| 394 } | 424 } |
| 395 ) | 425 ) |
| 396 | 426 |
| 397 # Wait again for the changes to finish propagating. | 427 # Wait again for the changes to finish propagating. |
| 398 time.sleep(120) | 428 time.sleep(150) |
| 429 |
| 430 def test_gsi_with_just_hash_key(self): |
| 431 # GSI allows for querying off of different keys. This is behavior we |
| 432 # previously disallowed (due to standard & LSI queries). |
| 433 # See https://forums.aws.amazon.com/thread.jspa?threadID=146212&tstart=0 |
| 434 users = Table.create('gsi_query_users', schema=[ |
| 435 HashKey('user_id') |
| 436 ], throughput={ |
| 437 'read': 5, |
| 438 'write': 3, |
| 439 }, |
| 440 global_indexes=[ |
| 441 GlobalIncludeIndex('UsernameIndex', parts=[ |
| 442 HashKey('username'), |
| 443 ], includes=['user_id', 'username'], throughput={ |
| 444 'read': 3, |
| 445 'write': 1, |
| 446 }) |
| 447 ]) |
| 448 self.addCleanup(users.delete) |
| 449 |
| 450 # Wait for it. |
| 451 time.sleep(60) |
| 452 |
| 453 users.put_item(data={ |
| 454 'user_id': '7', |
| 455 'username': 'johndoe', |
| 456 'first_name': 'John', |
| 457 'last_name': 'Doe', |
| 458 }) |
| 459 users.put_item(data={ |
| 460 'user_id': '24', |
| 461 'username': 'alice', |
| 462 'first_name': 'Alice', |
| 463 'last_name': 'Expert', |
| 464 }) |
| 465 users.put_item(data={ |
| 466 'user_id': '35', |
| 467 'username': 'jane', |
| 468 'first_name': 'Jane', |
| 469 'last_name': 'Doe', |
| 470 }) |
| 471 |
| 472 # Try the main key. Should be fine. |
| 473 rs = users.query_2( |
| 474 user_id__eq='24' |
| 475 ) |
| 476 results = sorted([user['username'] for user in rs]) |
| 477 self.assertEqual(results, ['alice']) |
| 478 |
| 479 # Now try the GSI. Also should work. |
| 480 rs = users.query_2( |
| 481 username__eq='johndoe', |
| 482 index='UsernameIndex' |
| 483 ) |
| 484 results = sorted([user['username'] for user in rs]) |
| 485 self.assertEqual(results, ['johndoe']) |
| 399 | 486 |
| 400 def test_query_with_limits(self): | 487 def test_query_with_limits(self): |
| 401 # Per the DDB team, it's recommended to do many smaller gets with a | 488 # Per the DDB team, it's recommended to do many smaller gets with a |
| 402 # reduced page size. | 489 # reduced page size. |
| 403 # Clamp down the page size while ensuring that the correct number of | 490 # Clamp down the page size while ensuring that the correct number of |
| 404 # results are still returned. | 491 # results are still returned. |
| 405 posts = Table.create('posts', schema=[ | 492 posts = Table.create('posts', schema=[ |
| 406 HashKey('thread'), | 493 HashKey('thread'), |
| 407 RangeKey('posted_on') | 494 RangeKey('posted_on') |
| 408 ], throughput={ | 495 ], throughput={ |
| (...skipping 13 matching lines...) Expand all Loading... |
| 422 with open(test_data_path, 'r') as test_data: | 509 with open(test_data_path, 'r') as test_data: |
| 423 data = json.load(test_data) | 510 data = json.load(test_data) |
| 424 | 511 |
| 425 with posts.batch_write() as batch: | 512 with posts.batch_write() as batch: |
| 426 for post in data: | 513 for post in data: |
| 427 batch.put_item(post) | 514 batch.put_item(post) |
| 428 | 515 |
| 429 time.sleep(5) | 516 time.sleep(5) |
| 430 | 517 |
| 431 # Test the reduced page size. | 518 # Test the reduced page size. |
| 432 results = posts.query( | 519 results = posts.query_2( |
| 433 thread__eq='Favorite chiptune band?', | 520 thread__eq='Favorite chiptune band?', |
| 434 posted_on__gte='2013-12-24T00:00:00', | 521 posted_on__gte='2013-12-24T00:00:00', |
| 435 max_page_size=2 | 522 max_page_size=2 |
| 436 ) | 523 ) |
| 437 | 524 |
| 438 all_posts = list(results) | 525 all_posts = list(results) |
| 439 self.assertEqual( | 526 self.assertEqual( |
| 440 [post['posted_by'] for post in all_posts], | 527 [post['posted_by'] for post in all_posts], |
| 441 ['joe', 'jane', 'joe', 'joe', 'jane', 'joe'] | 528 ['joe', 'jane', 'joe', 'joe', 'jane', 'joe'] |
| 442 ) | 529 ) |
| 443 self.assertEqual(results._fetches, 3) | 530 self.assertTrue(results._fetches >= 3) |
| 531 |
| 532 def test_query_with_reverse(self): |
| 533 posts = Table.create('more-posts', schema=[ |
| 534 HashKey('thread'), |
| 535 RangeKey('posted_on') |
| 536 ], throughput={ |
| 537 'read': 5, |
| 538 'write': 5, |
| 539 }) |
| 540 self.addCleanup(posts.delete) |
| 541 |
| 542 # Wait for it. |
| 543 time.sleep(60) |
| 544 |
| 545 # Add some data. |
| 546 test_data_path = os.path.join( |
| 547 os.path.dirname(__file__), |
| 548 'forum_test_data.json' |
| 549 ) |
| 550 with open(test_data_path, 'r') as test_data: |
| 551 data = json.load(test_data) |
| 552 |
| 553 with posts.batch_write() as batch: |
| 554 for post in data: |
| 555 batch.put_item(post) |
| 556 |
| 557 time.sleep(5) |
| 558 |
| 559 # Test the default order (ascending). |
| 560 results = posts.query_2( |
| 561 thread__eq='Favorite chiptune band?', |
| 562 posted_on__gte='2013-12-24T00:00:00' |
| 563 ) |
| 564 self.assertEqual( |
| 565 [post['posted_on'] for post in results], |
| 566 [ |
| 567 '2013-12-24T12:30:54', |
| 568 '2013-12-24T12:35:40', |
| 569 '2013-12-24T13:45:30', |
| 570 '2013-12-24T14:15:14', |
| 571 '2013-12-24T14:25:33', |
| 572 '2013-12-24T15:22:22', |
| 573 ] |
| 574 ) |
| 575 |
| 576 # Test the explicit ascending order. |
| 577 results = posts.query_2( |
| 578 thread__eq='Favorite chiptune band?', |
| 579 posted_on__gte='2013-12-24T00:00:00', |
| 580 reverse=False |
| 581 ) |
| 582 self.assertEqual( |
| 583 [post['posted_on'] for post in results], |
| 584 [ |
| 585 '2013-12-24T12:30:54', |
| 586 '2013-12-24T12:35:40', |
| 587 '2013-12-24T13:45:30', |
| 588 '2013-12-24T14:15:14', |
| 589 '2013-12-24T14:25:33', |
| 590 '2013-12-24T15:22:22', |
| 591 ] |
| 592 ) |
| 593 |
| 594 # Test the explicit descending order. |
| 595 results = posts.query_2( |
| 596 thread__eq='Favorite chiptune band?', |
| 597 posted_on__gte='2013-12-24T00:00:00', |
| 598 reverse=True |
| 599 ) |
| 600 self.assertEqual( |
| 601 [post['posted_on'] for post in results], |
| 602 [ |
| 603 '2013-12-24T15:22:22', |
| 604 '2013-12-24T14:25:33', |
| 605 '2013-12-24T14:15:14', |
| 606 '2013-12-24T13:45:30', |
| 607 '2013-12-24T12:35:40', |
| 608 '2013-12-24T12:30:54', |
| 609 ] |
| 610 ) |
| 611 |
| 612 # Test the old, broken style. |
| 613 results = posts.query( |
| 614 thread__eq='Favorite chiptune band?', |
| 615 posted_on__gte='2013-12-24T00:00:00' |
| 616 ) |
| 617 self.assertEqual( |
| 618 [post['posted_on'] for post in results], |
| 619 [ |
| 620 '2013-12-24T15:22:22', |
| 621 '2013-12-24T14:25:33', |
| 622 '2013-12-24T14:15:14', |
| 623 '2013-12-24T13:45:30', |
| 624 '2013-12-24T12:35:40', |
| 625 '2013-12-24T12:30:54', |
| 626 ] |
| 627 ) |
| 628 results = posts.query( |
| 629 thread__eq='Favorite chiptune band?', |
| 630 posted_on__gte='2013-12-24T00:00:00', |
| 631 reverse=True |
| 632 ) |
| 633 self.assertEqual( |
| 634 [post['posted_on'] for post in results], |
| 635 [ |
| 636 '2013-12-24T12:30:54', |
| 637 '2013-12-24T12:35:40', |
| 638 '2013-12-24T13:45:30', |
| 639 '2013-12-24T14:15:14', |
| 640 '2013-12-24T14:25:33', |
| 641 '2013-12-24T15:22:22', |
| 642 ] |
| 643 ) |
| OLD | NEW |