#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2006,2007 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.

"""
Some unit tests for the S3Connection.

These tests talk to the live S3 service, so valid AWS credentials must be
available to boto (e.g. via environment variables or the boto config file).
"""

import unittest
import time
import os
import urllib
from boto.s3.connection import S3Connection
from boto.exception import S3PermissionsError

class S3ConnectionTest (unittest.TestCase):

    def test_1_basic(self):
        print '--- running S3Connection tests ---'
        c = S3Connection()
        # create a new, empty bucket
        bucket_name = 'test-%d' % int(time.time())
        bucket = c.create_bucket(bucket_name)
        # now try a get_bucket call and see if it's really there
        bucket = c.get_bucket(bucket_name)
        # test logging
        logging_bucket = c.create_bucket(bucket_name + '-log')
        logging_bucket.set_as_logging_target()
        bucket.enable_logging(target_bucket=logging_bucket, target_prefix=bucket.name)
        bucket.disable_logging()
        c.delete_bucket(logging_bucket)
        # create a key and store a string in it
        k = bucket.new_key()
        k.name = 'foobar'
        s1 = 'This is a test of file upload and download'
        s2 = 'This is a second string to test file upload and download'
        k.set_contents_from_string(s1)
        fp = open('foobar', 'wb')
        # now get the contents from s3 to a local file
        k.get_contents_to_file(fp)
        fp.close()
        fp = open('foobar')
        # check to make sure content read from s3 is identical to original
        assert s1 == fp.read(), 'corrupted file'
        fp.close()
        # test generated URLs
        url = k.generate_url(3600)
        file = urllib.urlopen(url)
        assert s1 == file.read(), 'invalid URL %s' % url
        url = k.generate_url(3600, force_http=True)
        file = urllib.urlopen(url)
        assert s1 == file.read(), 'invalid URL %s' % url
        bucket.delete_key(k)
        # test a few variations on get_all_keys - first load some data
        # for the first one, let's override the content type
        phony_mimetype = 'application/x-boto-test'
        headers = {'Content-Type': phony_mimetype}
        k.name = 'foo/bar'
        k.set_contents_from_string(s1, headers)
        k.name = 'foo/bas'
        k.set_contents_from_filename('foobar')
        k.name = 'foo/bat'
        k.set_contents_from_string(s1)
        k.name = 'fie/bar'
        k.set_contents_from_string(s1)
        k.name = 'fie/bas'
        k.set_contents_from_string(s1)
        k.name = 'fie/bat'
        k.set_contents_from_string(s1)
        # try resetting the contents to another value
        md5 = k.md5
        k.set_contents_from_string(s2)
        assert k.md5 != md5
        os.unlink('foobar')
        all = bucket.get_all_keys()
        assert len(all) == 6
        rs = bucket.get_all_keys(prefix='foo')
        assert len(rs) == 3
        rs = bucket.get_all_keys(prefix='', delimiter='/')
        assert len(rs) == 2
        rs = bucket.get_all_keys(maxkeys=5)
        assert len(rs) == 5
        # test the lookup method
        k = bucket.lookup('foo/bar')
        assert isinstance(k, bucket.key_class)
        assert k.content_type == phony_mimetype
        k = bucket.lookup('notthere')
        assert k is None
        # try some metadata stuff
        k = bucket.new_key()
        k.name = 'has_metadata'
        mdkey1 = 'meta1'
        mdval1 = 'This is the first metadata value'
        k.set_metadata(mdkey1, mdval1)
        mdkey2 = 'meta2'
        mdval2 = 'This is the second metadata value'
        k.set_metadata(mdkey2, mdval2)
        # try a unicode metadata value
        mdval3 = u'föö'
        mdkey3 = 'meta3'
        k.set_metadata(mdkey3, mdval3)
        k.set_contents_from_string(s1)
        k = bucket.lookup('has_metadata')
        assert k.get_metadata(mdkey1) == mdval1
        assert k.get_metadata(mdkey2) == mdval2
        assert k.get_metadata(mdkey3) == mdval3
        k = bucket.new_key()
        k.name = 'has_metadata'
        k.get_contents_as_string()
        assert k.get_metadata(mdkey1) == mdval1
        assert k.get_metadata(mdkey2) == mdval2
        assert k.get_metadata(mdkey3) == mdval3
        bucket.delete_key(k)
        # test list and iterator
        rs1 = bucket.list()
        num_iter = 0
        for r in rs1:
            num_iter = num_iter + 1
        rs = bucket.get_all_keys()
        num_keys = len(rs)
        assert num_iter == num_keys
        # try a key with a funny character
        k = bucket.new_key()
        k.name = 'testnewline\n'
        k.set_contents_from_string('This is a test')
        rs = bucket.get_all_keys()
        assert len(rs) == num_keys + 1
        bucket.delete_key(k)
        rs = bucket.get_all_keys()
        assert len(rs) == num_keys
        # try some acl stuff
        bucket.set_acl('public-read')
        policy = bucket.get_acl()
        assert len(policy.acl.grants) == 2
        bucket.set_acl('private')
        policy = bucket.get_acl()
        assert len(policy.acl.grants) == 1
        k = bucket.lookup('foo/bar')
        k.set_acl('public-read')
        policy = k.get_acl()
        assert len(policy.acl.grants) == 2
        k.set_acl('private')
        policy = k.get_acl()
        assert len(policy.acl.grants) == 1
        # try the convenience methods for grants
        bucket.add_user_grant('FULL_CONTROL',
                              'c1e724fbfa0979a4448393c59a8c055011f739b6d102fb37a65f26414653cd67')
        try:
            # an invalid grant should raise S3PermissionsError
            bucket.add_email_grant('foobar', 'foo@bar.com')
        except S3PermissionsError:
            pass
        # now try to create an RRS key
        k = bucket.new_key('reduced_redundancy')
        k.set_contents_from_string('This key has reduced redundancy',
                                   reduced_redundancy=True)

        # now try to inject a response header
        data = k.get_contents_as_string(response_headers={'response-content-type' : 'foo/bar'})
        assert k.content_type == 'foo/bar'

        # now delete all keys in bucket
        for k in bucket:
            if k.name == 'reduced_redundancy':
                assert k.storage_class == 'REDUCED_REDUNDANCY'
            bucket.delete_key(k)
        # now delete bucket
        time.sleep(5)
        c.delete_bucket(bucket)
        print '--- tests completed ---'